mirror of https://github.com/ansible/awx.git (synced 2026-01-15 11:50:42 -03:30)

commit 8b9db837ca
Merge branch 'devel' of github.com:ansible/awx into 6116-incorrect-error-messages
@@ -5,6 +5,7 @@
 import dateutil
 import functools
 import html
+import itertools
 import logging
 import re
 import requests
@@ -20,9 +21,10 @@ from urllib3.exceptions import ConnectTimeoutError
 # Django
 from django.conf import settings
 from django.core.exceptions import FieldError, ObjectDoesNotExist
-from django.db.models import Q, Sum
+from django.db.models import Q, Sum, Count
 from django.db import IntegrityError, ProgrammingError, transaction, connection
 from django.db.models.fields.related import ManyToManyField, ForeignKey
+from django.db.models.functions import Trunc
 from django.shortcuts import get_object_or_404
 from django.utils.safestring import mark_safe
 from django.utils.timezone import now
@@ -47,9 +49,6 @@ from rest_framework import status
 from rest_framework_yaml.parsers import YAMLParser
 from rest_framework_yaml.renderers import YAMLRenderer
 
-# QSStats
-import qsstats
-
 # ANSIConv
 import ansiconv
 
@@ -283,30 +282,50 @@ class DashboardJobsGraphView(APIView):
             success_query = success_query.filter(instance_of=models.ProjectUpdate)
             failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
 
-        success_qss = qsstats.QuerySetStats(success_query, 'finished')
-        failed_qss = qsstats.QuerySetStats(failed_query, 'finished')
-
-        start_date = now()
+        end = now()
+        interval = 'day'
         if period == 'month':
-            end_date = start_date - dateutil.relativedelta.relativedelta(months=1)
-            interval = 'days'
+            start = end - dateutil.relativedelta.relativedelta(months=1)
         elif period == 'two_weeks':
-            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=2)
-            interval = 'days'
+            start = end - dateutil.relativedelta.relativedelta(weeks=2)
         elif period == 'week':
-            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1)
-            interval = 'days'
+            start = end - dateutil.relativedelta.relativedelta(weeks=1)
         elif period == 'day':
-            end_date = start_date - dateutil.relativedelta.relativedelta(days=1)
-            interval = 'hours'
+            start = end - dateutil.relativedelta.relativedelta(days=1)
+            interval = 'hour'
         else:
            return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
 
         dashboard_data = {"jobs": {"successful": [], "failed": []}}
-        for element in success_qss.time_series(end_date, start_date, interval=interval):
-            dashboard_data['jobs']['successful'].append([time.mktime(element[0].timetuple()), element[1]])
-        for element in failed_qss.time_series(end_date, start_date, interval=interval):
-            dashboard_data['jobs']['failed'].append([time.mktime(element[0].timetuple()), element[1]])
+
+        succ_list = dashboard_data['jobs']['successful']
+        fail_list = dashboard_data['jobs']['failed']
+
+        qs_s = (
+            success_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_s = {item['d']: item['agg'] for item in qs_s}
+        qs_f = (
+            failed_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_f = {item['d']: item['agg'] for item in qs_f}
+
+        start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
+        for d in itertools.count():
+            date = start_date + dateutil.relativedelta.relativedelta(days=d)
+            if date > end:
+                break
+            succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
+            fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
 
         return Response(dashboard_data)
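Note: this hunk drops the qsstats dependency in favor of a single grouped ORM query. A minimal standalone sketch of the same bucketed-count pattern, assuming any queryset with a `finished` DateTimeField (function and variable names here are illustrative, not AWX's exact API):

```python
# Sketch: count rows per day with one ORM query instead of qsstats.
from django.db.models import Count
from django.db.models.functions import Trunc


def daily_counts(queryset, start, end):
    buckets = (
        queryset.filter(finished__range=(start, end))
        .annotate(d=Trunc('finished', 'day', tzinfo=end.tzinfo))
        .order_by()  # clear default ordering so values()/annotate() group only on the truncated date
        .values('d')
        .annotate(agg=Count('id', distinct=True))
    )
    # Days with no rows are simply absent from the result; callers fill
    # zeroes, as the itertools.count() loop above does.
    return {item['d']: item['agg'] for item in buckets}
```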
@@ -5,7 +5,9 @@ import logging
 
 from django.conf import settings
+from django.apps import apps
 
 from awx.main.consumers import emit_channel_notification
+from awx.main.utils import is_testing
 
 root_key = 'awx_metrics'
 logger = logging.getLogger('awx.main.analytics')
@@ -163,7 +165,7 @@ class Metrics:
         Instance = apps.get_model('main', 'Instance')
         if instance_name:
             self.instance_name = instance_name
-        elif settings.IS_TESTING():
+        elif is_testing():
             self.instance_name = "awx_testing"
         else:
             self.instance_name = Instance.objects.my_hostname()
@@ -1,6 +1,5 @@
 from .plugin import CredentialPlugin, CertFiles, raise_for_status
 
-import base64
 from urllib.parse import urljoin, quote
 
 from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
     cacert = kwargs.get('cacert', None)
 
     auth_kwargs = {
-        'headers': {'Content-Type': 'text/plain'},
+        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
         'data': api_key,
         'allow_redirects': False,
     }
@@ -69,9 +68,9 @@ def conjur_backend(**kwargs):
     with CertFiles(cacert) as cert:
         # https://www.conjur.org/api.html#authentication-authenticate-post
         auth_kwargs['verify'] = cert
-        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+        resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
     raise_for_status(resp)
-    token = base64.b64encode(resp.content).decode('utf-8')
+    token = resp.content.decode('utf-8')
 
     lookup_kwargs = {
         'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -79,9 +78,10 @@ def conjur_backend(**kwargs):
     }
 
     # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
-    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
+    path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
     if version:
-        path = '?'.join([path, version])
+        ver = "version={}".format(version)
+        path = '?'.join([path, ver])
 
     with CertFiles(cacert) as cert:
         lookup_kwargs['verify'] = cert
@@ -90,4 +90,4 @@ def conjur_backend(**kwargs):
     return resp.text
 
 
-conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
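Read together, the Conjur hunks move both endpoints under an `/api` prefix, ask the server for a base64 token via the `Accept-Encoding` header instead of encoding client-side, and build the version query string explicitly. A hedged end-to-end sketch of the resulting flow using plain `requests` (parameter names mirror the surrounding code; TLS verification and error handling simplified):

```python
import requests
from urllib.parse import urljoin


def fetch_conjur_secret(url, account, username, api_key, secret_path, version=None):
    # Authenticate: 'Accept-Encoding: base64' asks Conjur to return the
    # short-lived token already base64-encoded, so no b64encode is needed.
    resp = requests.post(
        urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])),
        headers={'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
        data=api_key,
        allow_redirects=False,
    )
    resp.raise_for_status()
    token = resp.content.decode('utf-8')

    # Retrieve the secret with the token; pin a version when requested.
    path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
    if version:
        path = '?'.join([path, 'version={}'.format(version)])
    lookup = requests.get(path, headers={'Authorization': 'Token token="{}"'.format(token)})
    lookup.raise_for_status()
    return lookup.text
```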
@@ -466,7 +466,7 @@ class AutoscalePool(WorkerPool):
                 task_name = 'unknown'
                 if isinstance(body, dict):
                     task_name = body.get('task')
-                logger.warn(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
+                logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
             return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
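For background, `Logger.warn` is an undocumented, deprecated alias of `Logger.warning` in the standard library, so this is a drop-in rename:

```python
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger('awx.main.dispatch')

# warning() is the documented API; warn() is a deprecated alias kept for
# backward compatibility.
logger.warning('Workers maxed, queuing %s', 'unknown')
```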
@@ -1,14 +1,13 @@
 import inspect
 import logging
-import sys
 import json
 import time
 from uuid import uuid4
 
-from django.conf import settings
 from django_guid import get_guid
 
 from . import pg_bus_conn
+from awx.main.utils import is_testing
 
 logger = logging.getLogger('awx.main.dispatch')
 
@@ -93,7 +92,7 @@ class task:
             obj.update(**kw)
         if callable(queue):
             queue = queue()
-        if not settings.IS_TESTING(sys.argv):
+        if not is_testing():
             with pg_bus_conn() as conn:
                 conn.notify(queue, json.dumps(obj))
         return (obj, queue)
@@ -233,11 +233,12 @@ class Instance(HasPolicyEditsMixin, BaseModel):
         if not isinstance(vargs.get('grace_period'), int):
             vargs['grace_period'] = 60  # grace period of 60 minutes, need to set because CLI default will not take effect
         if 'exclude_strings' not in vargs and vargs.get('file_pattern'):
-            active_pks = list(
-                UnifiedJob.objects.filter(
-                    (models.Q(execution_node=self.hostname) | models.Q(controller_node=self.hostname)) & models.Q(status__in=('running', 'waiting'))
-                ).values_list('pk', flat=True)
-            )
+            active_job_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
+            if self.node_type == 'execution':
+                active_job_qs = active_job_qs.filter(execution_node=self.hostname)
+            else:
+                active_job_qs = active_job_qs.filter(controller_node=self.hostname)
+            active_pks = list(active_job_qs.values_list('pk', flat=True))
             if active_pks:
                 vargs['exclude_strings'] = [JOB_FOLDER_PREFIX % job_id for job_id in active_pks]
         if 'remove_images' in vargs or 'image_prune' in vargs:
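The rewritten query's intent, as a standalone sketch: instead of OR-ing both node columns, filter on the one column that matches the instance's role. This is a hedged reconstruction of just the lookup above, not the full cleanup method:

```python
# Sketch of the node-type-aware active-job lookup; the import path is an
# assumption matching the diff context.
from awx.main.models import UnifiedJob


def active_job_pks(instance):
    qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
    if instance.node_type == 'execution':
        # Execution nodes only run jobs, so only execution_node can match.
        qs = qs.filter(execution_node=instance.hostname)
    else:
        # Control (and hybrid) nodes are recorded as the controller_node.
        qs = qs.filter(controller_node=instance.hostname)
    return list(qs.values_list('pk', flat=True))
```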
@@ -39,7 +39,7 @@ from awx.main.utils import (
     ScheduleTaskManager,
     ScheduleWorkflowManager,
 )
-from awx.main.utils.common import task_manager_bulk_reschedule
+from awx.main.utils.common import task_manager_bulk_reschedule, is_testing
 from awx.main.signals import disable_activity_stream
 from awx.main.constants import ACTIVE_STATES
 from awx.main.scheduler.dependency_graph import DependencyGraph
@@ -97,7 +97,7 @@ class TaskBase:
         self.all_tasks = [t for t in qs]
 
     def record_aggregate_metrics(self, *args):
-        if not settings.IS_TESTING():
+        if not is_testing():
             # increment task_manager_schedule_calls regardless if the other
             # metrics are recorded
             s_metrics.Metrics(auto_pipe_execute=True).inc(f"{self.prefix}__schedule_calls", 1)
@@ -1,7 +1,7 @@
 import pytest
 from unittest import mock
 
-from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate
+from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate, Job
 from awx.main.models.activity_stream import ActivityStream
 from awx.main.models.ha import Instance, InstanceGroup
 from awx.main.tasks.system import apply_cluster_membership_policies
@@ -15,6 +15,24 @@ def test_default_tower_instance_group(default_instance_group, job_factory):
     assert default_instance_group in job_factory().preferred_instance_groups
 
 
+@pytest.mark.django_db
+@pytest.mark.parametrize('node_type', ('execution', 'control'))
+@pytest.mark.parametrize('active', (True, False))
+def test_get_cleanup_task_kwargs_active_jobs(node_type, active):
+    instance = Instance.objects.create(hostname='foobar', node_type=node_type)
+    job_kwargs = dict()
+    job_kwargs['controller_node' if node_type == 'control' else 'execution_node'] = instance.hostname
+    job_kwargs['status'] = 'running' if active else 'successful'
+
+    job = Job.objects.create(**job_kwargs)
+    kwargs = instance.get_cleanup_task_kwargs()
+
+    if active:
+        assert kwargs['exclude_strings'] == [f'awx_{job.pk}_']
+    else:
+        assert 'exclude_strings' not in kwargs
+
+
 @pytest.mark.django_db
 class TestPolicyTaskScheduling:
     """Tests make assertions about when the policy task gets scheduled"""
@@ -11,11 +11,12 @@ import os
 import subprocess
 import re
 import stat
 import sys
 import urllib.parse
 import threading
 import contextlib
 import tempfile
-from functools import reduce, wraps
+import functools
 
 # Django
 from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
@@ -73,6 +74,7 @@ __all__ = [
     'NullablePromptPseudoField',
     'model_instance_diff',
     'parse_yaml_or_json',
+    'is_testing',
     'RequireDebugTrueOrTest',
     'has_model_field_prefetched',
     'set_environ',
@@ -144,6 +146,19 @@ def underscore_to_camelcase(s):
     return ''.join(x.capitalize() or '_' for x in s.split('_'))
 
 
+@functools.cache
+def is_testing(argv=None):
+    '''Return True if running django or py.test unit tests.'''
+    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
+        return True
+    argv = sys.argv if argv is None else argv
+    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
+        return True
+    elif len(argv) >= 2 and argv[1] == 'test':
+        return True
+    return False
+
+
 class RequireDebugTrueOrTest(logging.Filter):
     """
     Logging filter to output when in DEBUG mode or running tests.
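One subtlety in the `@functools.cache` decorator added here (available since Python 3.9): the result is memoized per argument tuple for the life of the process, and an unhashable argument such as a list for `argv` would raise `TypeError`, so the cached callers elsewhere in this commit all invoke `is_testing()` with no arguments. A minimal illustration of the caching behavior:

```python
import functools


@functools.cache
def expensive_flag():
    print('computed once')
    return True


expensive_flag()  # prints 'computed once'
expensive_flag()  # served from the cache; no print
```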
@@ -152,7 +167,7 @@ class RequireDebugTrueOrTest(logging.Filter):
     def filter(self, record):
         from django.conf import settings
 
-        return settings.DEBUG or settings.IS_TESTING()
+        return settings.DEBUG or is_testing()
 
 
 class IllegalArgumentError(ValueError):
@@ -174,7 +189,7 @@ def memoize(ttl=60, cache_key=None, track_function=False, cache=None):
     cache = cache or get_memoize_cache()
 
     def memoize_decorator(f):
-        @wraps(f)
+        @functools.wraps(f)
         def _memoizer(*args, **kwargs):
             if track_function:
                 cache_dict_key = slugify('%r %r' % (args, kwargs))
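The `wraps` to `functools.wraps` changes in this file are behavior-neutral; the decorator still copies the wrapped function's metadata onto the wrapper. A small reminder of why that matters:

```python
import functools


def log_calls(func):
    @functools.wraps(func)  # preserves func.__name__, __doc__, etc.
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


@log_calls
def add(a, b):
    """Add two numbers."""
    return a + b


print(add.__name__)  # 'add', not 'wrapper', thanks to functools.wraps
```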
@@ -992,7 +1007,7 @@ def getattrd(obj, name, default=NoDefaultProvided):
     """
 
     try:
-        return reduce(getattr, name.split("."), obj)
+        return functools.reduce(getattr, name.split("."), obj)
     except AttributeError:
         if default != NoDefaultProvided:
             return default
@@ -1188,7 +1203,7 @@ def cleanup_new_process(func):
     Cleanup django connection, cache connection, before executing new thread or processes entry point, func.
     """
 
-    @wraps(func)
+    @functools.wraps(func)
     def wrapper_cleanup_new_process(*args, **kwargs):
         from awx.conf.settings import SettingsWrapper  # noqa
 
@@ -1202,7 +1217,7 @@ def cleanup_new_process(func):
 
 def log_excess_runtime(func_logger, cutoff=5.0):
     def log_excess_runtime_decorator(func):
-        @wraps(func)
+        @functools.wraps(func)
         def _new_func(*args, **kwargs):
             start_time = time.time()
             return_value = func(*args, **kwargs)
@@ -10,28 +10,6 @@ import socket
 from datetime import timedelta
 
 
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(__file__))
-
-
-def is_testing(argv=None):
-    import sys
-
-    '''Return True if running django or py.test unit tests.'''
-    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
-        return True
-    argv = sys.argv if argv is None else argv
-    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
-        return True
-    elif len(argv) >= 2 and argv[1] == 'test':
-        return True
-    return False
-
-
-def IS_TESTING(argv=None):
-    return is_testing(argv)
-
-
 if "pytest" in sys.modules:
     from unittest import mock
 
@@ -40,9 +18,13 @@ if "pytest" in sys.modules:
 else:
     import ldap
 
 
 DEBUG = True
 SQL_DEBUG = DEBUG
 
+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+
 # FIXME: it would be nice to cycle back around and allow this to be
 # BigAutoField going forward, but we'd have to be explicit about our
 # existing models.
@@ -282,7 +282,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -301,7 +301,7 @@
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -36,14 +36,14 @@ const mockCredentialTypeDetail = {
   url: '/api/v2/credential_types/20/',
   related: {
     named_url:
-      '/api/v2/credential_types/CyberArk Conjur Secret Lookup+external/',
+      '/api/v2/credential_types/CyberArk Conjur Secrets Manager Lookup+external/',
     credentials: '/api/v2/credential_types/20/credentials/',
     activity_stream: '/api/v2/credential_types/20/activity_stream/',
   },
   summary_fields: { user_capabilities: { edit: false, delete: false } },
   created: '2020-05-18T21:53:35.398260Z',
   modified: '2020-05-18T21:54:05.451444Z',
-  name: 'CyberArk Conjur Secret Lookup',
+  name: 'CyberArk Conjur Secrets Manager Lookup',
   description: '',
   kind: 'external',
   namespace: 'conjur',
@@ -546,7 +546,7 @@
   },
   "created": "2020-05-18T21:53:35.398260Z",
   "modified": "2020-05-18T21:54:05.451444Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "kind": "external",
   "namespace": "conjur",
@@ -3,7 +3,7 @@
   "type": "credential",
   "url": "/api/v2/credentials/1/",
   "related": {
-    "named_url": "/api/v2/credentials/CyberArk Conjur Secret Lookup++CyberArk Conjur Secret Lookup+external++/",
+    "named_url": "/api/v2/credentials/CyberArk Conjur Secrets Manager Lookup+external++/",
     "created_by": "/api/v2/users/1/",
     "modified_by": "/api/v2/users/1/",
     "activity_stream": "/api/v2/credentials/1/activity_stream/",
@@ -19,7 +19,7 @@
   "summary_fields": {
     "credential_type": {
       "id": 20,
-      "name": "CyberArk Conjur Secret Lookup",
+      "name": "CyberArk Conjur Secrets Manager Lookup",
       "description": ""
     },
     "created_by": {
@@ -69,7 +69,7 @@
   },
   "created": "2020-05-19T12:51:36.956029Z",
   "modified": "2020-05-19T12:51:36.956086Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "organization": null,
   "credential_type": 20,
@@ -70,7 +70,6 @@ const getStdOutValue = (hostEvent) => {
 function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
   const [hostStatus, setHostStatus] = useState(null);
   const [activeTabKey, setActiveTabKey] = useState(0);
-
   useEffect(() => {
     setHostStatus(processEventStatus(hostEvent));
   }, [setHostStatus, hostEvent]);
@@ -108,11 +107,11 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
         style={{ alignItems: 'center', marginTop: '20px' }}
         gutter="sm"
       >
-        <Detail label={t`Host`} value={hostEvent.host_name} />
-        {hostEvent.summary_fields.host?.description ? (
+        <Detail label={t`Host`} value={hostEvent.event_data?.host} />
+        {hostEvent.summary_fields?.host?.description ? (
           <Detail
             label={t`Description`}
-            value={hostEvent.summary_fields.host.description}
+            value={hostEvent.summary_fields?.host?.description}
           />
         ) : null}
         {hostStatus ? (
@@ -125,12 +124,9 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
             <Detail label={t`Task`} value={hostEvent.task} />
             <Detail
               label={t`Module`}
-              value={hostEvent.event_data.task_action || t`No result found`}
-            />
-            <Detail
-              label={t`Command`}
-              value={hostEvent?.event_data?.res?.cmd}
-            />
+              value={hostEvent.event_data?.task_action || t`No result found`}
+            />
+            <Detail label={t`Command`} value={hostEvent.event_data?.res?.cmd} />
           </DetailList>
         </Tab>
         <Tab
@@ -52,6 +52,47 @@ const hostEvent = {
   },
 };
 
+const partialHostEvent = {
+  changed: true,
+  event: 'runner_on_ok',
+  event_data: {
+    host: 'foo',
+    play: 'all',
+    playbook: 'run_command.yml',
+    res: {
+      ansible_loop_var: 'item',
+      changed: true,
+      item: '1',
+      msg: 'This is a debug message: 1',
+      stdout:
+        ' total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023',
+      stderr: 'problems',
+      cmd: ['free', '-m'],
+      stderr_lines: [],
+      stdout_lines: [
+        ' total used free shared buff/cache available',
+        'Mem: 7973 3005 960 30 4007 4582',
+        'Swap: 1023 0 1023',
+      ],
+    },
+    task: 'command',
+    task_action: 'command',
+  },
+  event_display: 'Host OK',
+  event_level: 3,
+  failed: false,
+  host: 1,
+  id: 123,
+  job: 4,
+  play: 'all',
+  playbook: 'run_command.yml',
+  stdout: `stdout: "[0;33mchanged: [localhost] => {"changed": true, "cmd": ["free", "-m"], "delta": "0:00:01.479609", "end": "2019-09-10 14:21:45.469533", "rc": 0, "start": "2019-09-10 14:21:43.989924", "stderr": "", "stderr_lines": [], "stdout": " total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023", "stdout_lines": [" total used free shared buff/cache available", "Mem: 7973 3005 960 30 4007 4582", "Swap: 1023 0 1023"]}[0m"
+`,
+  task: 'command',
+  type: 'job_event',
+  url: '/api/v2/job_events/123/',
+};
+
 /*
   Some libraries return a list of string in stdout
   Example: https://github.com/ansible-collections/cisco.ios/blob/main/plugins/modules/ios_command.py#L124-L128
@@ -134,6 +175,13 @@ describe('HostEventModal', () => {
     expect(wrapper).toHaveLength(1);
   });
 
+  test('renders successfully with partial data', () => {
+    const wrapper = shallow(
+      <HostEventModal hostEvent={partialHostEvent} onClose={() => {}} />
+    );
+    expect(wrapper).toHaveLength(1);
+  });
+
   test('should render all tabs', () => {
     const wrapper = shallow(
       <HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
@@ -52,7 +52,7 @@ options:
     - The credential type being created.
     - Can be a built-in credential type such as "Machine", or a custom credential type such as "My Credential Type"
     - Choices include Amazon Web Services, Ansible Galaxy/Automation Hub API Token, Centrify Vault Credential Provider Lookup,
-      Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secret Lookup, Google Compute Engine,
+      Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secrets Manager Lookup, Google Compute Engine,
       GitHub Personal Access Token, GitLab Personal Access Token, GPG Public Key, HashiCorp Vault Secret Lookup, HashiCorp Vault Signed SSH,
       Insights, Machine, Microsoft Azure Key Vault, Microsoft Azure Resource Manager, Network, OpenShift or Kubernetes API
       Bearer Token, OpenStack, Red Hat Ansible Automation Platform, Red Hat Satellite 6, Red Hat Virtualization, Source Control,
@@ -52,6 +52,7 @@ html_static_path = ['_static']
 
 rst_epilog = '''
 .. |prog| replace:: awx
-.. |at| replace:: Ansible Tower
-.. |RHAT| replace:: Red Hat Ansible Tower
+.. |at| replace:: automation controller
+.. |At| replace:: Automation controller
+.. |RHAT| replace:: Red Hat Ansible Automation Platform controller
 '''
@@ -197,8 +197,10 @@ def parse_resource(client, skip_deprecated=False):
 
     if hasattr(client, 'v2'):
         for k in client.v2.json.keys():
-            if k in ('dashboard',):
-                # the Dashboard API is deprecated and not supported
+            if k in ('dashboard', 'config'):
+                # - the Dashboard API is deprecated and not supported
+                # - the Config command is already dealt with by the
+                #   CustomCommand section above
                 continue
 
             # argparse aliases are *only* supported in Python3 (not 2.7)
@@ -1,24 +0,0 @@
-Copyright (c) 2010, Matt Croydon, Mikhail Korobov
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-    * Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above copyright
-      notice, this list of conditions and the following disclaimer in the
-      documentation and/or other materials provided with the distribution.
-    * Neither the name of the tastypie nor the
-      names of its contributors may be used to endorse or promote products
-      derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL MATT CROYDON BE LIABLE FOR ANY
-DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,25 +1,22 @@
 # Dependency Management
 
-The `requirements.txt` file is generated from `requirements.in`, using `pip-tools` `pip-compile`.
+The `requirements.txt` file is generated from `requirements.in` and `requirements_git.txt`, using `pip-tools` and `pip-compile`.
 
 ## How To Use
 
-Commands should be run from inside the `./requirements` directory of the awx repository.
+Commands should be run in the awx container from inside the `./requirements` directory of the awx repository.
 
 ### Upgrading or Adding Select Libraries
 
 If you need to add or upgrade one targeted library, then modify `requirements.in`,
 then run the script:
 
-`./updater.sh`
-
-NOTE: `./updater.sh` uses /usr/bin/python3.6, to match the current python version
-(3.6) used to build releases.
+`./updater.sh run`
 
 #### Upgrading Unpinned Dependency
 
 If you require a new version of a dependency that does not have a pinned version
-for a fix or feature, pin a minimum version and run `./updater.sh`. For example,
+for a fix or feature, pin a minimum version in `requirements.in` and run `./updater.sh run`. For example,
 replace the line `asgi-amqp` with `asgi-amqp>=1.1.4`, and consider leaving a
 note.
@@ -19,7 +19,6 @@ django-guid==3.2.1
 django-oauth-toolkit==1.4.1
 django-polymorphic
 django-pglocks
-django-qsstats-magic
 django-redis
 django-solo
 django-split-settings
@@ -115,9 +115,6 @@ django-pglocks==1.0.4
     # via -r /awx_devel/requirements/requirements.in
 django-polymorphic==3.1.0
-    # via -r /awx_devel/requirements/requirements.in
-django-qsstats-magic==1.1.0
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r /awx_devel/requirements/requirements_git.txt
 django-redis==4.5.0
     # via -r /awx_devel/requirements/requirements.in
 django-solo==2.0.0
@@ -33,11 +33,47 @@ generate_requirements() {
 
 main() {
     base_dir=$(pwd)
-    _tmp="$(mktemp -d --suffix .awx-requirements XXXX -p /tmp)"
+
+    _tmp=$(python -c "import tempfile; print(tempfile.mkdtemp(suffix='.awx-requirements', dir='/tmp'))")
 
     trap _cleanup INT TERM EXIT
 
-    if [ "$1" = "upgrade" ]; then
-        pip_compile="${pip_compile} --upgrade"
-    fi
+    case $1 in
+        "run")
+            NEEDS_HELP=0
+            ;;
+        "upgrade")
+            NEEDS_HELP=0
+            pip_compile="${pip_compile} --upgrade"
+            ;;
+        "help")
+            NEEDS_HELP=1
+            ;;
+        *)
+            echo ""
+            echo "ERROR: Parameter $1 not valid"
+            echo ""
+            NEEDS_HELP=1
+            ;;
+    esac
+
+    if [[ "$NEEDS_HELP" == "1" ]] ; then
+        echo "This script generates requirements.txt from requirements.in and requirements_git.in"
+        echo "It should be run from within the awx container"
+        echo ""
+        echo "Usage: $0 [run|upgrade]"
+        echo ""
+        echo "Commands:"
+        echo "help     Print this message"
+        echo "run      Run the process only upgrading pinned libraries from requirements.in"
+        echo "upgrade  Upgrade all libraries to latest while respecting pinnings"
+        echo ""
+        exit
+    fi
+
+    if [[ ! -d /awx_devel ]] ; then
+        echo "This script should be run inside the awx container"
+        exit
+    fi
 
     cp -vf requirements.txt "${_tmp}"
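The temp-directory change in `updater.sh` sidesteps a portability gap: GNU `mktemp` supports `--suffix`, but the BSD `mktemp` shipped on macOS does not, whereas Python's `tempfile` behaves the same everywhere. The equivalent standalone call:

```python
import tempfile

# Portable replacement for `mktemp -d --suffix .awx-requirements -p /tmp`.
tmpdir = tempfile.mkdtemp(suffix='.awx-requirements', dir='/tmp')
print(tmpdir)
```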