diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index 90e52ed883..525c449bc0 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -5,6 +5,7 @@
import dateutil
import functools
import html
+import itertools
import logging
import re
import requests
@@ -20,9 +21,10 @@ from urllib3.exceptions import ConnectTimeoutError
# Django
from django.conf import settings
from django.core.exceptions import FieldError, ObjectDoesNotExist
-from django.db.models import Q, Sum
+from django.db.models import Q, Sum, Count
from django.db import IntegrityError, ProgrammingError, transaction, connection
from django.db.models.fields.related import ManyToManyField, ForeignKey
+from django.db.models.functions import Trunc
from django.shortcuts import get_object_or_404
from django.utils.safestring import mark_safe
from django.utils.timezone import now
@@ -47,9 +49,6 @@ from rest_framework import status
from rest_framework_yaml.parsers import YAMLParser
from rest_framework_yaml.renderers import YAMLRenderer
-# QSStats
-import qsstats
-
# ANSIConv
import ansiconv
@@ -283,30 +282,50 @@ class DashboardJobsGraphView(APIView):
success_query = success_query.filter(instance_of=models.ProjectUpdate)
failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
- success_qss = qsstats.QuerySetStats(success_query, 'finished')
- failed_qss = qsstats.QuerySetStats(failed_query, 'finished')
-
- start_date = now()
+ end = now()
+ interval = 'day'
if period == 'month':
- end_date = start_date - dateutil.relativedelta.relativedelta(months=1)
- interval = 'days'
+ start = end - dateutil.relativedelta.relativedelta(months=1)
elif period == 'two_weeks':
- end_date = start_date - dateutil.relativedelta.relativedelta(weeks=2)
- interval = 'days'
+ start = end - dateutil.relativedelta.relativedelta(weeks=2)
elif period == 'week':
- end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1)
- interval = 'days'
+ start = end - dateutil.relativedelta.relativedelta(weeks=1)
elif period == 'day':
- end_date = start_date - dateutil.relativedelta.relativedelta(days=1)
- interval = 'hours'
+ start = end - dateutil.relativedelta.relativedelta(days=1)
+ interval = 'hour'
else:
return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
dashboard_data = {"jobs": {"successful": [], "failed": []}}
- for element in success_qss.time_series(end_date, start_date, interval=interval):
- dashboard_data['jobs']['successful'].append([time.mktime(element[0].timetuple()), element[1]])
- for element in failed_qss.time_series(end_date, start_date, interval=interval):
- dashboard_data['jobs']['failed'].append([time.mktime(element[0].timetuple()), element[1]])
+
+ succ_list = dashboard_data['jobs']['successful']
+ fail_list = dashboard_data['jobs']['failed']
+
+ qs_s = (
+ success_query.filter(finished__range=(start, end))
+ .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+ .order_by()
+ .values('d')
+ .annotate(agg=Count('id', distinct=True))
+ )
+ data_s = {item['d']: item['agg'] for item in qs_s}
+ qs_f = (
+ failed_query.filter(finished__range=(start, end))
+ .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+ .order_by()
+ .values('d')
+ .annotate(agg=Count('id', distinct=True))
+ )
+ data_f = {item['d']: item['agg'] for item in qs_f}
+
+    start_date = start.replace(minute=0, second=0, microsecond=0) if interval == 'hour' else start.replace(hour=0, minute=0, second=0, microsecond=0)
+    for d in itertools.count():
+        date = start_date + dateutil.relativedelta.relativedelta(**{interval + 's': d})
+        if date > end:
+            break
+        succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
+        fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
+
return Response(dashboard_data)
diff --git a/awx/main/analytics/subsystem_metrics.py b/awx/main/analytics/subsystem_metrics.py
index 39cc25d8dd..4b023db315 100644
--- a/awx/main/analytics/subsystem_metrics.py
+++ b/awx/main/analytics/subsystem_metrics.py
@@ -5,7 +5,9 @@ import logging
from django.conf import settings
from django.apps import apps
+
from awx.main.consumers import emit_channel_notification
+from awx.main.utils import is_testing
root_key = 'awx_metrics'
logger = logging.getLogger('awx.main.analytics')
@@ -163,7 +165,7 @@ class Metrics:
Instance = apps.get_model('main', 'Instance')
if instance_name:
self.instance_name = instance_name
- elif settings.IS_TESTING():
+ elif is_testing():
self.instance_name = "awx_testing"
else:
self.instance_name = Instance.objects.my_hostname()
diff --git a/awx/main/credential_plugins/conjur.py b/awx/main/credential_plugins/conjur.py
index 5ae6be27f3..79fe740884 100644
--- a/awx/main/credential_plugins/conjur.py
+++ b/awx/main/credential_plugins/conjur.py
@@ -1,6 +1,5 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status
-import base64
from urllib.parse import urljoin, quote
from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
cacert = kwargs.get('cacert', None)
auth_kwargs = {
- 'headers': {'Content-Type': 'text/plain'},
+ 'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
'data': api_key,
'allow_redirects': False,
}
@@ -69,9 +68,9 @@ def conjur_backend(**kwargs):
with CertFiles(cacert) as cert:
# https://www.conjur.org/api.html#authentication-authenticate-post
auth_kwargs['verify'] = cert
- resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
+ resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
raise_for_status(resp)
- token = base64.b64encode(resp.content).decode('utf-8')
+ token = resp.content.decode('utf-8')
lookup_kwargs = {
'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -79,9 +78,10 @@ def conjur_backend(**kwargs):
}
# https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
- path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
+ path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
if version:
- path = '?'.join([path, version])
+ ver = "version={}".format(version)
+ path = '?'.join([path, ver])
with CertFiles(cacert) as cert:
lookup_kwargs['verify'] = cert
@@ -90,4 +90,4 @@ def conjur_backend(**kwargs):
return resp.text
-conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py
index 841b587d8e..3310f06997 100644
--- a/awx/main/dispatch/pool.py
+++ b/awx/main/dispatch/pool.py
@@ -466,7 +466,7 @@ class AutoscalePool(WorkerPool):
task_name = 'unknown'
if isinstance(body, dict):
task_name = body.get('task')
- logger.warn(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
+ logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
return super(AutoscalePool, self).write(preferred_queue, body)
except Exception:
for conn in connections.all():
diff --git a/awx/main/dispatch/publish.py b/awx/main/dispatch/publish.py
index bc496496d5..3cfc07af15 100644
--- a/awx/main/dispatch/publish.py
+++ b/awx/main/dispatch/publish.py
@@ -1,14 +1,13 @@
import inspect
import logging
-import sys
import json
import time
from uuid import uuid4
-from django.conf import settings
from django_guid import get_guid
from . import pg_bus_conn
+from awx.main.utils import is_testing
logger = logging.getLogger('awx.main.dispatch')
@@ -93,7 +92,7 @@ class task:
obj.update(**kw)
if callable(queue):
queue = queue()
- if not settings.IS_TESTING(sys.argv):
+ if not is_testing():
with pg_bus_conn() as conn:
conn.notify(queue, json.dumps(obj))
return (obj, queue)
diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py
index dbeb81dcac..f101a94d7a 100644
--- a/awx/main/models/ha.py
+++ b/awx/main/models/ha.py
@@ -233,11 +233,12 @@ class Instance(HasPolicyEditsMixin, BaseModel):
if not isinstance(vargs.get('grace_period'), int):
vargs['grace_period'] = 60 # grace period of 60 minutes, need to set because CLI default will not take effect
if 'exclude_strings' not in vargs and vargs.get('file_pattern'):
- active_pks = list(
- UnifiedJob.objects.filter(
- (models.Q(execution_node=self.hostname) | models.Q(controller_node=self.hostname)) & models.Q(status__in=('running', 'waiting'))
- ).values_list('pk', flat=True)
- )
+ active_job_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
+ if self.node_type == 'execution':
+ active_job_qs = active_job_qs.filter(execution_node=self.hostname)
+ else:
+ active_job_qs = active_job_qs.filter(controller_node=self.hostname)
+ active_pks = list(active_job_qs.values_list('pk', flat=True))
if active_pks:
vargs['exclude_strings'] = [JOB_FOLDER_PREFIX % job_id for job_id in active_pks]
if 'remove_images' in vargs or 'image_prune' in vargs:
diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py
index 476e3b9319..d4b6ffbc35 100644
--- a/awx/main/scheduler/task_manager.py
+++ b/awx/main/scheduler/task_manager.py
@@ -39,7 +39,7 @@ from awx.main.utils import (
ScheduleTaskManager,
ScheduleWorkflowManager,
)
-from awx.main.utils.common import task_manager_bulk_reschedule
+from awx.main.utils.common import task_manager_bulk_reschedule, is_testing
from awx.main.signals import disable_activity_stream
from awx.main.constants import ACTIVE_STATES
from awx.main.scheduler.dependency_graph import DependencyGraph
@@ -97,7 +97,7 @@ class TaskBase:
self.all_tasks = [t for t in qs]
def record_aggregate_metrics(self, *args):
- if not settings.IS_TESTING():
+ if not is_testing():
# increment task_manager_schedule_calls regardless if the other
# metrics are recorded
s_metrics.Metrics(auto_pipe_execute=True).inc(f"{self.prefix}__schedule_calls", 1)
diff --git a/awx/main/tests/functional/test_instances.py b/awx/main/tests/functional/test_instances.py
index 8ce6524d38..df6d177868 100644
--- a/awx/main/tests/functional/test_instances.py
+++ b/awx/main/tests/functional/test_instances.py
@@ -1,7 +1,7 @@
import pytest
from unittest import mock
-from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate
+from awx.main.models import AdHocCommand, InventoryUpdate, JobTemplate, Job
from awx.main.models.activity_stream import ActivityStream
from awx.main.models.ha import Instance, InstanceGroup
from awx.main.tasks.system import apply_cluster_membership_policies
@@ -15,6 +15,24 @@ def test_default_tower_instance_group(default_instance_group, job_factory):
assert default_instance_group in job_factory().preferred_instance_groups
+@pytest.mark.django_db
+@pytest.mark.parametrize('node_type', ('execution', 'control'))
+@pytest.mark.parametrize('active', (True, False))
+def test_get_cleanup_task_kwargs_active_jobs(node_type, active):
+ instance = Instance.objects.create(hostname='foobar', node_type=node_type)
+ job_kwargs = dict()
+ job_kwargs['controller_node' if node_type == 'control' else 'execution_node'] = instance.hostname
+ job_kwargs['status'] = 'running' if active else 'successful'
+
+ job = Job.objects.create(**job_kwargs)
+ kwargs = instance.get_cleanup_task_kwargs()
+
+ if active:
+ assert kwargs['exclude_strings'] == [f'awx_{job.pk}_']
+ else:
+ assert 'exclude_strings' not in kwargs
+
+
@pytest.mark.django_db
class TestPolicyTaskScheduling:
"""Tests make assertions about when the policy task gets scheduled"""
diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py
index 98b20851c8..e724c1bc3f 100644
--- a/awx/main/utils/common.py
+++ b/awx/main/utils/common.py
@@ -11,11 +11,12 @@ import os
import subprocess
import re
import stat
+import sys
import urllib.parse
import threading
import contextlib
import tempfile
-from functools import reduce, wraps
+import functools
# Django
from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
@@ -73,6 +74,7 @@ __all__ = [
'NullablePromptPseudoField',
'model_instance_diff',
'parse_yaml_or_json',
+ 'is_testing',
'RequireDebugTrueOrTest',
'has_model_field_prefetched',
'set_environ',
@@ -144,6 +146,19 @@ def underscore_to_camelcase(s):
return ''.join(x.capitalize() or '_' for x in s.split('_'))
+@functools.cache
+def is_testing(argv=None):
+ '''Return True if running django or py.test unit tests.'''
+ if 'PYTEST_CURRENT_TEST' in os.environ.keys():
+ return True
+ argv = sys.argv if argv is None else argv
+ if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
+ return True
+ elif len(argv) >= 2 and argv[1] == 'test':
+ return True
+ return False
+
+
class RequireDebugTrueOrTest(logging.Filter):
"""
Logging filter to output when in DEBUG mode or running tests.
@@ -152,7 +167,7 @@ class RequireDebugTrueOrTest(logging.Filter):
def filter(self, record):
from django.conf import settings
- return settings.DEBUG or settings.IS_TESTING()
+ return settings.DEBUG or is_testing()
class IllegalArgumentError(ValueError):
@@ -174,7 +189,7 @@ def memoize(ttl=60, cache_key=None, track_function=False, cache=None):
cache = cache or get_memoize_cache()
def memoize_decorator(f):
- @wraps(f)
+ @functools.wraps(f)
def _memoizer(*args, **kwargs):
if track_function:
cache_dict_key = slugify('%r %r' % (args, kwargs))
@@ -992,7 +1007,7 @@ def getattrd(obj, name, default=NoDefaultProvided):
"""
try:
- return reduce(getattr, name.split("."), obj)
+ return functools.reduce(getattr, name.split("."), obj)
except AttributeError:
if default != NoDefaultProvided:
return default
@@ -1188,7 +1203,7 @@ def cleanup_new_process(func):
Cleanup django connection, cache connection, before executing new thread or processes entry point, func.
"""
- @wraps(func)
+ @functools.wraps(func)
def wrapper_cleanup_new_process(*args, **kwargs):
from awx.conf.settings import SettingsWrapper # noqa
@@ -1202,7 +1217,7 @@ def cleanup_new_process(func):
def log_excess_runtime(func_logger, cutoff=5.0):
def log_excess_runtime_decorator(func):
- @wraps(func)
+ @functools.wraps(func)
def _new_func(*args, **kwargs):
start_time = time.time()
return_value = func(*args, **kwargs)
diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py
index b45595e6ac..e365c2a48f 100644
--- a/awx/settings/defaults.py
+++ b/awx/settings/defaults.py
@@ -10,28 +10,6 @@ import socket
from datetime import timedelta
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(__file__))
-
-
-def is_testing(argv=None):
- import sys
-
- '''Return True if running django or py.test unit tests.'''
- if 'PYTEST_CURRENT_TEST' in os.environ.keys():
- return True
- argv = sys.argv if argv is None else argv
- if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
- return True
- elif len(argv) >= 2 and argv[1] == 'test':
- return True
- return False
-
-
-def IS_TESTING(argv=None):
- return is_testing(argv)
-
-
if "pytest" in sys.modules:
from unittest import mock
@@ -40,9 +18,13 @@ if "pytest" in sys.modules:
else:
import ldap
+
DEBUG = True
SQL_DEBUG = DEBUG
+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+
# FIXME: it would be nice to cycle back around and allow this to be
# BigAutoField going forward, but we'd have to be explicit about our
# existing models.
diff --git a/awx/ui/src/screens/Credential/CredentialEdit/CredentialEdit.test.js b/awx/ui/src/screens/Credential/CredentialEdit/CredentialEdit.test.js
index d228954c50..2ce8865647 100644
--- a/awx/ui/src/screens/Credential/CredentialEdit/CredentialEdit.test.js
+++ b/awx/ui/src/screens/Credential/CredentialEdit/CredentialEdit.test.js
@@ -282,7 +282,7 @@ const mockInputSources = {
summary_fields: {
source_credential: {
id: 20,
- name: 'CyberArk Conjur Secret Lookup',
+ name: 'CyberArk Conjur Secrets Manager Lookup',
description: '',
kind: 'conjur',
cloud: false,
@@ -301,7 +301,7 @@ const mockInputSources = {
summary_fields: {
source_credential: {
id: 20,
- name: 'CyberArk Conjur Secret Lookup',
+ name: 'CyberArk Conjur Secrets Manager Lookup',
description: '',
kind: 'conjur',
cloud: false,
diff --git a/awx/ui/src/screens/Credential/shared/CredentialPlugins/CredentialPluginPrompt/CredentialPluginPrompt.test.js b/awx/ui/src/screens/Credential/shared/CredentialPlugins/CredentialPluginPrompt/CredentialPluginPrompt.test.js
index 4973585804..9e587d9f6b 100644
--- a/awx/ui/src/screens/Credential/shared/CredentialPlugins/CredentialPluginPrompt/CredentialPluginPrompt.test.js
+++ b/awx/ui/src/screens/Credential/shared/CredentialPlugins/CredentialPluginPrompt/CredentialPluginPrompt.test.js
@@ -36,14 +36,14 @@ const mockCredentialTypeDetail = {
url: '/api/v2/credential_types/20/',
related: {
named_url:
- '/api/v2/credential_types/CyberArk Conjur Secret Lookup+external/',
+ '/api/v2/credential_types/CyberArk Conjur Secrets Manager Lookup+external/',
credentials: '/api/v2/credential_types/20/credentials/',
activity_stream: '/api/v2/credential_types/20/activity_stream/',
},
summary_fields: { user_capabilities: { edit: false, delete: false } },
created: '2020-05-18T21:53:35.398260Z',
modified: '2020-05-18T21:54:05.451444Z',
- name: 'CyberArk Conjur Secret Lookup',
+ name: 'CyberArk Conjur Secrets Manager Lookup',
description: '',
kind: 'external',
namespace: 'conjur',
diff --git a/awx/ui/src/screens/Credential/shared/data.credentialTypes.json b/awx/ui/src/screens/Credential/shared/data.credentialTypes.json
index 98c375aa2c..a6d7f51740 100644
--- a/awx/ui/src/screens/Credential/shared/data.credentialTypes.json
+++ b/awx/ui/src/screens/Credential/shared/data.credentialTypes.json
@@ -546,7 +546,7 @@
},
"created": "2020-05-18T21:53:35.398260Z",
"modified": "2020-05-18T21:54:05.451444Z",
- "name": "CyberArk Conjur Secret Lookup",
+ "name": "CyberArk Conjur Secrets Manager Lookup",
"description": "",
"kind": "external",
"namespace": "conjur",
diff --git a/awx/ui/src/screens/Credential/shared/data.cyberArkCredential.json b/awx/ui/src/screens/Credential/shared/data.cyberArkCredential.json
index 94b0bbd8fd..77428a3083 100644
--- a/awx/ui/src/screens/Credential/shared/data.cyberArkCredential.json
+++ b/awx/ui/src/screens/Credential/shared/data.cyberArkCredential.json
@@ -3,7 +3,7 @@
"type": "credential",
"url": "/api/v2/credentials/1/",
"related": {
- "named_url": "/api/v2/credentials/CyberArk Conjur Secret Lookup++CyberArk Conjur Secret Lookup+external++/",
+    "named_url": "/api/v2/credentials/CyberArk Conjur Secrets Manager Lookup++CyberArk Conjur Secrets Manager Lookup+external++/",
"created_by": "/api/v2/users/1/",
"modified_by": "/api/v2/users/1/",
"activity_stream": "/api/v2/credentials/1/activity_stream/",
@@ -19,7 +19,7 @@
"summary_fields": {
"credential_type": {
"id": 20,
- "name": "CyberArk Conjur Secret Lookup",
+ "name": "CyberArk Conjur Secrets Manager Lookup",
"description": ""
},
"created_by": {
@@ -69,7 +69,7 @@
},
"created": "2020-05-19T12:51:36.956029Z",
"modified": "2020-05-19T12:51:36.956086Z",
- "name": "CyberArk Conjur Secret Lookup",
+ "name": "CyberArk Conjur Secrets Manager Lookup",
"description": "",
"organization": null,
"credential_type": 20,
diff --git a/awx/ui/src/screens/Job/JobOutput/HostEventModal.js b/awx/ui/src/screens/Job/JobOutput/HostEventModal.js
index 57fe7ce05f..a7295c1692 100644
--- a/awx/ui/src/screens/Job/JobOutput/HostEventModal.js
+++ b/awx/ui/src/screens/Job/JobOutput/HostEventModal.js
@@ -70,7 +70,6 @@ const getStdOutValue = (hostEvent) => {
function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
const [hostStatus, setHostStatus] = useState(null);
const [activeTabKey, setActiveTabKey] = useState(0);
-
useEffect(() => {
setHostStatus(processEventStatus(hostEvent));
}, [setHostStatus, hostEvent]);
@@ -108,11 +107,11 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
style={{ alignItems: 'center', marginTop: '20px' }}
gutter="sm"
>
-
- {hostEvent.summary_fields.host?.description ? (
+
+ {hostEvent.summary_fields?.host?.description ? (
) : null}
{hostStatus ? (
@@ -125,12 +124,9 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
-
+
{"changed": true, "cmd": ["free", "-m"], "delta": "0:00:01.479609", "end": "2019-09-10 14:21:45.469533", "rc": 0, "start": "2019-09-10 14:21:43.989924", "stderr": "", "stderr_lines": [], "stdout": " total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023", "stdout_lines": [" total used free shared buff/cache available", "Mem: 7973 3005 960 30 4007 4582", "Swap: 1023 0 1023"]}[0m"
+ `,
+ task: 'command',
+ type: 'job_event',
+ url: '/api/v2/job_events/123/',
+};
+
/*
Some libraries return a list of string in stdout
Example: https://github.com/ansible-collections/cisco.ios/blob/main/plugins/modules/ios_command.py#L124-L128
@@ -134,6 +175,13 @@ describe('HostEventModal', () => {
expect(wrapper).toHaveLength(1);
});
+ test('renders successfully with partial data', () => {
+ const wrapper = shallow(
+ {}} />
+ );
+ expect(wrapper).toHaveLength(1);
+ });
+
test('should render all tabs', () => {
const wrapper = shallow(
{}} isOpen />
diff --git a/awx_collection/plugins/modules/credential.py b/awx_collection/plugins/modules/credential.py
index 8c962ce82e..a36f565c9d 100644
--- a/awx_collection/plugins/modules/credential.py
+++ b/awx_collection/plugins/modules/credential.py
@@ -52,7 +52,7 @@ options:
- The credential type being created.
- Can be a built-in credential type such as "Machine", or a custom credential type such as "My Credential Type"
- Choices include Amazon Web Services, Ansible Galaxy/Automation Hub API Token, Centrify Vault Credential Provider Lookup,
- Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secret Lookup, Google Compute Engine,
+ Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secrets Manager Lookup, Google Compute Engine,
GitHub Personal Access Token, GitLab Personal Access Token, GPG Public Key, HashiCorp Vault Secret Lookup, HashiCorp Vault Signed SSH,
Insights, Machine, Microsoft Azure Key Vault, Microsoft Azure Resource Manager, Network, OpenShift or Kubernetes API
Bearer Token, OpenStack, Red Hat Ansible Automation Platform, Red Hat Satellite 6, Red Hat Virtualization, Source Control,
diff --git a/awxkit/awxkit/cli/docs/source/conf.py b/awxkit/awxkit/cli/docs/source/conf.py
index 75eb627103..db66c6292c 100644
--- a/awxkit/awxkit/cli/docs/source/conf.py
+++ b/awxkit/awxkit/cli/docs/source/conf.py
@@ -52,6 +52,7 @@ html_static_path = ['_static']
rst_epilog = '''
.. |prog| replace:: awx
-.. |at| replace:: Ansible Tower
-.. |RHAT| replace:: Red Hat Ansible Tower
+.. |at| replace:: automation controller
+.. |At| replace:: Automation controller
+.. |RHAT| replace:: Red Hat Ansible Automation Platform controller
'''
diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py
index 27b6c623ee..eb605b146b 100644
--- a/awxkit/awxkit/cli/resource.py
+++ b/awxkit/awxkit/cli/resource.py
@@ -197,8 +197,10 @@ def parse_resource(client, skip_deprecated=False):
if hasattr(client, 'v2'):
for k in client.v2.json.keys():
- if k in ('dashboard',):
- # the Dashboard API is deprecated and not supported
+ if k in ('dashboard', 'config'):
+ # - the Dashboard API is deprecated and not supported
+ # - the Config command is already dealt with by the
+ # CustomCommand section above
continue
# argparse aliases are *only* supported in Python3 (not 2.7)
diff --git a/docs/licenses/django-qsstats-magic.txt b/docs/licenses/django-qsstats-magic.txt
deleted file mode 100644
index adef47b952..0000000000
--- a/docs/licenses/django-qsstats-magic.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2010, Matt Croydon, Mikhail Korobov
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
- * Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
- * Neither the name of the tastypie nor the
- names of its contributors may be used to endorse or promote products
- derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL MATT CROYDON BE LIABLE FOR ANY
-DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/requirements/README.md b/requirements/README.md
index d98557aa58..9714398c37 100644
--- a/requirements/README.md
+++ b/requirements/README.md
@@ -1,25 +1,22 @@
# Dependency Management
-The `requirements.txt` file is generated from `requirements.in`, using `pip-tools` `pip-compile`.
+The `requirements.txt` file is generated from `requirements.in` and `requirements_git.txt`, using `pip-tools` and `pip-compile`.
## How To Use
-Commands should be run from inside the `./requirements` directory of the awx repository.
+Commands should be run in the awx container from inside the `./requirements` directory of the awx repository.
### Upgrading or Adding Select Libraries
If you need to add or upgrade one targeted library, then modify `requirements.in`,
then run the script:
-`./updater.sh`
-
-NOTE: `./updater.sh` uses /usr/bin/python3.6, to match the current python version
-(3.6) used to build releases.
+`./updater.sh run`
#### Upgrading Unpinned Dependency
If you require a new version of a dependency that does not have a pinned version
-for a fix or feature, pin a minimum version and run `./updater.sh`. For example,
+for a fix or feature, pin a minimum version in `requirements.in` and run `./updater.sh run`. For example,
replace the line `asgi-amqp` with `asgi-amqp>=1.1.4`, and consider leaving a
note.
diff --git a/requirements/requirements.in b/requirements/requirements.in
index 00779e760c..3a125fec79 100644
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -19,7 +19,6 @@ django-guid==3.2.1
django-oauth-toolkit==1.4.1
django-polymorphic
django-pglocks
-django-qsstats-magic
django-redis
django-solo
django-split-settings
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index c407a3f5ee..3a50cd03a0 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -115,9 +115,6 @@ django-pglocks==1.0.4
# via -r /awx_devel/requirements/requirements.in
django-polymorphic==3.1.0
# via -r /awx_devel/requirements/requirements.in
-django-qsstats-magic==1.1.0
- # via -r /awx_devel/requirements/requirements.in
- # via -r /awx_devel/requirements/requirements_git.txt
django-redis==4.5.0
# via -r /awx_devel/requirements/requirements.in
django-solo==2.0.0
diff --git a/requirements/updater.sh b/requirements/updater.sh
index 9f61376214..6f96a840e5 100755
--- a/requirements/updater.sh
+++ b/requirements/updater.sh
@@ -33,11 +33,47 @@ generate_requirements() {
main() {
base_dir=$(pwd)
- _tmp="$(mktemp -d --suffix .awx-requirements XXXX -p /tmp)"
+
+ _tmp=$(python -c "import tempfile; print(tempfile.mkdtemp(suffix='.awx-requirements', dir='/tmp'))")
+
trap _cleanup INT TERM EXIT
- if [ "$1" = "upgrade" ]; then
+ case $1 in
+ "run")
+ NEEDS_HELP=0
+ ;;
+ "upgrade")
+ NEEDS_HELP=0
pip_compile="${pip_compile} --upgrade"
+ ;;
+ "help")
+ NEEDS_HELP=1
+ ;;
+ *)
+ echo ""
+ echo "ERROR: Parameter $1 not valid"
+ echo ""
+ NEEDS_HELP=1
+ ;;
+ esac
+
+ if [[ "$NEEDS_HELP" == "1" ]] ; then
+    echo "This script generates requirements.txt from requirements.in and requirements_git.txt"
+ echo "It should be run from within the awx container"
+ echo ""
+ echo "Usage: $0 [run|upgrade]"
+ echo ""
+ echo "Commands:"
+ echo "help Print this message"
+ echo "run Run the process only upgrading pinned libraries from requirements.in"
+ echo "upgrade Upgrade all libraries to latest while respecting pinnings"
+ echo ""
+ exit
+ fi
+
+ if [[ ! -d /awx_devel ]] ; then
+ echo "This script should be run inside the awx container"
+ exit
fi
cp -vf requirements.txt "${_tmp}"