mirror of https://github.com/ansible/awx.git
synced 2026-02-17 19:20:05 -03:30

Compare commits: 2 commits on branch fix/redis-...upgrade-sq

| Author | SHA1 | Date |
|---|---|---|
| | 543d3f940b | |
| | ee7edb9179 | |

.gitignore (vendored): 2 changed lines
@@ -150,8 +150,6 @@ use_dev_supervisor.txt
 
 awx/ui/src
 awx/ui/build
-awx/ui/.ui-built
-awx/ui_next
 
 # Docs build stuff
 docs/docsite/build/
@@ -10,7 +10,7 @@ from awx.api.generics import APIView, Response
 from awx.api.permissions import AnalyticsPermission
 from awx.api.versioning import reverse
 from awx.main.utils import get_awx_version
-from awx.main.utils.analytics_proxy import OIDCClient
+from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
 from rest_framework import status
 
 from collections import OrderedDict

@@ -205,7 +205,7 @@ class AnalyticsGenericView(APIView):
         try:
             rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
             rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
-            client = OIDCClient(rh_user, rh_password)
+            client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
             response = client.make_request(
                 method,
                 url,

@@ -219,8 +219,8 @@ class AnalyticsGenericView(APIView):
                 logger.error("Automation Analytics API request failed, trying base auth method")
                 response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
         except MissingSettings:
-            rh_user = self._get_setting('SUBSCRIPTIONS_CLIENT_ID', None, ERROR_MISSING_USER)
-            rh_password = self._get_setting('SUBSCRIPTIONS_CLIENT_SECRET', None, ERROR_MISSING_PASSWORD)
+            rh_user = self._get_setting('SUBSCRIPTIONS_USERNAME', None, ERROR_MISSING_USER)
+            rh_password = self._get_setting('SUBSCRIPTIONS_PASSWORD', None, ERROR_MISSING_PASSWORD)
             response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
         #
         # Missing or wrong user/pass
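Both analytics hunks above make the same move: the OIDC token endpoint and scope list stop being client-side defaults and are spelled out at each call site. A minimal sketch of the new construction, assuming only names visible in this diff (credential values and the target URL are placeholders):

    from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT

    rh_user, rh_password = 'example-user', 'example-secret'  # placeholders, not real settings

    # Endpoint and scopes are now explicit arguments, matching the '+' lines above.
    client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
    response = client.make_request('GET', 'https://example.invalid/analytics')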
@@ -32,7 +32,6 @@ from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ
-from awx.main.utils.analytics_proxy import TokenError
 from awx.main.utils.licensing import get_licenser
 
 logger = logging.getLogger('awx.api.views.root')

@@ -177,21 +176,19 @@ class ApiV2SubscriptionView(APIView):
 
     def post(self, request):
         data = request.data.copy()
-        if data.get('subscriptions_client_secret') == '$encrypted$':
-            data['subscriptions_client_secret'] = settings.SUBSCRIPTIONS_CLIENT_SECRET
+        if data.get('subscriptions_password') == '$encrypted$':
+            data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
         try:
-            user, pw = data.get('subscriptions_client_id'), data.get('subscriptions_client_secret')
+            user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
             with set_environ(**settings.AWX_TASK_ENV):
                 validated = get_licenser().validate_rh(user, pw)
             if user:
-                settings.SUBSCRIPTIONS_CLIENT_ID = data['subscriptions_client_id']
+                settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
             if pw:
-                settings.SUBSCRIPTIONS_CLIENT_SECRET = data['subscriptions_client_secret']
+                settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
         except Exception as exc:
             msg = _("Invalid Subscription")
-            if isinstance(exc, TokenError) or (
-                isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401
-            ):
+            if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
                 msg = _("The provided credentials are invalid (HTTP 401).")
             elif isinstance(exc, requests.exceptions.ProxyError):
                 msg = _("Unable to connect to proxy server.")

@@ -218,12 +215,12 @@ class ApiV2AttachView(APIView):
 
     def post(self, request):
         data = request.data.copy()
-        subscription_id = data.get('subscription_id', None)
-        if not subscription_id:
-            return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
-        user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
-        pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
-        if subscription_id and user and pw:
+        pool_id = data.get('pool_id', None)
+        if not pool_id:
+            return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
+        user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
+        pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
+        if pool_id and user and pw:
             data = request.data.copy()
             try:
                 with set_environ(**settings.AWX_TASK_ENV):

@@ -242,7 +239,7 @@ class ApiV2AttachView(APIView):
             logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
             return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
         for sub in validated:
-            if sub['subscription_id'] == subscription_id:
+            if sub['pool_id'] == pool_id:
                 sub['valid_key'] = True
                 settings.LICENSE = sub
                 return Response(sub)
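For API consumers, the visible contract change in ApiV2AttachView is the request-body key: pool_id replaces subscription_id, and the validated subscription list is matched on pool_id as well. A hedged example call (the endpoint path is an assumption, not shown in this capture):

    import requests

    # Hypothetical client call against the attach view after this change.
    resp = requests.post(
        'https://awx.example.com/api/v2/config/attach/',  # path assumed
        json={'pool_id': '1234567890abcdef'},             # formerly 'subscription_id'
        auth=('admin', 'password'),
    )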
@@ -207,8 +207,7 @@ class URLField(CharField):
         if self.allow_plain_hostname:
             try:
                 url_parts = urlparse.urlsplit(value)
-                looks_like_ipv6 = bool(url_parts.netloc and url_parts.netloc.startswith('[') and url_parts.netloc.endswith(']'))
-                if not looks_like_ipv6 and url_parts.hostname and '.' not in url_parts.hostname:
+                if url_parts.hostname and '.' not in url_parts.hostname:
                     netloc = '{}.local'.format(url_parts.hostname)
                     if url_parts.port:
                         netloc = '{}:{}'.format(netloc, url_parts.port)
@@ -27,5 +27,5 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
 
 
 def prefill_rh_credentials(apps, schema_editor):
-    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_CLIENT_ID', encrypted=False)
-    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_CLIENT_SECRET', encrypted=True)
+    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
+    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)
@@ -128,41 +128,3 @@ class TestURLField:
         else:
             with pytest.raises(ValidationError):
                 field.run_validators(url)
-
-    @pytest.mark.parametrize(
-        "url, expect_error",
-        [
-            ("https://[1:2:3]", True),
-            ("http://[1:2:3]", True),
-            ("https://[2001:db8:3333:4444:5555:6666:7777:8888", True),
-            ("https://2001:db8:3333:4444:5555:6666:7777:8888", True),
-            ("https://[2001:db8:3333:4444:5555:6666:7777:8888]", False),
-            ("https://[::1]", False),
-            ("https://[::]", False),
-            ("https://[2001:db8::1]", False),
-            ("https://[2001:db8:0:0:0:0:1:1]", False),
-            ("https://[fe80::2%eth0]", True),  # ipv6 scope identifier
-            ("https://[fe80:0:0:0:200:f8ff:fe21:67cf]", False),
-            ("https://[::ffff:192.168.1.10]", False),
-            ("https://[0:0:0:0:0:ffff:c000:0201]", False),
-            ("https://[2001:0db8:000a:0001:0000:0000:0000:0000]", False),
-            ("https://[2001:db8:a:1::]", False),
-            ("https://[ff02::1]", False),
-            ("https://[ff02:0:0:0:0:0:0:1]", False),
-            ("https://[fc00::1]", False),
-            ("https://[fd12:3456:789a:1::1]", False),
-            ("https://[2001:db8::abcd:ef12:3456:7890]", False),
-            ("https://[2001:db8:0000:abcd:0000:ef12:0000:3456]", False),
-            ("https://[::ffff:10.0.0.1]", False),
-            ("https://[2001:db8:cafe::]", False),
-            ("https://[2001:db8:cafe:0:0:0:0:0]", False),
-            ("https://[fe80::210:f3ff:fedf:4567%3]", True),  # ipv6 scope identifier, numerical interface
-        ],
-    )
-    def test_ipv6_urls(self, url, expect_error):
-        field = URLField()
-        if expect_error:
-            with pytest.raises(ValidationError, match="Enter a valid URL"):
-                field.run_validators(url)
-        else:
-            field.run_validators(url)
@@ -22,7 +22,7 @@ from ansible_base.lib.utils.db import advisory_lock
 from awx.main.models import Job
 from awx.main.access import access_registry
 from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
-from awx.main.utils.analytics_proxy import OIDCClient
+from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
 
 __all__ = ['register', 'gather', 'ship']
 

@@ -186,7 +186,7 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
 
     if not (
         settings.AUTOMATION_ANALYTICS_URL
-        and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_CLIENT_ID and settings.SUBSCRIPTIONS_CLIENT_SECRET))
+        and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_USERNAME and settings.SUBSCRIPTIONS_PASSWORD))
     ):
         logger.log(log_level, "Not gathering analytics, configuration is invalid. Use --dry-run to gather locally without sending.")
         return None

@@ -368,20 +368,8 @@ def ship(path):
         logger.error('AUTOMATION_ANALYTICS_URL is not set')
         return False
 
-    rh_id = getattr(settings, 'REDHAT_USERNAME', None)
-    rh_secret = getattr(settings, 'REDHAT_PASSWORD', None)
-
-    if not (rh_id and rh_secret):
-        rh_id = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
-        rh_secret = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
-
-    if not rh_id:
-        logger.error('Neither REDHAT_USERNAME nor SUBSCRIPTIONS_CLIENT_ID are set')
-        return False
-
-    if not rh_secret:
-        logger.error('Neither REDHAT_PASSWORD nor SUBSCRIPTIONS_CLIENT_SECRET are set')
-        return False
+    rh_user = getattr(settings, 'REDHAT_USERNAME', None)
+    rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
 
     with open(path, 'rb') as f:
         files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}

@@ -389,13 +377,25 @@ def ship(path):
         s.headers = get_awx_http_client_headers()
         s.headers.pop('Content-Type')
         with set_environ(**settings.AWX_TASK_ENV):
-            try:
-                client = OIDCClient(rh_id, rh_secret)
-                response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
-            except requests.RequestException:
-                logger.error("Automation Analytics API request failed, trying base auth method")
-                response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_id, rh_secret), headers=s.headers, timeout=(31, 31))
+            if rh_user and rh_password:
+                try:
+                    client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
+                    response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
+                except requests.RequestException:
+                    logger.error("Automation Analytics API request failed, trying base auth method")
+                    response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
+            elif not rh_user or not rh_password:
+                logger.info('REDHAT_USERNAME and REDHAT_PASSWORD are not set, using SUBSCRIPTIONS_USERNAME and SUBSCRIPTIONS_PASSWORD')
+                rh_user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
+                rh_password = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
+                if rh_user and rh_password:
+                    response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
+                elif not rh_user:
+                    logger.error('REDHAT_USERNAME and SUBSCRIPTIONS_USERNAME are not set')
+                    return False
+                elif not rh_password:
+                    logger.error('REDHAT_PASSWORD and SUBSCRIPTIONS_PASSWORD are not set')
+                    return False
         # Accept 2XX status_codes
         if response.status_code >= 300:
             logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))
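Condensing the two ship() hunks above into a hedged sketch of the resulting credential flow (the two upload helpers here are illustrative stand-ins, not names from the source): REDHAT_* credentials get the OIDC-then-basic-auth path, while SUBSCRIPTIONS_* credentials are consulted only as a basic-auth fallback when the REDHAT_* pair is absent.

    def resolve_and_ship(settings, upload_oidc_with_basic_fallback, upload_basic_auth):
        # Hypothetical helpers injected for the sketch; only the branching mirrors the diff.
        rh_user = getattr(settings, 'REDHAT_USERNAME', None)
        rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
        if rh_user and rh_password:
            # OIDC first; on requests.RequestException, fall back to basic auth.
            return upload_oidc_with_basic_fallback(rh_user, rh_password)
        rh_user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
        rh_password = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
        if not (rh_user and rh_password):
            return False  # the real code logs which setting is missing
        return upload_basic_auth(rh_user, rh_password)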
@@ -124,8 +124,8 @@ register(
     allow_blank=True,
     encrypted=False,
     read_only=False,
-    label=_('Red Hat Client ID for Analytics'),
-    help_text=_('Client ID used to send data to Automation Analytics'),
+    label=_('Red Hat customer username'),
+    help_text=_('This username is used to send data to Automation Analytics'),
     category=_('System'),
     category_slug='system',
 )

@@ -137,34 +137,34 @@ register(
     allow_blank=True,
     encrypted=True,
     read_only=False,
-    label=_('Red Hat Client Secret for Analytics'),
-    help_text=_('Client secret used to send data to Automation Analytics'),
+    label=_('Red Hat customer password'),
+    help_text=_('This password is used to send data to Automation Analytics'),
     category=_('System'),
     category_slug='system',
 )
 
 register(
-    'SUBSCRIPTIONS_CLIENT_ID',
+    'SUBSCRIPTIONS_USERNAME',
     field_class=fields.CharField,
     default='',
     allow_blank=True,
     encrypted=False,
     read_only=False,
-    label=_('Red Hat Client ID for Subscriptions'),
-    help_text=_('Client ID used to retrieve subscription and content information'),  # noqa
+    label=_('Red Hat or Satellite username'),
+    help_text=_('This username is used to retrieve subscription and content information'),  # noqa
     category=_('System'),
     category_slug='system',
 )
 
 register(
-    'SUBSCRIPTIONS_CLIENT_SECRET',
+    'SUBSCRIPTIONS_PASSWORD',
     field_class=fields.CharField,
     default='',
     allow_blank=True,
     encrypted=True,
     read_only=False,
-    label=_('Red Hat Client Secret for Subscriptions'),
-    help_text=_('Client secret used to retrieve subscription and content information'),  # noqa
+    label=_('Red Hat or Satellite password'),
+    help_text=_('This password is used to retrieve subscription and content information'),  # noqa
     category=_('System'),
     category_slug='system',
 )
@@ -7,7 +7,6 @@ import time
 import traceback
 from datetime import datetime
 from uuid import uuid4
-import json
 
 import collections
 from multiprocessing import Process

@@ -26,10 +25,7 @@ from ansible_base.lib.logging.runtime import log_excess_runtime
 
 from awx.main.models import UnifiedJob
 from awx.main.dispatch import reaper
-from awx.main.utils.common import get_mem_effective_capacity, get_corrected_memory, get_corrected_cpu, get_cpu_effective_capacity
+from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity
 
-# ansible-runner
-from ansible_runner.utils.capacity import get_mem_in_bytes, get_cpu_count
-
 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -311,41 +307,6 @@ class WorkerPool(object):
             logger.exception('could not kill {}'.format(worker.pid))
 
 
-def get_auto_max_workers():
-    """Method we normally rely on to get max_workers
-
-    Uses almost same logic as Instance.local_health_check
-    The important thing is to be MORE than Instance.capacity
-    so that the task-manager does not over-schedule this node
-
-    Ideally we would just use the capacity from the database plus reserve workers,
-    but this poses some bootstrap problems where OCP task containers
-    register themselves after startup
-    """
-    # Get memory from ansible-runner
-    total_memory_gb = get_mem_in_bytes()
-
-    # This may replace memory calculation with a user override
-    corrected_memory = get_corrected_memory(total_memory_gb)
-
-    # Get same number as max forks based on memory, this function takes memory as bytes
-    mem_capacity = get_mem_effective_capacity(corrected_memory, is_control_node=True)
-
-    # Follow same process for CPU capacity constraint
-    cpu_count = get_cpu_count()
-    corrected_cpu = get_corrected_cpu(cpu_count)
-    cpu_capacity = get_cpu_effective_capacity(corrected_cpu, is_control_node=True)
-
-    # Here is what is different from health checks,
-    auto_max = max(mem_capacity, cpu_capacity)
-
-    # add magic number of extra workers to ensure
-    # we have a few extra workers to run the heartbeat
-    auto_max += 7
-
-    return auto_max
-
-
 class AutoscalePool(WorkerPool):
     """
     An extended pool implementation that automatically scales workers up and
@@ -359,7 +320,19 @@ class AutoscalePool(WorkerPool):
         super(AutoscalePool, self).__init__(*args, **kwargs)
 
         if self.max_workers is None:
-            self.max_workers = get_auto_max_workers()
+            settings_absmem = getattr(settings, 'SYSTEM_TASK_ABS_MEM', None)
+            if settings_absmem is not None:
+                # There are 1073741824 bytes in a gigabyte. Convert bytes to gigabytes by dividing by 2**30
+                total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30
+            else:
+                total_memory_gb = (psutil.virtual_memory().total >> 30) + 1  # noqa: round up
+
+            # Get same number as max forks based on memory, this function takes memory as bytes
+            self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30)
+
+            # add magic prime number of extra workers to ensure
+            # we have a few extra workers to run the heartbeat
+            self.max_workers += 7
 
         # max workers can't be less than min_workers
         self.max_workers = max(self.min_workers, self.max_workers)
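A quick worked example of the restored sizing arithmetic (values illustrative; helper semantics as implied by the hunk above): SYSTEM_TASK_ABS_MEM='16Gi' converts to 16 * 2**30 bytes, so total_memory_gb is 16; the byte count is then fed back to get_mem_effective_capacity, and 7 reserve workers are added so the heartbeat always has headroom.

    # Hedged sketch of the arithmetic only; the helpers are the ones imported above.
    min_workers = 5  # illustrative floor
    total_memory_gb = convert_mem_str_to_bytes('16Gi') // 2**30   # -> 16
    max_workers = get_mem_effective_capacity(total_memory_gb * 2**30) + 7
    max_workers = max(min_workers, max_workers)  # floor applied last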
@@ -373,9 +346,6 @@ class AutoscalePool(WorkerPool):
         self.scale_up_ct = 0
         self.worker_count_max = 0
 
-        # last time we wrote current tasks, to avoid too much log spam
-        self.last_task_list_log = time.monotonic()
-
     def produce_subsystem_metrics(self, metrics_object):
         metrics_object.set('dispatcher_pool_scale_up_events', self.scale_up_ct)
         metrics_object.set('dispatcher_pool_active_task_count', sum(len(w.managed_tasks) for w in self.workers))
@@ -493,14 +463,6 @@ class AutoscalePool(WorkerPool):
             self.worker_count_max = new_worker_ct
         return ret
 
-    @staticmethod
-    def fast_task_serialization(current_task):
-        try:
-            return str(current_task.get('task')) + ' - ' + str(sorted(current_task.get('args', []))) + ' - ' + str(sorted(current_task.get('kwargs', {})))
-        except Exception:
-            # just make sure this does not make things worse
-            return str(current_task)
-
     def write(self, preferred_queue, body):
         if 'guid' in body:
             set_guid(body['guid'])
@@ -522,15 +484,6 @@ class AutoscalePool(WorkerPool):
                 if isinstance(body, dict):
                     task_name = body.get('task')
                     logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
-                # Once every 10 seconds write out task list for debugging
-                if time.monotonic() - self.last_task_list_log >= 10.0:
-                    task_counts = {}
-                    for worker in self.workers:
-                        task_slug = self.fast_task_serialization(worker.current_task)
-                        task_counts.setdefault(task_slug, 0)
-                        task_counts[task_slug] += 1
-                    logger.info(f'Running tasks by count:\n{json.dumps(task_counts, indent=2)}')
-                    self.last_task_list_log = time.monotonic()
             return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
@@ -238,7 +238,7 @@ class AWXConsumerPG(AWXConsumerBase):
     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)
 
-        logger.info(f"Running {self.name}, workers min={self.pool.min_workers} max={self.pool.max_workers}, listening to queues {self.queues}")
+        logger.info(f"Running worker {self.name} listening to queues {self.queues}")
         init = False
 
         while True:
@@ -1,9 +1,6 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.
 
-# AIA: Primarily AI, Modified content, Human-initiated, Reviewed, Claude (Anthropic AI) via Cursor
-# AIA PAI Mc Hin R Claude Cursor - https://aiattribution.github.io/interpret-attribution
-
 # Python
 import json
 import logging

@@ -29,151 +26,7 @@ class CallbackQueueDispatcher(object):
     def __init__(self):
         self.queue = getattr(settings, 'CALLBACK_QUEUE', '')
         self.logger = logging.getLogger('awx.main.queue.CallbackQueueDispatcher')
-        self._broker_url = settings.BROKER_URL
-        self.connection = redis.Redis.from_url(self._broker_url)
-        self._connection_failures = 0
-        self._max_reconnect_attempts = 3
-        self._total_reconnections = 0
-        self._events_lost = 0
-
-    def _reconnect(self):
-        """
-        Attempt to reconnect to Redis after connection failure.
-
-        Returns:
-            bool: True if reconnection successful, False otherwise
-        """
-        try:
-            attempt = self._connection_failures + 1
-            self.logger.warning(
-                f"Redis reconnection attempt {attempt}/{self._max_reconnect_attempts} " f"(total reconnections this session: {self._total_reconnections})"
-            )
-
-            # Create new connection
-            self.connection = redis.Redis.from_url(self._broker_url)
-
-            # Verify connection works
-            self.connection.ping()
-
-            # Success
-            self._connection_failures = 0
-            self._total_reconnections += 1
-            self.logger.info(f"Successfully reconnected to Redis (session reconnections: {self._total_reconnections})")
-            return True
-
-        except Exception as e:
-            self._connection_failures += 1
-            self.logger.error(f"Redis reconnection failed (attempt {self._connection_failures}): {type(e).__name__}: {e}")
-            return False
+        self.connection = redis.Redis.from_url(settings.BROKER_URL)
 
     def dispatch(self, obj):
-        """
-        Dispatch event to Redis queue with automatic reconnection on failure.
-
-        Handles BrokenPipeError and ConnectionError by attempting reconnection.
-        If all reconnection attempts fail, logs the event loss but allows job to continue.
-
-        Args:
-            obj: Event data to dispatch (dict or serializable object)
-        """
-        max_attempts = self._max_reconnect_attempts + 1
-        last_error = None
-
-        # Extract diagnostic info from event
-        event_type = 'unknown'
-        job_id = 'unknown'
-        if isinstance(obj, dict):
-            event_type = obj.get('event', obj.get('event_name', 'unknown'))
-            job_id = obj.get('job_id', obj.get('unified_job_id', 'unknown'))
-
-        for attempt in range(max_attempts):
-            try:
-                # Attempt to push event to Redis
-                self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder))
-
-                # Success - reset failure counter if this was a recovery
-                if self._connection_failures > 0:
-                    self.logger.info(f"Redis connection recovered after reconnection. " f"job_id={job_id}, event_type={event_type}")
-                    self._connection_failures = 0
-
-                return  # Successfully dispatched
-
-            except (BrokenPipeError, redis.exceptions.ConnectionError) as e:
-                last_error = e
-                error_type = type(e).__name__
-
-                self.logger.warning(f"Redis connection error during event dispatch " f"(attempt {attempt + 1}/{max_attempts}): {error_type}: {e}")
-
-                # Enhanced diagnostics
-                self.logger.warning(
-                    f"Failed event details: job_id={job_id}, event_type={event_type}, " f"queue={self.queue}, attempt={attempt + 1}/{max_attempts}"
-                )
-
-                if attempt < max_attempts - 1:
-                    # Try to reconnect before next attempt
-                    reconnected = self._reconnect()
-                    if reconnected:
-                        self.logger.info("Retrying event dispatch after successful reconnection")
-                    else:
-                        self.logger.warning(f"Reconnection failed, will retry dispatch anyway " f"(attempt {attempt + 2} coming)")
-                    # Continue to next attempt
-                    continue
-                else:
-                    # All attempts exhausted
-                    self._events_lost += 1
-                    self.logger.error(
-                        f"CRITICAL: Failed to dispatch event after {max_attempts} attempts. "
-                        f"Event will be lost. Total events lost this session: {self._events_lost}"
-                    )
-                    self.logger.error(
-                        f"DIAGNOSTIC INFO: "
-                        f"job_id={job_id}, "
-                        f"event_type={event_type}, "
-                        f"queue={self.queue}, "
-                        f"broker_url={self._broker_url}, "
-                        f"last_error={error_type}: {last_error}, "
-                        f"session_reconnections={self._total_reconnections}, "
-                        f"session_events_lost={self._events_lost}"
-                    )
-
-                    # IMPORTANT: Don't raise exception
-                    # Allow job to continue even though this event was lost
-                    # This prevents losing 17+ minutes of work due to event logging failure
-                    break
-
-            except Exception as e:
-                # Catch any other unexpected Redis errors
-                self.logger.error(f"Unexpected error dispatching event to Redis: {type(e).__name__}: {e}")
-                self.logger.error(f"Event context: job_id={job_id}, event_type={event_type}")
-                # Don't raise - allow job to continue
-                break
-
-    def health_check(self):
-        """
-        Check Redis connection health.
-
-        Returns:
-            bool: True if connection is healthy, False otherwise
-        """
-        try:
-            self.connection.ping()
-            return True
-        except Exception as e:
-            self.logger.warning(f"Redis health check failed: {type(e).__name__}: {e}")
-            return False
-
-    def get_connection_stats(self):
-        """
-        Get Redis connection statistics for monitoring.
-
-        Returns:
-            dict: Connection statistics
-        """
-        return {
-            'broker_url': self._broker_url,
-            'queue': self.queue,
-            'connected': self.health_check(),
-            'connection_failures': self._connection_failures,
-            'total_reconnections': self._total_reconnections,
-            'events_lost': self._events_lost,
-        }
+        self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder))
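After this revert, dispatch() is a single rpush with no retry loop, so a dropped Redis connection surfaces to the caller instead of being logged and swallowed. The entire post-__init__ surface of the class is again just:

    def dispatch(self, obj):
        # One write, no reconnect/retry; connection errors propagate to the callback worker.
        self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder))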
@@ -77,14 +77,7 @@ def build_indirect_host_data(job: Job, job_event_queries: dict[str, dict[str, st
         if jq_str_for_event not in compiled_jq_expressions:
             compiled_jq_expressions[resolved_action] = jq.compile(jq_str_for_event)
         compiled_jq = compiled_jq_expressions[resolved_action]
-        try:
-            data_source = compiled_jq.input(event.event_data['res']).all()
-        except Exception as e:
-            logger.warning(f'error for module {resolved_action} and data {event.event_data["res"]}: {e}')
-            continue
-
-        for data in data_source:
+        for data in compiled_jq.input(event.event_data['res']).all():
             # From this jq result (specific to a single Ansible module), get index information about this host record
             if not data.get('canonical_facts'):
                 if not facts_missing_logged:
@@ -1,17 +0,0 @@
-import time
-import logging
-
-from awx.main.dispatch import get_task_queuename
-from awx.main.dispatch.publish import task
-
-
-logger = logging.getLogger(__name__)
-
-
-@task(queue=get_task_queuename)
-def sleep_task(seconds=10, log=False):
-    if log:
-        logger.info('starting sleep_task')
-    time.sleep(seconds)
-    if log:
-        logger.info('finished sleep_task')
@@ -87,8 +87,8 @@ def mock_analytic_post():
         {
             'REDHAT_USERNAME': 'redhat_user',
             'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-            'SUBSCRIPTIONS_CLIENT_ID': '',
-            'SUBSCRIPTIONS_CLIENT_SECRET': '',
+            'SUBSCRIPTIONS_USERNAME': '',
+            'SUBSCRIPTIONS_PASSWORD': '',
         },
         True,
         ('redhat_user', 'redhat_pass'),

@@ -98,8 +98,8 @@ def mock_analytic_post():
         {
             'REDHAT_USERNAME': None,
             'REDHAT_PASSWORD': None,
-            'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
-            'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
+            'SUBSCRIPTIONS_USERNAME': 'subs_user',
+            'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
         },
         True,
         ('subs_user', 'subs_pass'),

@@ -109,8 +109,8 @@ def mock_analytic_post():
         {
             'REDHAT_USERNAME': '',
             'REDHAT_PASSWORD': '',
-            'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
-            'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
+            'SUBSCRIPTIONS_USERNAME': 'subs_user',
+            'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
         },
         True,
         ('subs_user', 'subs_pass'),

@@ -120,8 +120,8 @@ def mock_analytic_post():
         {
             'REDHAT_USERNAME': '',
             'REDHAT_PASSWORD': '',
-            'SUBSCRIPTIONS_CLIENT_ID': '',
-            'SUBSCRIPTIONS_CLIENT_SECRET': '',
+            'SUBSCRIPTIONS_USERNAME': '',
+            'SUBSCRIPTIONS_PASSWORD': '',
         },
         False,
         None,  # No request should be made

@@ -131,8 +131,8 @@ def mock_analytic_post():
         {
             'REDHAT_USERNAME': '',
             'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-            'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
-            'SUBSCRIPTIONS_CLIENT_SECRET': '',
+            'SUBSCRIPTIONS_USERNAME': 'subs_user',
+            'SUBSCRIPTIONS_PASSWORD': '',
         },
         False,
         None,  # Invalid, no request should be made
@@ -97,8 +97,8 @@ class TestAnalyticsGenericView:
                 'INSIGHTS_TRACKING_STATE': True,
                 'REDHAT_USERNAME': 'redhat_user',
                 'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-                'SUBSCRIPTIONS_CLIENT_ID': '',
-                'SUBSCRIPTIONS_CLIENT_SECRET': '',
+                'SUBSCRIPTIONS_USERNAME': '',
+                'SUBSCRIPTIONS_PASSWORD': '',
             },
             ('redhat_user', 'redhat_pass'),
             None,

@@ -109,8 +109,8 @@ class TestAnalyticsGenericView:
                 'INSIGHTS_TRACKING_STATE': True,
                 'REDHAT_USERNAME': '',
                 'REDHAT_PASSWORD': '',
-                'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
-                'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
+                'SUBSCRIPTIONS_USERNAME': 'subs_user',
+                'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
             },
             ('subs_user', 'subs_pass'),
             None,

@@ -121,8 +121,8 @@ class TestAnalyticsGenericView:
                 'INSIGHTS_TRACKING_STATE': True,
                 'REDHAT_USERNAME': '',
                 'REDHAT_PASSWORD': '',
-                'SUBSCRIPTIONS_CLIENT_ID': '',
-                'SUBSCRIPTIONS_CLIENT_SECRET': '',
+                'SUBSCRIPTIONS_USERNAME': '',
+                'SUBSCRIPTIONS_PASSWORD': '',
             },
             None,
             ERROR_MISSING_USER,

@@ -133,8 +133,8 @@ class TestAnalyticsGenericView:
                 'INSIGHTS_TRACKING_STATE': True,
                 'REDHAT_USERNAME': 'redhat_user',
                 'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-                'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
-                'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
+                'SUBSCRIPTIONS_USERNAME': 'subs_user',
+                'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
             },
             ('redhat_user', 'redhat_pass'),
             None,

@@ -145,8 +145,8 @@ class TestAnalyticsGenericView:
                 'INSIGHTS_TRACKING_STATE': True,
                 'REDHAT_USERNAME': '',
                 'REDHAT_PASSWORD': '',
-                'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',  # NOSONAR
-                'SUBSCRIPTIONS_CLIENT_SECRET': '',
+                'SUBSCRIPTIONS_USERNAME': 'subs_user',  # NOSONAR
+                'SUBSCRIPTIONS_PASSWORD': '',
             },
             None,
             ERROR_MISSING_PASSWORD,
@@ -34,18 +34,40 @@ def test_wrapup_does_send_notifications(mocker):
     mock.assert_called_once_with('succeeded')
 
 
+class FakeRedis:
+    def keys(self, *args, **kwargs):
+        return []
+
+    def set(self):
+        pass
+
+    def get(self):
+        return None
+
+    @classmethod
+    def from_url(cls, *args, **kwargs):
+        return cls()
+
+    def pipeline(self):
+        return self
+
+
 class TestCallbackBrokerWorker(TransactionTestCase):
     @pytest.fixture(autouse=True)
-    def turn_off_websockets_and_redis(self, fake_redis):
+    def turn_off_websockets(self):
         with mock.patch('awx.main.dispatch.worker.callback.emit_event_detail', lambda *a, **kw: None):
             yield
 
+    def get_worker(self):
+        with mock.patch('redis.Redis', new=FakeRedis):  # turn off redis stuff
+            return CallbackBrokerWorker()
+
     def event_create_kwargs(self):
         inventory_update = InventoryUpdate.objects.create(source='file', inventory_source=InventorySource.objects.create(source='file'))
         return dict(inventory_update=inventory_update, created=inventory_update.created)
 
     def test_flush_with_valid_event(self):
-        worker = CallbackBrokerWorker()
+        worker = self.get_worker()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
         worker.buff = {InventoryUpdateEvent: events}
         worker.flush()

@@ -53,7 +75,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1
 
     def test_flush_with_invalid_event(self):
-        worker = CallbackBrokerWorker()
+        worker = self.get_worker()
         kwargs = self.event_create_kwargs()
         events = [
             InventoryUpdateEvent(uuid=str(uuid4()), stdout='good1', **kwargs),

@@ -68,7 +90,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert worker.buff == {InventoryUpdateEvent: [events[1]]}
 
     def test_duplicate_key_not_saved_twice(self):
-        worker = CallbackBrokerWorker()
+        worker = self.get_worker()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
         worker.buff = {InventoryUpdateEvent: events.copy()}
         worker.flush()

@@ -82,7 +104,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert worker.buff.get(InventoryUpdateEvent, []) == []
 
     def test_give_up_on_bad_event(self):
-        worker = CallbackBrokerWorker()
+        worker = self.get_worker()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), counter=-2, **self.event_create_kwargs())]
         worker.buff = {InventoryUpdateEvent: events.copy()}
 

@@ -95,7 +117,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 0  # sanity
 
     def test_flush_with_empty_buffer(self):
-        worker = CallbackBrokerWorker()
+        worker = self.get_worker()
         worker.buff = {InventoryUpdateEvent: []}
         with mock.patch.object(InventoryUpdateEvent.objects, 'bulk_create') as flush_mock:
             worker.flush()

@@ -105,7 +127,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         # In postgres, text fields reject NUL character, 0x00
         # tests use sqlite3 which will not raise an error
         # but we can still test that it is sanitized before saving
-        worker = CallbackBrokerWorker()
+        worker = self.get_worker()
         kwargs = self.event_create_kwargs()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), stdout="\x00", **kwargs)]
         assert "\x00" in events[0].stdout  # sanity
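One subtlety in the test refactor above: the mock.patch in get_worker() is active only while the constructor runs. That is sufficient on the assumption, implied by these tests, that CallbackBrokerWorker obtains its Redis handle during __init__ and keeps the resulting FakeRedis instance afterwards:

    def get_worker(self):
        # Patch is scoped to construction; the worker retains the FakeRedis
        # object it built, so individual tests need no further patching.
        with mock.patch('redis.Redis', new=FakeRedis):  # turn off redis stuff
            return CallbackBrokerWorker()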
@@ -63,33 +63,6 @@ def swagger_autogen(requests=__SWAGGER_REQUESTS__):
     return requests
 
 
-class FakeRedis:
-    def keys(self, *args, **kwargs):
-        return []
-
-    def set(self):
-        pass
-
-    def get(self):
-        return None
-
-    @classmethod
-    def from_url(cls, *args, **kwargs):
-        return cls()
-
-    def pipeline(self):
-        return self
-
-    def ping(self):
-        return
-
-
-@pytest.fixture
-def fake_redis():
-    with mock.patch('redis.Redis', new=FakeRedis):  # turn off redis stuff
-        yield
-
-
 @pytest.fixture
 def user():
     def u(name, is_superuser=False):
@@ -3,10 +3,6 @@ import pytest
 # AWX
 from awx.main.ha import is_ha_environment
 from awx.main.models.ha import Instance
-from awx.main.dispatch.pool import get_auto_max_workers
-
-# Django
-from django.test.utils import override_settings
 
 
 @pytest.mark.django_db

@@ -21,25 +17,3 @@ def test_db_localhost():
     Instance.objects.create(hostname='foo', node_type='hybrid')
     Instance.objects.create(hostname='bar', node_type='execution')
     assert is_ha_environment() is False
-
-
-@pytest.mark.django_db
-@pytest.mark.parametrize(
-    'settings',
-    [
-        dict(SYSTEM_TASK_ABS_MEM='16Gi', SYSTEM_TASK_ABS_CPU='24', SYSTEM_TASK_FORKS_MEM=400, SYSTEM_TASK_FORKS_CPU=4),
-        dict(SYSTEM_TASK_ABS_MEM='124Gi', SYSTEM_TASK_ABS_CPU='2', SYSTEM_TASK_FORKS_MEM=None, SYSTEM_TASK_FORKS_CPU=None),
-    ],
-    ids=['cpu_dominated', 'memory_dominated'],
-)
-def test_dispatcher_max_workers_reserve(settings, fake_redis):
-    """This tests that the dispatcher max_workers matches instance capacity
-
-    Assumes capacity_adjustment is 1,
-    plus reserve worker count
-    """
-    with override_settings(**settings):
-        i = Instance.objects.create(hostname='test-1', node_type='hybrid')
-        i.local_health_check()
-
-        assert get_auto_max_workers() == i.capacity + 7, (i.cpu, i.memory, i.cpu_capacity, i.mem_capacity)
@@ -1,37 +0,0 @@
-import json
-from http import HTTPStatus
-from unittest.mock import patch
-
-from requests import Response
-
-from awx.main.utils.licensing import Licenser
-
-
-def test_rhsm_licensing():
-    def mocked_requests_get(*args, **kwargs):
-        assert kwargs['verify'] == True
-        response = Response()
-        subs = json.dumps({'body': []})
-        response.status_code = HTTPStatus.OK
-        response._content = bytes(subs, 'utf-8')
-        return response
-
-    licenser = Licenser()
-    with patch('awx.main.utils.analytics_proxy.OIDCClient.make_request', new=mocked_requests_get):
-        subs = licenser.get_rhsm_subs('localhost', 'admin', 'admin')
-        assert subs == []
-
-
-def test_satellite_licensing():
-    def mocked_requests_get(*args, **kwargs):
-        assert kwargs['verify'] == True
-        response = Response()
-        subs = json.dumps({'results': []})
-        response.status_code = HTTPStatus.OK
-        response._content = bytes(subs, 'utf-8')
-        return response
-
-    licenser = Licenser()
-    with patch('requests.get', new=mocked_requests_get):
-        subs = licenser.get_satellite_subs('localhost', 'admin', 'admin')
-        assert subs == []
@@ -23,7 +23,7 @@ class TokenError(requests.RequestException):
         try:
             client = OIDCClient(...)
             client.make_request(...)
-        except TokenError as e:
+        except TokenGenerationError as e:
             print(f"Token generation failed due to {e.__cause__}")
         except requests.RequestException:
             print("API request failed")

@@ -102,15 +102,13 @@ class OIDCClient:
         self,
         client_id: str,
         client_secret: str,
-        token_url: str = DEFAULT_OIDC_TOKEN_ENDPOINT,
-        scopes: list[str] = None,
+        token_url: str,
+        scopes: list[str],
         base_url: str = '',
     ) -> None:
         self.client_id: str = client_id
         self.client_secret: str = client_secret
         self.token_url: str = token_url
-        if scopes is None:
-            scopes = ['api.console']
         self.scopes = scopes
         self.base_url: str = base_url
         self.token: Optional[Token] = None
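With both defaults gone from __init__, every caller must supply the token endpoint and scope list itself, which is exactly what the view and ship() hunks earlier in this diff now do. A hedged before/after sketch using only names from this file:

    # Before: defaults filled in by the class.
    client = OIDCClient(client_id, client_secret)

    # After this change: call sites are explicit.
    client = OIDCClient(client_id, client_secret, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])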
@@ -38,7 +38,6 @@ from django.utils.translation import gettext_lazy as _
 from awx_plugins.interfaces._temporary_private_licensing_api import detect_server_product_name
 
 from awx.main.constants import SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS
-from awx.main.utils.analytics_proxy import OIDCClient
 
 MAX_INSTANCES = 9999999
 
@@ -229,38 +228,37 @@ class Licenser(object):
         host = getattr(settings, 'REDHAT_CANDLEPIN_HOST', None)
 
         if not user:
-            raise ValueError('subscriptions_client_id is required')
+            raise ValueError('subscriptions_username is required')
 
         if not pw:
-            raise ValueError('subscriptions_client_secret is required')
+            raise ValueError('subscriptions_password is required')
 
         if host and user and pw:
             if 'subscription.rhsm.redhat.com' in host:
-                json = self.get_rhsm_subs(settings.SUBSCRIPTIONS_RHSM_URL, user, pw)
+                json = self.get_rhsm_subs(host, user, pw)
             else:
                 json = self.get_satellite_subs(host, user, pw)
             return self.generate_license_options_from_entitlements(json)
         return []
 
-    def get_rhsm_subs(self, host, client_id, client_secret):
-        client = OIDCClient(client_id, client_secret)
-        subs = client.make_request(
-            'GET',
-            host,
-            verify=True,
-            timeout=(31, 31),
-        )
+    def get_rhsm_subs(self, host, user, pw):
+        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
+        json = []
+        try:
+            subs = requests.get('/'.join([host, 'subscription/users/{}/owners'.format(user)]), verify=verify, auth=(user, pw))
+        except requests.exceptions.ConnectionError as error:
+            raise error
+        except OSError as error:
+            raise OSError(
+                'Unable to open certificate bundle {}. Check that the service is running on Red Hat Enterprise Linux.'.format(verify)
+            ) from error  # noqa
         subs.raise_for_status()
-        subs_formatted = []
-        for sku in subs.json()['body']:
-            sku_data = {k: v for k, v in sku.items() if k != 'subscriptions'}
-            for sub in sku['subscriptions']:
-                sub_data = sku_data.copy()
-                sub_data['subscriptions'] = sub
-                subs_formatted.append(sub_data)
-
-        return subs_formatted
+        for sub in subs.json():
+            resp = requests.get('/'.join([host, 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])]), verify=verify, auth=(user, pw))
+            resp.raise_for_status()
+            json.extend(resp.json())
+        return json
 
     def get_satellite_subs(self, host, user, pw):
         port = None
@@ -269,7 +267,7 @@ class Licenser(object):
             port = str(self.config.get("server", "port"))
         except Exception as e:
             logger.exception('Unable to read rhsm config to get ca_cert location. {}'.format(str(e)))
-        verify = True
+        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
         if port:
             host = ':'.join([host, port])
         json = []
@@ -316,11 +314,20 @@ class Licenser(object):
             return False
         return True

+    def is_appropriate_sub(self, sub):
+        if sub['activeSubscription'] is False:
+            return False
+        # Products that contain Ansible Tower
+        products = sub.get('providedProducts', [])
+        if any(product.get('productId') == '480' for product in products):
+            return True
+        return False
+
     def generate_license_options_from_entitlements(self, json):
         from dateutil.parser import parse

         ValidSub = collections.namedtuple(
-            'ValidSub', 'sku name support_level end_date trial developer_license quantity satellite subscription_id account_number usage'
+            'ValidSub', 'sku name support_level end_date trial developer_license quantity pool_id satellite subscription_id account_number usage'
         )
         valid_subs = []
         for sub in json:
@@ -328,14 +335,10 @@ class Licenser(object):
             if satellite:
                 is_valid = self.is_appropriate_sat_sub(sub)
             else:
-                # the list of subs from console.redhat.com are already valid based on the query params we provided
-                is_valid = True
+                is_valid = self.is_appropriate_sub(sub)
             if is_valid:
                 try:
-                    if satellite:
-                        end_date = parse(sub.get('endDate'))
-                    else:
-                        end_date = parse(sub['subscriptions']['endDate'])
+                    end_date = parse(sub.get('endDate'))
                 except Exception:
                     continue
                 now = datetime.utcnow()
@@ -343,50 +346,44 @@ class Licenser(object):
                 if end_date < now:
                     # If the sub has a past end date, skip it
                     continue
+                try:
+                    quantity = int(sub['quantity'])
+                    if quantity == -1:
+                        # effectively, unlimited
+                        quantity = MAX_INSTANCES
+                except Exception:
+                    continue
+
+                sku = sub['productId']
+                trial = sku.startswith('S')  # i.e., SER/SVC
                 developer_license = False
                 support_level = ''
-                account_number = ''
-                usage = sub.get('usage', '')
+                usage = ''
+                pool_id = sub['id']
+                subscription_id = sub['subscriptionId']
+                account_number = sub['accountNumber']
                 if satellite:
-                    try:
-                        quantity = int(sub['quantity'])
-                    except Exception:
-                        continue
-                    sku = sub['productId']
-                    subscription_id = sub['subscriptionId']
-                    sub_name = sub['productName']
                     support_level = sub['support_level']
-                    account_number = sub['accountNumber']
+                    usage = sub['usage']
                 else:
-                    try:
-                        if sub['capacity']['name'] == "Nodes":
-                            quantity = int(sub['capacity']['quantity']) * int(sub['subscriptions']['quantity'])
-                        else:
-                            continue
-                    except Exception:
-                        continue
-                    sku = sub['sku']
-                    sub_name = sub['name']
-                    support_level = sub['serviceLevel']
-                    subscription_id = sub['subscriptions']['number']
-                    if sub.get('name') == 'RHEL Developer':
-                        developer_license = True
+                    for attr in sub.get('productAttributes', []):
+                        if attr.get('name') == 'support_level':
+                            support_level = attr.get('value')
+                        elif attr.get('name') == 'usage':
+                            usage = attr.get('value')
+                        elif attr.get('name') == 'ph_product_name' and attr.get('value') == 'RHEL Developer':
+                            developer_license = True

-                if quantity == -1:
-                    # effectively, unlimited
-                    quantity = MAX_INSTANCES
-                trial = sku.startswith('S')  # i.e., SER/SVC
-
                 valid_subs.append(
                     ValidSub(
                         sku,
-                        sub_name,
+                        sub['productName'],
                         support_level,
                         end_date,
                         trial,
                         developer_license,
                         quantity,
+                        pool_id,
                         satellite,
                         subscription_id,
                         account_number,
@@ -417,6 +414,7 @@ class Licenser(object):
             license._attrs['satellite'] = satellite
             license._attrs['valid_key'] = True
             license.update(license_date=int(sub.end_date.strftime('%s')))
+            license.update(pool_id=sub.pool_id)
             license.update(subscription_id=sub.subscription_id)
             license.update(account_number=sub.account_number)
             licenses.append(license._attrs.copy())

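Taken together, the Licenser changes above drop the single console.redhat.com RHSM v2 products query and restore the two-step candlepin flow: list the owners (organizations) for the account, gather candidate pools per owner, then keep only the appropriate ones. A minimal standalone sketch of that flow, assuming the requests library; the host, credentials, and response payloads are hypothetical placeholders:

import requests

# All values below are hypothetical placeholders.
host = 'https://subscription.rhsm.redhat.com'
user, pw = 'my_username', 'My Password'

def is_appropriate_sub(sub):
    # Mirrors the method added above: keep only active pools whose provided
    # products include productId '480' (products that contain Ansible Tower).
    if sub['activeSubscription'] is False:
        return False
    products = sub.get('providedProducts', [])
    return any(product.get('productId') == '480' for product in products)

# Step 1: list the owners (organizations) the account belongs to.
owners = requests.get('/'.join([host, 'subscription/users/{}/owners'.format(user)]), auth=(user, pw))
owners.raise_for_status()

# Step 2: collect candidate pools per owner, matched on *tower* product names.
pools = []
for owner in owners.json():  # e.g. [{'key': '1234567', ...}]
    resp = requests.get('/'.join([host, 'subscription/owners/{}/pools/?match=*tower*'.format(owner['key'])]), auth=(user, pw))
    resp.raise_for_status()
    pools.extend(resp.json())

# Step 3: filter to valid Ansible Automation Platform pools.
valid = [pool for pool in pools if is_appropriate_sub(pool)]
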
@@ -964,9 +964,6 @@ CLUSTER_HOST_ID = socket.gethostname()
 # - 'unique_managed_hosts': Compliant = automated - deleted hosts (using /api/v2/host_metrics/)
 SUBSCRIPTION_USAGE_MODEL = ''

-# Default URL and query params for obtaining valid AAP subscriptions
-SUBSCRIPTIONS_RHSM_URL = 'https://console.redhat.com/api/rhsm/v2/products?include=providedProducts&oids=480&status=Active'
-
 # Host metrics cleanup - last time of the task/command run
 CLEANUP_HOST_METRICS_LAST_TS = None
 # Host metrics cleanup - minimal interval between two cleanups in days

@@ -31,9 +31,9 @@ options:
           unlicensed or trial licensed. When force=true, the license is always applied.
       type: bool
       default: 'False'
-    subscription_id:
+    pool_id:
       description:
-        - Red Hat or Red Hat Satellite subscription_id to attach to
+        - Red Hat or Red Hat Satellite pool_id to attach to
       required: False
       type: str
     state:
@@ -57,9 +57,9 @@ EXAMPLES = '''
     username: "my_satellite_username"
     password: "my_satellite_password"

-- name: Attach to a subscription (requires fetching subscriptions at least once before)
+- name: Attach to a pool (requires fetching subscriptions at least once before)
   license:
-    subscription_id: 123456
+    pool_id: 123456

 - name: Remove license
   license:
@@ -75,14 +75,14 @@ def main():
     module = ControllerAPIModule(
         argument_spec=dict(
             manifest=dict(type='str', required=False),
-            subscription_id=dict(type='str', required=False),
+            pool_id=dict(type='str', required=False),
             force=dict(type='bool', default=False),
             state=dict(choices=['present', 'absent'], default='present'),
         ),
         required_if=[
-            ['state', 'present', ['manifest', 'subscription_id'], True],
+            ['state', 'present', ['manifest', 'pool_id'], True],
         ],
-        mutually_exclusive=[("manifest", "subscription_id")],
+        mutually_exclusive=[("manifest", "pool_id")],
     )

     json_output = {'changed': False}
@@ -124,7 +124,7 @@ def main():
     if module.params.get('manifest', None):
         module.post_endpoint('config', data={'manifest': manifest.decode()})
     else:
-        module.post_endpoint('config/attach', data={'subscription_id': module.params.get('subscription_id')})
+        module.post_endpoint('config/attach', data={'pool_id': module.params.get('pool_id')})

     module.exit_json(**json_output)

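With the module change above, attaching by id goes to the controller's config/attach endpoint with a pool_id payload rather than a subscription_id. A rough equivalent using plain requests; the controller URL, credentials, and the /api/v2/ prefix are assumptions for illustration:

import requests

controller = 'https://controller.example.com'  # placeholder
admin_auth = ('admin', 'password')             # placeholder credentials

# Equivalent of the module call: license: { pool_id: 123456 }
resp = requests.post(
    '{}/api/v2/config/attach/'.format(controller),
    json={'pool_id': '123456'},
    auth=admin_auth,
)
resp.raise_for_status()
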
@@ -20,15 +20,15 @@ description:
     - Get subscriptions available to Automation Platform Controller. See
       U(https://www.ansible.com/tower) for an overview.
 options:
-    client_id:
+    username:
       description:
-        - Red Hat service account client ID or Red Hat Satellite username to get available subscriptions.
+        - Red Hat or Red Hat Satellite username to get available subscriptions.
         - The credentials you use will be stored for future use in retrieving renewal or expanded subscriptions
       required: True
       type: str
-    client_secret:
+    password:
       description:
-        - Red Hat service account client secret or Red Hat Satellite password to get available subscriptions.
+        - Red Hat or Red Hat Satellite password to get available subscriptions.
         - The credentials you use will be stored for future use in retrieving renewal or expanded subscriptions
       required: True
       type: str
@@ -53,13 +53,13 @@ subscriptions:
 EXAMPLES = '''
 - name: Get subscriptions
   subscriptions:
-    client_id: "c6bd7594-d776-46e5-8156-6d17af147479"
-    client_secret: "MO9QUvoOZ5fc5JQKXoTch1AsTLI7nFsZ"
+    username: "my_username"
+    password: "My Password"

 - name: Get subscriptions with a filter
   subscriptions:
-    client_id: "c6bd7594-d776-46e5-8156-6d17af147479"
-    client_secret: "MO9QUvoOZ5fc5JQKXoTch1AsTLI7nFsZ"
+    username: "my_username"
+    password: "My Password"
     filters:
       product_name: "Red Hat Ansible Automation Platform"
       support_level: "Self-Support"
@@ -72,8 +72,8 @@ def main():

     module = ControllerAPIModule(
         argument_spec=dict(
-            client_id=dict(type='str', required=True),
-            client_secret=dict(type='str', no_log=True, required=True),
+            username=dict(type='str', required=True),
+            password=dict(type='str', no_log=True, required=True),
             filters=dict(type='dict', required=False, default={}),
         ),
     )
@@ -82,8 +82,8 @@ def main():

     # Check if Tower is already licensed
     post_data = {
-        'subscriptions_client_secret': module.params.get('client_secret'),
-        'subscriptions_client_id': module.params.get('client_id'),
+        'subscriptions_password': module.params.get('password'),
+        'subscriptions_username': module.params.get('username'),
     }
     all_subscriptions = module.post_endpoint('config/subscriptions', data=post_data)['json']
     json_output['subscriptions'] = []

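The subscriptions module change is symmetric: it now submits a username/password pair to the controller's config/subscriptions endpoint, which drives the get_rhsm_subs()/get_satellite_subs() lookup shown earlier. A sketch of the same call made directly, under the same placeholder assumptions as the previous example; the pool_id assumed on each returned entry comes from the Licenser changes above and can feed the license module's attach step:

import requests

controller = 'https://controller.example.com'  # placeholder
admin_auth = ('admin', 'password')             # placeholder credentials

subs = requests.post(
    '{}/api/v2/config/subscriptions/'.format(controller),
    json={
        'subscriptions_username': 'my_username',
        'subscriptions_password': 'My Password',
    },
    auth=admin_auth,
)
subs.raise_for_status()

for sub in subs.json():
    # pool_id is set on each entry by license.update(pool_id=...) above
    print(sub.get('pool_id'))
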
@@ -1,11 +0,0 @@
-Copyright 2022 Rick van Hattem
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

BIN licenses/psycopg-3.2.6.tar.gz (new file; binary file not shown)
BIN licenses/uwsgi-2.0.29.tar.gz (new file; binary file not shown)

@@ -54,7 +54,7 @@ python-tss-sdk>=1.2.1
 pyyaml>=6.0.2  # require packing fix for cython 3 or higher
 pyzstd  # otel collector log file compression library
 receptorctl
-sqlparse>=0.4.4  # Required by django https://github.com/ansible/awx/security/dependabot/96
+sqlparse>=0.5.2
 redis[hiredis]
 requests
 slack-sdk

@@ -1,13 +1,13 @@
 adal==1.2.7
     # via msrestazure
-aiohappyeyeballs==2.4.4
+aiohappyeyeballs==2.6.1
     # via aiohttp
-aiohttp==3.11.11
+aiohttp==3.11.16
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   aiohttp-retry
     #   twilio
-aiohttp-retry==2.8.3
+aiohttp-retry==2.9.1
     # via twilio
 aiosignal==1.3.2
     # via aiohttp
@@ -25,9 +25,9 @@ asgiref==3.8.1
     #   django
     #   django-ansible-base
     #   django-cors-headers
-asn1==2.7.1
+asn1==3.0.0
     # via -r /awx_devel/requirements/requirements.in
-attrs==24.3.0
+attrs==25.3.0
     # via
     #   aiohttp
     #   jsonschema
@@ -46,14 +46,14 @@ awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.g
     # via
     #   -r /awx_devel/requirements/requirements_git.txt
     #   awx-plugins-core
-azure-core==1.32.0
+azure-core==1.33.0
     # via
     #   azure-identity
     #   azure-keyvault-certificates
     #   azure-keyvault-keys
     #   azure-keyvault-secrets
     #   msrest
-azure-identity==1.19.0
+azure-identity==1.21.0
     # via -r /awx_devel/requirements/requirements.in
 azure-keyvault==4.2.0
     # via -r /awx_devel/requirements/requirements.in
@@ -65,14 +65,14 @@ azure-keyvault-secrets==4.9.0
     # via azure-keyvault
 backports-tarfile==1.2.0
     # via jaraco-context
-boto3==1.35.96
+boto3==1.37.34
     # via -r /awx_devel/requirements/requirements.in
-botocore==1.35.96
+botocore==1.37.34
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   boto3
     #   s3transfer
-cachetools==5.5.0
+cachetools==5.5.2
     # via google-auth
 # git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
     # via
@@ -84,7 +84,7 @@ cffi==1.17.1
     # via
     #   cryptography
     #   pynacl
-channels==4.2.0
+channels==4.2.2
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   channels-redis
@@ -109,11 +109,11 @@ cryptography==41.0.7
     #   pyjwt
     #   pyopenssl
     #   service-identity
-cython==3.0.11
+cython==3.0.12
     # via -r /awx_devel/requirements/requirements.in
 daphne==4.1.2
     # via -r /awx_devel/requirements/requirements.in
-deprecated==1.2.15
+deprecated==1.2.18
     # via
     #   opentelemetry-api
     #   opentelemetry-exporter-otlp-proto-grpc
@@ -138,19 +138,19 @@ django==4.2.20
     #   djangorestframework
 # django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
     # via -r /awx_devel/requirements/requirements_git.txt
-django-cors-headers==4.6.0
+django-cors-headers==4.7.0
     # via -r /awx_devel/requirements/requirements.in
 django-crum==0.7.9
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   django-ansible-base
-django-extensions==3.2.3
+django-extensions==4.1
     # via -r /awx_devel/requirements/requirements.in
 django-flags==5.0.13
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   django-ansible-base
-django-guid==3.5.0
+django-guid==3.5.1
     # via -r /awx_devel/requirements/requirements.in
 django-oauth-toolkit==1.7.1
     # via -r /awx_devel/requirements/requirements.in
@@ -158,7 +158,7 @@ django-polymorphic==3.1.0
     # via -r /awx_devel/requirements/requirements.in
 django-solo==2.4.0
     # via -r /awx_devel/requirements/requirements.in
-djangorestframework==3.15.2
+djangorestframework==3.16.0
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   django-ansible-base
@@ -172,7 +172,7 @@ dynaconf==3.2.10
     #   django-ansible-base
 enum-compat==0.0.3
     # via asn1
-filelock==3.16.1
+filelock==3.18.0
     # via -r /awx_devel/requirements/requirements.in
 frozenlist==1.5.0
     # via
@@ -182,13 +182,13 @@ gitdb==4.0.12
     # via gitpython
 gitpython==3.1.44
     # via -r /awx_devel/requirements/requirements.in
-google-auth==2.37.0
+google-auth==2.39.0
     # via kubernetes
-googleapis-common-protos==1.66.0
+googleapis-common-protos==1.70.0
     # via
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-grpcio==1.69.0
+grpcio==1.71.0
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   opentelemetry-exporter-otlp-proto-grpc
@@ -204,7 +204,7 @@ idna==3.10
     #   requests
     #   twisted
     #   yarl
-importlib-metadata==8.5.0
+importlib-metadata==8.6.1
     # via opentelemetry-api
 importlib-resources==6.5.2
     # via irc
@@ -237,7 +237,7 @@ jaraco-text==4.0.0
     # via
     #   irc
     #   jaraco-collections
-jinja2==3.1.5
+jinja2==3.1.6
     # via -r /awx_devel/requirements/requirements.in
 jmespath==1.0.1
     # via
@@ -245,7 +245,7 @@ jmespath==1.0.1
     #   botocore
 jq==1.8.0
     # via -r /awx_devel/requirements/requirements.in
-json-log-formatter==1.1
+json-log-formatter==1.1.1
     # via -r /awx_devel/requirements/requirements.in
 jsonschema==4.23.0
     # via -r /awx_devel/requirements/requirements.in
@@ -253,27 +253,27 @@ jsonschema-specifications==2024.10.1
     # via jsonschema
 jwcrypto==1.5.6
     # via django-oauth-toolkit
-kubernetes==31.0.0
+kubernetes==32.0.1
     # via openshift
 lockfile==0.12.2
     # via python-daemon
-markdown==3.7
+markdown==3.8
     # via -r /awx_devel/requirements/requirements.in
 markupsafe==3.0.2
     # via jinja2
-maturin==1.8.1
+maturin==1.8.3
     # via -r /awx_devel/requirements/requirements.in
-more-itertools==10.5.0
+more-itertools==10.6.0
     # via
     #   irc
     #   jaraco-functools
     #   jaraco-stream
     #   jaraco-text
-msal==1.31.1
+msal==1.32.0
     # via
     #   azure-identity
     #   msal-extensions
-msal-extensions==1.2.0
+msal-extensions==1.3.1
     # via azure-identity
 msgpack==1.1.0
     # via
@@ -283,7 +283,7 @@ msrest==0.7.1
     # via msrestazure
 msrestazure==0.6.4.post1
     # via -r /awx_devel/requirements/requirements.in
-multidict==6.1.0
+multidict==6.4.3
     # via
     #   aiohttp
     #   yarl
@@ -294,7 +294,7 @@ oauthlib==3.2.2
     #   requests-oauthlib
 openshift==0.13.2
     # via -r /awx_devel/requirements/requirements.in
-opentelemetry-api==1.29.0
+opentelemetry-api==1.32.0
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   opentelemetry-exporter-otlp-proto-grpc
@@ -303,31 +303,31 @@ opentelemetry-api==1.29.0
     #   opentelemetry-instrumentation-logging
     #   opentelemetry-sdk
     #   opentelemetry-semantic-conventions
-opentelemetry-exporter-otlp==1.29.0
+opentelemetry-exporter-otlp==1.32.0
     # via -r /awx_devel/requirements/requirements.in
-opentelemetry-exporter-otlp-proto-common==1.29.0
+opentelemetry-exporter-otlp-proto-common==1.32.0
     # via
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-grpc==1.29.0
+opentelemetry-exporter-otlp-proto-grpc==1.32.0
     # via opentelemetry-exporter-otlp
-opentelemetry-exporter-otlp-proto-http==1.29.0
+opentelemetry-exporter-otlp-proto-http==1.32.0
     # via opentelemetry-exporter-otlp
-opentelemetry-instrumentation==0.50b0
+opentelemetry-instrumentation==0.53b0
     # via opentelemetry-instrumentation-logging
-opentelemetry-instrumentation-logging==0.50b0
+opentelemetry-instrumentation-logging==0.53b0
     # via -r /awx_devel/requirements/requirements.in
-opentelemetry-proto==1.29.0
+opentelemetry-proto==1.32.0
     # via
     #   opentelemetry-exporter-otlp-proto-common
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.29.0
+opentelemetry-sdk==1.32.0
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-semantic-conventions==0.50b0
+opentelemetry-semantic-conventions==0.53b0
     # via
     #   opentelemetry-instrumentation
     #   opentelemetry-sdk
@@ -342,21 +342,19 @@ pexpect==4.7.0
     #   ansible-runner
 pkgconfig==1.5.5
     # via -r /awx_devel/requirements/requirements.in
-portalocker==2.10.1
-    # via msal-extensions
 prometheus-client==0.21.1
     # via -r /awx_devel/requirements/requirements.in
-propcache==0.2.1
+propcache==0.3.1
     # via
     #   aiohttp
     #   yarl
-protobuf==5.29.3
+protobuf==5.29.4
     # via
     #   googleapis-common-protos
     #   opentelemetry-proto
-psutil==6.1.1
+psutil==7.0.0
     # via -r /awx_devel/requirements/requirements.in
-psycopg==3.2.3
+psycopg==3.2.6
     # via -r /awx_devel/requirements/requirements.in
 ptyprocess==0.7.0
     # via pexpect
@@ -365,7 +363,7 @@ pyasn1==0.6.1
     #   pyasn1-modules
     #   rsa
     #   service-identity
-pyasn1-modules==0.4.1
+pyasn1-modules==0.4.2
     # via
     #   google-auth
     #   service-identity
@@ -384,7 +382,7 @@ pyjwt[crypto]==2.10.1
     #   twilio
 pynacl==1.5.0
     # via pygithub
-pyopenssl==24.3.0
+pyopenssl==25.0.0
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   twisted
@@ -407,7 +405,7 @@ python-string-utils==1.0.0
     # via openshift
 python-tss-sdk==1.2.3
     # via -r /awx_devel/requirements/requirements.in
-pytz==2024.2
+pytz==2025.2
     # via irc
 pyyaml==6.0.2
     # via
@@ -418,13 +416,13 @@ pyyaml==6.0.2
     #   receptorctl
 pyzstd==0.16.2
     # via -r /awx_devel/requirements/requirements.in
-receptorctl==1.5.2
+receptorctl==1.5.4
     # via -r /awx_devel/requirements/requirements.in
 redis[hiredis]==5.2.1
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   channels-redis
-referencing==0.35.1
+referencing==0.36.2
     # via
     #   jsonschema
     #   jsonschema-specifications
@@ -448,21 +446,21 @@ requests-oauthlib==2.0.0
     # via
     #   kubernetes
     #   msrest
-rpds-py==0.22.3
+rpds-py==0.24.0
     # via
     #   jsonschema
     #   referencing
 rsa==4.9
     # via google-auth
-s3transfer==0.10.4
+s3transfer==0.11.4
     # via boto3
 semantic-version==2.10.0
     # via setuptools-rust
 service-identity==24.2.0
     # via twisted
-setuptools-rust==1.10.2
+setuptools-rust==1.11.1
     # via -r /awx_devel/requirements/requirements.in
-setuptools-scm[toml]==8.1.0
+setuptools-scm[toml]==8.2.0
     # via -r /awx_devel/requirements/requirements.in
 six==1.17.0
     # via
@@ -472,7 +470,7 @@ six==1.17.0
     #   openshift
     #   pygerduty
     #   python-dateutil
-slack-sdk==3.34.0
+slack-sdk==3.35.0
     # via -r /awx_devel/requirements/requirements.in
 smmap==5.0.2
     # via gitdb
@@ -485,7 +483,7 @@ tempora==5.8.0
     # via
     #   irc
     #   jaraco-logging
-twilio==9.4.2
+twilio==9.5.2
     # via -r /awx_devel/requirements/requirements.in
 twisted[tls]==24.11.0
     # via
@@ -493,7 +491,7 @@ twisted[tls]==24.11.0
     #   daphne
 txaio==23.1.1
     # via autobahn
-typing-extensions==4.12.2
+typing-extensions==4.13.2
     # via
     #   azure-core
     #   azure-identity
@@ -504,15 +502,17 @@ typing-extensions==4.12.2
     #   opentelemetry-sdk
     #   psycopg
     #   pygithub
+    #   pyopenssl
+    #   referencing
     #   twisted
-urllib3==2.3.0
+urllib3==2.4.0
     # via
     #   botocore
     #   django-ansible-base
     #   kubernetes
     #   pygithub
     #   requests
-uwsgi==2.0.28
+uwsgi==2.0.29
     # via -r /awx_devel/requirements/requirements.in
 uwsgitop==0.12
     # via -r /awx_devel/requirements/requirements.in
@@ -520,11 +520,11 @@ websocket-client==1.8.0
     # via kubernetes
 wheel==0.45.1
     # via -r /awx_devel/requirements/requirements.in
-wrapt==1.17.0
+wrapt==1.17.2
     # via
     #   deprecated
     #   opentelemetry-instrumentation
-yarl==1.18.3
+yarl==1.19.0
     # via aiohttp
 zipp==3.21.0
     # via importlib-metadata

@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-
-from django import setup
-
-from awx import prepare_env
-
-prepare_env()
-
-setup()
-
-# Keeping this in test folder allows it to be importable
-from awx.main.tests.data.sleep_task import sleep_task
-
-
-for i in range(634):
-    sleep_task.delay()