Mirror of https://github.com/ansible/awx.git (synced 2026-02-09 13:44:42 -03:30)

Compare commits: upgrade-sq...fix/redis- (6 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 8033b7cbe9 |  |
|  | 95289ff28c |  |
|  | 000f6b0708 |  |
|  | c799d51ec8 |  |
|  | db6e8b9bad |  |
|  | 483417762f |  |
.gitignore (vendored): 2 changes
@@ -150,6 +150,8 @@ use_dev_supervisor.txt

 awx/ui/src
 awx/ui/build
+awx/ui/.ui-built
 awx/ui_next

 # Docs build stuff
 docs/docsite/build/
@@ -10,7 +10,7 @@ from awx.api.generics import APIView, Response
 from awx.api.permissions import AnalyticsPermission
 from awx.api.versioning import reverse
 from awx.main.utils import get_awx_version
-from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
+from awx.main.utils.analytics_proxy import OIDCClient
 from rest_framework import status

 from collections import OrderedDict
@@ -205,7 +205,7 @@ class AnalyticsGenericView(APIView):
         try:
             rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
             rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
-            client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
+            client = OIDCClient(rh_user, rh_password)
             response = client.make_request(
                 method,
                 url,
@@ -219,8 +219,8 @@ class AnalyticsGenericView(APIView):
                 logger.error("Automation Analytics API request failed, trying base auth method")
                 response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
         except MissingSettings:
-            rh_user = self._get_setting('SUBSCRIPTIONS_USERNAME', None, ERROR_MISSING_USER)
-            rh_password = self._get_setting('SUBSCRIPTIONS_PASSWORD', None, ERROR_MISSING_PASSWORD)
+            rh_user = self._get_setting('SUBSCRIPTIONS_CLIENT_ID', None, ERROR_MISSING_USER)
+            rh_password = self._get_setting('SUBSCRIPTIONS_CLIENT_SECRET', None, ERROR_MISSING_PASSWORD)
             response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)

     #
     # Missing or wrong user/pass
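Behind `client.make_request(...)`, `OIDCClient` performs an OAuth2 client-credentials exchange before hitting the Analytics API. The sketch below is illustrative only, not AWX's actual implementation: the token endpoint is the Red Hat SSO URL we assume `DEFAULT_OIDC_TOKEN_ENDPOINT` points at, and `fetch_token`/`make_request` are hypothetical names.

```python
# Hedged sketch of an OAuth2 client-credentials flow like the one OIDCClient wraps.
# The endpoint, helper names, and scope handling are assumptions, not AWX's code.
import requests

ASSUMED_TOKEN_URL = 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token'


def fetch_token(client_id, client_secret, scopes=('api.console',)):
    # Exchange the service-account credentials for a short-lived bearer token.
    resp = requests.post(
        ASSUMED_TOKEN_URL,
        data={
            'grant_type': 'client_credentials',
            'client_id': client_id,
            'client_secret': client_secret,
            'scope': ' '.join(scopes),
        },
        timeout=31,
    )
    resp.raise_for_status()
    return resp.json()['access_token']


def make_request(method, url, client_id, client_secret, **kwargs):
    # Attach the bearer token and forward everything else to requests.
    headers = kwargs.pop('headers', None) or {}
    headers['Authorization'] = 'Bearer {}'.format(fetch_token(client_id, client_secret))
    return requests.request(method, url, headers=headers, **kwargs)
```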
@@ -32,6 +32,7 @@ from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ
+from awx.main.utils.analytics_proxy import TokenError
 from awx.main.utils.licensing import get_licenser

 logger = logging.getLogger('awx.api.views.root')
@@ -176,19 +177,21 @@ class ApiV2SubscriptionView(APIView):

     def post(self, request):
         data = request.data.copy()
-        if data.get('subscriptions_password') == '$encrypted$':
-            data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
+        if data.get('subscriptions_client_secret') == '$encrypted$':
+            data['subscriptions_client_secret'] = settings.SUBSCRIPTIONS_CLIENT_SECRET
         try:
-            user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
+            user, pw = data.get('subscriptions_client_id'), data.get('subscriptions_client_secret')
             with set_environ(**settings.AWX_TASK_ENV):
                 validated = get_licenser().validate_rh(user, pw)
             if user:
-                settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
+                settings.SUBSCRIPTIONS_CLIENT_ID = data['subscriptions_client_id']
             if pw:
-                settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
+                settings.SUBSCRIPTIONS_CLIENT_SECRET = data['subscriptions_client_secret']
         except Exception as exc:
             msg = _("Invalid Subscription")
-            if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
+            if isinstance(exc, TokenError) or (
+                isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401
+            ):
                 msg = _("The provided credentials are invalid (HTTP 401).")
             elif isinstance(exc, requests.exceptions.ProxyError):
                 msg = _("Unable to connect to proxy server.")
@@ -215,12 +218,12 @@ class ApiV2AttachView(APIView):

     def post(self, request):
         data = request.data.copy()
-        pool_id = data.get('pool_id', None)
-        if not pool_id:
-            return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
-        user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
-        pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
-        if pool_id and user and pw:
+        subscription_id = data.get('subscription_id', None)
+        if not subscription_id:
+            return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
+        user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
+        pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
+        if subscription_id and user and pw:
             data = request.data.copy()
             try:
                 with set_environ(**settings.AWX_TASK_ENV):
@@ -239,7 +242,7 @@ class ApiV2AttachView(APIView):
             logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
             return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
         for sub in validated:
-            if sub['pool_id'] == pool_id:
+            if sub['subscription_id'] == subscription_id:
                 sub['valid_key'] = True
                 settings.LICENSE = sub
                 return Response(sub)
@@ -207,7 +207,8 @@ class URLField(CharField):
         if self.allow_plain_hostname:
             try:
                 url_parts = urlparse.urlsplit(value)
-                if url_parts.hostname and '.' not in url_parts.hostname:
+                looks_like_ipv6 = bool(url_parts.netloc and url_parts.netloc.startswith('[') and url_parts.netloc.endswith(']'))
+                if not looks_like_ipv6 and url_parts.hostname and '.' not in url_parts.hostname:
                     netloc = '{}.local'.format(url_parts.hostname)
                     if url_parts.port:
                         netloc = '{}:{}'.format(netloc, url_parts.port)
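The new guard relies on `urlsplit` keeping the square brackets of an IPv6 literal in `netloc` while stripping them from `hostname`. A quick standard-library illustration of why the old dot-check alone misfired:

```python
from urllib.parse import urlsplit  # the diff's urlparse.urlsplit behaves the same

parts = urlsplit('https://[2001:db8::1]/api')
print(parts.netloc)    # '[2001:db8::1]'  -> brackets preserved, triggers looks_like_ipv6
print(parts.hostname)  # '2001:db8::1'    -> no dots, so the old check would have
                       # rewritten the host to '2001:db8::1.local' and broken the URL
```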
@@ -27,5 +27,5 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):


 def prefill_rh_credentials(apps, schema_editor):
-    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
-    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)
+    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_CLIENT_ID', encrypted=False)
+    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_CLIENT_SECRET', encrypted=True)
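For readers without the surrounding migration file in view: a helper with `_migrate_setting`'s signature typically copies the value stored under the old setting key to the new key via the historical `Setting` model. A minimal sketch, assuming AWX's `conf.Setting` model with `key`/`value` fields (not verified against the real helper):

```python
# Hedged sketch of what a _migrate_setting helper of this shape plausibly does.
# The conf.Setting model and its key/value fields are assumptions.
def _migrate_setting(apps, old_key, new_key, encrypted=False):
    Setting = apps.get_model('conf', 'Setting')
    for old_setting in Setting.objects.filter(key=old_key):
        # Encrypted values are stored opaquely here and copied as-is; the real
        # helper may need to re-encrypt, since encryption can be keyed by setting name.
        Setting.objects.update_or_create(key=new_key, defaults={'value': old_setting.value})
```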
@@ -128,3 +128,41 @@ class TestURLField:
         else:
             with pytest.raises(ValidationError):
                 field.run_validators(url)
+
+    @pytest.mark.parametrize(
+        "url, expect_error",
+        [
+            ("https://[1:2:3]", True),
+            ("http://[1:2:3]", True),
+            ("https://[2001:db8:3333:4444:5555:6666:7777:8888", True),
+            ("https://2001:db8:3333:4444:5555:6666:7777:8888", True),
+            ("https://[2001:db8:3333:4444:5555:6666:7777:8888]", False),
+            ("https://[::1]", False),
+            ("https://[::]", False),
+            ("https://[2001:db8::1]", False),
+            ("https://[2001:db8:0:0:0:0:1:1]", False),
+            ("https://[fe80::2%eth0]", True),  # ipv6 scope identifier
+            ("https://[fe80:0:0:0:200:f8ff:fe21:67cf]", False),
+            ("https://[::ffff:192.168.1.10]", False),
+            ("https://[0:0:0:0:0:ffff:c000:0201]", False),
+            ("https://[2001:0db8:000a:0001:0000:0000:0000:0000]", False),
+            ("https://[2001:db8:a:1::]", False),
+            ("https://[ff02::1]", False),
+            ("https://[ff02:0:0:0:0:0:0:1]", False),
+            ("https://[fc00::1]", False),
+            ("https://[fd12:3456:789a:1::1]", False),
+            ("https://[2001:db8::abcd:ef12:3456:7890]", False),
+            ("https://[2001:db8:0000:abcd:0000:ef12:0000:3456]", False),
+            ("https://[::ffff:10.0.0.1]", False),
+            ("https://[2001:db8:cafe::]", False),
+            ("https://[2001:db8:cafe:0:0:0:0:0]", False),
+            ("https://[fe80::210:f3ff:fedf:4567%3]", True),  # ipv6 scope identifier, numerical interface
+        ],
+    )
+    def test_ipv6_urls(self, url, expect_error):
+        field = URLField()
+        if expect_error:
+            with pytest.raises(ValidationError, match="Enter a valid URL"):
+                field.run_validators(url)
+        else:
+            field.run_validators(url)
@@ -22,7 +22,7 @@ from ansible_base.lib.utils.db import advisory_lock
 from awx.main.models import Job
 from awx.main.access import access_registry
 from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
-from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
+from awx.main.utils.analytics_proxy import OIDCClient

 __all__ = ['register', 'gather', 'ship']
@@ -186,7 +186,7 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti

     if not (
         settings.AUTOMATION_ANALYTICS_URL
-        and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_USERNAME and settings.SUBSCRIPTIONS_PASSWORD))
+        and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_CLIENT_ID and settings.SUBSCRIPTIONS_CLIENT_SECRET))
     ):
         logger.log(log_level, "Not gathering analytics, configuration is invalid. Use --dry-run to gather locally without sending.")
         return None
@@ -368,8 +368,20 @@ def ship(path):
         logger.error('AUTOMATION_ANALYTICS_URL is not set')
         return False

-    rh_user = getattr(settings, 'REDHAT_USERNAME', None)
-    rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
+    rh_id = getattr(settings, 'REDHAT_USERNAME', None)
+    rh_secret = getattr(settings, 'REDHAT_PASSWORD', None)
+
+    if not (rh_id and rh_secret):
+        rh_id = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
+        rh_secret = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
+
+    if not rh_id:
+        logger.error('Neither REDHAT_USERNAME nor SUBSCRIPTIONS_CLIENT_ID are set')
+        return False
+
+    if not rh_secret:
+        logger.error('Neither REDHAT_PASSWORD nor SUBSCRIPTIONS_CLIENT_SECRET are set')
+        return False

     with open(path, 'rb') as f:
         files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
@@ -377,25 +389,13 @@ def ship(path):
         s.headers = get_awx_http_client_headers()
         s.headers.pop('Content-Type')
         with set_environ(**settings.AWX_TASK_ENV):
-            if rh_user and rh_password:
-                try:
-                    client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
-                    response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
-                except requests.RequestException:
-                    logger.error("Automation Analytics API request failed, trying base auth method")
-                    response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
-            elif not rh_user or not rh_password:
-                logger.info('REDHAT_USERNAME and REDHAT_PASSWORD are not set, using SUBSCRIPTIONS_USERNAME and SUBSCRIPTIONS_PASSWORD')
-                rh_user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
-                rh_password = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
-                if rh_user and rh_password:
-                    response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
-                elif not rh_user:
-                    logger.error('REDHAT_USERNAME and SUBSCRIPTIONS_USERNAME are not set')
-                    return False
-                elif not rh_password:
-                    logger.error('REDHAT_PASSWORD and SUBSCRIPTIONS_USERNAME are not set')
-                    return False
+            try:
+                client = OIDCClient(rh_id, rh_secret)
+                response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
+            except requests.RequestException:
+                logger.error("Automation Analytics API request failed, trying base auth method")
+                response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_id, rh_secret), headers=s.headers, timeout=(31, 31))

         # Accept 2XX status_codes
         if response.status_code >= 300:
             logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))
@@ -124,8 +124,8 @@ register(
     allow_blank=True,
     encrypted=False,
     read_only=False,
-    label=_('Red Hat customer username'),
-    help_text=_('This username is used to send data to Automation Analytics'),
+    label=_('Red Hat Client ID for Analytics'),
+    help_text=_('Client ID used to send data to Automation Analytics'),
     category=_('System'),
     category_slug='system',
 )
@@ -137,34 +137,34 @@ register(
     allow_blank=True,
     encrypted=True,
     read_only=False,
-    label=_('Red Hat customer password'),
-    help_text=_('This password is used to send data to Automation Analytics'),
+    label=_('Red Hat Client Secret for Analytics'),
+    help_text=_('Client secret used to send data to Automation Analytics'),
     category=_('System'),
     category_slug='system',
 )

 register(
-    'SUBSCRIPTIONS_USERNAME',
+    'SUBSCRIPTIONS_CLIENT_ID',
     field_class=fields.CharField,
     default='',
     allow_blank=True,
     encrypted=False,
     read_only=False,
-    label=_('Red Hat or Satellite username'),
-    help_text=_('This username is used to retrieve subscription and content information'),  # noqa
+    label=_('Red Hat Client ID for Subscriptions'),
+    help_text=_('Client ID used to retrieve subscription and content information'),  # noqa
     category=_('System'),
     category_slug='system',
 )

 register(
-    'SUBSCRIPTIONS_PASSWORD',
+    'SUBSCRIPTIONS_CLIENT_SECRET',
     field_class=fields.CharField,
     default='',
     allow_blank=True,
     encrypted=True,
     read_only=False,
-    label=_('Red Hat or Satellite password'),
-    help_text=_('This password is used to retrieve subscription and content information'),  # noqa
+    label=_('Red Hat Client Secret for Subscriptions'),
+    help_text=_('Client secret used to retrieve subscription and content information'),  # noqa
     category=_('System'),
     category_slug='system',
 )
@@ -7,6 +7,7 @@ import time
 import traceback
 from datetime import datetime
 from uuid import uuid4
+import json

 import collections
 from multiprocessing import Process
@@ -25,7 +26,10 @@ from ansible_base.lib.logging.runtime import log_excess_runtime

 from awx.main.models import UnifiedJob
 from awx.main.dispatch import reaper
-from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity
+from awx.main.utils.common import get_mem_effective_capacity, get_corrected_memory, get_corrected_cpu, get_cpu_effective_capacity
+
+# ansible-runner
+from ansible_runner.utils.capacity import get_mem_in_bytes, get_cpu_count

 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -307,6 +311,41 @@ class WorkerPool(object):
                 logger.exception('could not kill {}'.format(worker.pid))


+def get_auto_max_workers():
+    """Method we normally rely on to get max_workers
+
+    Uses almost same logic as Instance.local_health_check
+    The important thing is to be MORE than Instance.capacity
+    so that the task-manager does not over-schedule this node
+
+    Ideally we would just use the capacity from the database plus reserve workers,
+    but this poses some bootstrap problems where OCP task containers
+    register themselves after startup
+    """
+    # Get memory from ansible-runner
+    total_memory_gb = get_mem_in_bytes()
+
+    # This may replace memory calculation with a user override
+    corrected_memory = get_corrected_memory(total_memory_gb)
+
+    # Get same number as max forks based on memory, this function takes memory as bytes
+    mem_capacity = get_mem_effective_capacity(corrected_memory, is_control_node=True)
+
+    # Follow same process for CPU capacity constraint
+    cpu_count = get_cpu_count()
+    corrected_cpu = get_corrected_cpu(cpu_count)
+    cpu_capacity = get_cpu_effective_capacity(corrected_cpu, is_control_node=True)
+
+    # Here is what is different from health checks,
+    auto_max = max(mem_capacity, cpu_capacity)
+
+    # add magic number of extra workers to ensure
+    # we have a few extra workers to run the heartbeat
+    auto_max += 7
+
+    return auto_max
+
+
 class AutoscalePool(WorkerPool):
     """
     An extended pool implementation that automatically scales workers up and
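A worked example of the sizing rule, with hypothetical capacities:

```python
# Hypothetical node: memory allows 57 forks, CPU allows 96 forks.
mem_capacity = 57
cpu_capacity = 96

# get_auto_max_workers() takes the larger constraint and adds 7 reserve
# workers so heartbeat-type tasks still have room to run.
auto_max = max(mem_capacity, cpu_capacity) + 7
assert auto_max == 103
```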
@@ -320,19 +359,7 @@ class AutoscalePool(WorkerPool):
         super(AutoscalePool, self).__init__(*args, **kwargs)

         if self.max_workers is None:
-            settings_absmem = getattr(settings, 'SYSTEM_TASK_ABS_MEM', None)
-            if settings_absmem is not None:
-                # There are 1073741824 bytes in a gigabyte. Convert bytes to gigabytes by dividing by 2**30
-                total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30
-            else:
-                total_memory_gb = (psutil.virtual_memory().total >> 30) + 1  # noqa: round up
-
-            # Get same number as max forks based on memory, this function takes memory as bytes
-            self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30)
-
-            # add magic prime number of extra workers to ensure
-            # we have a few extra workers to run the heartbeat
-            self.max_workers += 7
+            self.max_workers = get_auto_max_workers()

         # max workers can't be less than min_workers
         self.max_workers = max(self.min_workers, self.max_workers)
@@ -346,6 +373,9 @@ class AutoscalePool(WorkerPool):
         self.scale_up_ct = 0
         self.worker_count_max = 0

+        # last time we wrote current tasks, to avoid too much log spam
+        self.last_task_list_log = time.monotonic()
+
     def produce_subsystem_metrics(self, metrics_object):
         metrics_object.set('dispatcher_pool_scale_up_events', self.scale_up_ct)
         metrics_object.set('dispatcher_pool_active_task_count', sum(len(w.managed_tasks) for w in self.workers))
@@ -463,6 +493,14 @@ class AutoscalePool(WorkerPool):
             self.worker_count_max = new_worker_ct
         return ret

+    @staticmethod
+    def fast_task_serialization(current_task):
+        try:
+            return str(current_task.get('task')) + ' - ' + str(sorted(current_task.get('args', []))) + ' - ' + str(sorted(current_task.get('kwargs', {})))
+        except Exception:
+            # just make sure this does not make things worse
+            return str(current_task)
+
     def write(self, preferred_queue, body):
         if 'guid' in body:
             set_guid(body['guid'])
@@ -484,6 +522,15 @@ class AutoscalePool(WorkerPool):
                 if isinstance(body, dict):
                     task_name = body.get('task')
                     logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
+                    # Once every 10 seconds write out task list for debugging
+                    if time.monotonic() - self.last_task_list_log >= 10.0:
+                        task_counts = {}
+                        for worker in self.workers:
+                            task_slug = self.fast_task_serialization(worker.current_task)
+                            task_counts.setdefault(task_slug, 0)
+                            task_counts[task_slug] += 1
+                        logger.info(f'Running tasks by count:\n{json.dumps(task_counts, indent=2)}')
+                        self.last_task_list_log = time.monotonic()
             return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
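For reference, `fast_task_serialization` yields slugs of the form `task - args - kwargs`, which the 10-second logger aggregates into counts; a small illustration with made-up task data:

```python
# Made-up current_task payload; the slug format mirrors fast_task_serialization.
current_task = {'task': 'awx.main.tasks.system.cluster_node_heartbeat', 'args': [], 'kwargs': {}}
slug = str(current_task.get('task')) + ' - ' + str(sorted(current_task.get('args', []))) + ' - ' + str(sorted(current_task.get('kwargs', {})))
print(slug)  # awx.main.tasks.system.cluster_node_heartbeat - [] - []
```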
@@ -238,7 +238,7 @@ class AWXConsumerPG(AWXConsumerBase):
     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)

-        logger.info(f"Running worker {self.name} listening to queues {self.queues}")
+        logger.info(f"Running {self.name}, workers min={self.pool.min_workers} max={self.pool.max_workers}, listening to queues {self.queues}")
         init = False

         while True:
@@ -1,6 +1,9 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.

+# AIA: Primarily AI, Modified content, Human-initiated, Reviewed, Claude (Anthropic AI) via Cursor
+# AIA PAI Mc Hin R Claude Cursor - https://aiattribution.github.io/interpret-attribution
+
 # Python
 import json
 import logging
@@ -26,7 +29,151 @@ class CallbackQueueDispatcher(object):
     def __init__(self):
         self.queue = getattr(settings, 'CALLBACK_QUEUE', '')
         self.logger = logging.getLogger('awx.main.queue.CallbackQueueDispatcher')
-        self.connection = redis.Redis.from_url(settings.BROKER_URL)
+        self._broker_url = settings.BROKER_URL
+        self.connection = redis.Redis.from_url(self._broker_url)
+        self._connection_failures = 0
+        self._max_reconnect_attempts = 3
+        self._total_reconnections = 0
+        self._events_lost = 0
+    def _reconnect(self):
+        """
+        Attempt to reconnect to Redis after connection failure.
+
+        Returns:
+            bool: True if reconnection successful, False otherwise
+        """
+        try:
+            attempt = self._connection_failures + 1
+            self.logger.warning(
+                f"Redis reconnection attempt {attempt}/{self._max_reconnect_attempts} " f"(total reconnections this session: {self._total_reconnections})"
+            )
+
+            # Create new connection
+            self.connection = redis.Redis.from_url(self._broker_url)
+
+            # Verify connection works
+            self.connection.ping()
+
+            # Success
+            self._connection_failures = 0
+            self._total_reconnections += 1
+            self.logger.info(f"Successfully reconnected to Redis (session reconnections: {self._total_reconnections})")
+            return True
+
+        except Exception as e:
+            self._connection_failures += 1
+            self.logger.error(f"Redis reconnection failed (attempt {self._connection_failures}): {type(e).__name__}: {e}")
+            return False
     def dispatch(self, obj):
-        self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder))
+        """
+        Dispatch event to Redis queue with automatic reconnection on failure.
+
+        Handles BrokenPipeError and ConnectionError by attempting reconnection.
+        If all reconnection attempts fail, logs the event loss but allows job to continue.
+
+        Args:
+            obj: Event data to dispatch (dict or serializable object)
+        """
+        max_attempts = self._max_reconnect_attempts + 1
+        last_error = None
+
+        # Extract diagnostic info from event
+        event_type = 'unknown'
+        job_id = 'unknown'
+        if isinstance(obj, dict):
+            event_type = obj.get('event', obj.get('event_name', 'unknown'))
+            job_id = obj.get('job_id', obj.get('unified_job_id', 'unknown'))
+
+        for attempt in range(max_attempts):
+            try:
+                # Attempt to push event to Redis
+                self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder))
+
+                # Success - reset failure counter if this was a recovery
+                if self._connection_failures > 0:
+                    self.logger.info(f"Redis connection recovered after reconnection. " f"job_id={job_id}, event_type={event_type}")
+                    self._connection_failures = 0
+
+                return  # Successfully dispatched
+
+            except (BrokenPipeError, redis.exceptions.ConnectionError) as e:
+                last_error = e
+                error_type = type(e).__name__
+
+                self.logger.warning(f"Redis connection error during event dispatch " f"(attempt {attempt + 1}/{max_attempts}): {error_type}: {e}")
+
+                # Enhanced diagnostics
+                self.logger.warning(
+                    f"Failed event details: job_id={job_id}, event_type={event_type}, " f"queue={self.queue}, attempt={attempt + 1}/{max_attempts}"
+                )
+
+                if attempt < max_attempts - 1:
+                    # Try to reconnect before next attempt
+                    reconnected = self._reconnect()
+                    if reconnected:
+                        self.logger.info("Retrying event dispatch after successful reconnection")
+                    else:
+                        self.logger.warning(f"Reconnection failed, will retry dispatch anyway " f"(attempt {attempt + 2} coming)")
+                    # Continue to next attempt
+                    continue
+                else:
+                    # All attempts exhausted
+                    self._events_lost += 1
+                    self.logger.error(
+                        f"CRITICAL: Failed to dispatch event after {max_attempts} attempts. "
+                        f"Event will be lost. Total events lost this session: {self._events_lost}"
+                    )
+                    self.logger.error(
+                        f"DIAGNOSTIC INFO: "
+                        f"job_id={job_id}, "
+                        f"event_type={event_type}, "
+                        f"queue={self.queue}, "
+                        f"broker_url={self._broker_url}, "
+                        f"last_error={error_type}: {last_error}, "
+                        f"session_reconnections={self._total_reconnections}, "
+                        f"session_events_lost={self._events_lost}"
+                    )
+
+                    # IMPORTANT: Don't raise exception
+                    # Allow job to continue even though this event was lost
+                    # This prevents losing 17+ minutes of work due to event logging failure
+                    break
+
+            except Exception as e:
+                # Catch any other unexpected Redis errors
+                self.logger.error(f"Unexpected error dispatching event to Redis: {type(e).__name__}: {e}")
+                self.logger.error(f"Event context: job_id={job_id}, event_type={event_type}")
+                # Don't raise - allow job to continue
+                break
+    def health_check(self):
+        """
+        Check Redis connection health.
+
+        Returns:
+            bool: True if connection is healthy, False otherwise
+        """
+        try:
+            self.connection.ping()
+            return True
+        except Exception as e:
+            self.logger.warning(f"Redis health check failed: {type(e).__name__}: {e}")
+            return False
+
+    def get_connection_stats(self):
+        """
+        Get Redis connection statistics for monitoring.
+
+        Returns:
+            dict: Connection statistics
+        """
+        return {
+            'broker_url': self._broker_url,
+            'queue': self.queue,
+            'connected': self.health_check(),
+            'connection_failures': self._connection_failures,
+            'total_reconnections': self._total_reconnections,
+            'events_lost': self._events_lost,
+        }
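A short usage sketch for the new monitoring hooks; the calling context is assumed, but the methods and stat keys are the ones added above:

```python
# Hedged usage sketch: poll the dispatcher's Redis connection from operator code.
from awx.main.queue import CallbackQueueDispatcher

dispatcher = CallbackQueueDispatcher()
if not dispatcher.health_check():
    stats = dispatcher.get_connection_stats()
    print('Redis unhealthy: {connection_failures} recent failures, '
          '{total_reconnections} reconnections, {events_lost} events lost'.format(**stats))
```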
@@ -77,7 +77,14 @@ def build_indirect_host_data(job: Job, job_event_queries: dict[str, dict[str, st
         if jq_str_for_event not in compiled_jq_expressions:
             compiled_jq_expressions[resolved_action] = jq.compile(jq_str_for_event)
         compiled_jq = compiled_jq_expressions[resolved_action]
-        for data in compiled_jq.input(event.event_data['res']).all():
+
+        try:
+            data_source = compiled_jq.input(event.event_data['res']).all()
+        except Exception as e:
+            logger.warning(f'error for module {resolved_action} and data {event.event_data["res"]}: {e}')
+            continue
+
+        for data in data_source:
             # From this jq result (specific to a single Ansible module), get index information about this host record
             if not data.get('canonical_facts'):
                 if not facts_missing_logged:
awx/main/tests/data/sleep_task.py (new file): 17 lines
@@ -0,0 +1,17 @@
+import time
+import logging
+
+from awx.main.dispatch import get_task_queuename
+from awx.main.dispatch.publish import task
+
+
+logger = logging.getLogger(__name__)
+
+
+@task(queue=get_task_queuename)
+def sleep_task(seconds=10, log=False):
+    if log:
+        logger.info('starting sleep_task')
+    time.sleep(seconds)
+    if log:
+        logger.info('finished sleep_task')
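Assuming the dispatcher's usual publish helpers (the `task` decorator normally exposes `apply_async`; not verified here), the new test task can be queued like so:

```python
# Hedged sketch: queue the test task through the dispatcher's assumed helper.
from awx.main.tests.data.sleep_task import sleep_task

sleep_task.apply_async(kwargs={'seconds': 30, 'log': True})
```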
@@ -87,8 +87,8 @@ def mock_analytic_post():
            {
                'REDHAT_USERNAME': 'redhat_user',
                'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-                'SUBSCRIPTIONS_USERNAME': '',
-                'SUBSCRIPTIONS_PASSWORD': '',
+                'SUBSCRIPTIONS_CLIENT_ID': '',
+                'SUBSCRIPTIONS_CLIENT_SECRET': '',
            },
            True,
            ('redhat_user', 'redhat_pass'),
@@ -98,8 +98,8 @@ def mock_analytic_post():
            {
                'REDHAT_USERNAME': None,
                'REDHAT_PASSWORD': None,
-                'SUBSCRIPTIONS_USERNAME': 'subs_user',
-                'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
+                'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
+                'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
            },
            True,
            ('subs_user', 'subs_pass'),
@@ -109,8 +109,8 @@ def mock_analytic_post():
            {
                'REDHAT_USERNAME': '',
                'REDHAT_PASSWORD': '',
-                'SUBSCRIPTIONS_USERNAME': 'subs_user',
-                'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
+                'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
+                'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
            },
            True,
            ('subs_user', 'subs_pass'),
@@ -120,8 +120,8 @@ def mock_analytic_post():
            {
                'REDHAT_USERNAME': '',
                'REDHAT_PASSWORD': '',
-                'SUBSCRIPTIONS_USERNAME': '',
-                'SUBSCRIPTIONS_PASSWORD': '',
+                'SUBSCRIPTIONS_CLIENT_ID': '',
+                'SUBSCRIPTIONS_CLIENT_SECRET': '',
            },
            False,
            None,  # No request should be made
@@ -131,8 +131,8 @@ def mock_analytic_post():
            {
                'REDHAT_USERNAME': '',
                'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-                'SUBSCRIPTIONS_USERNAME': 'subs_user',
-                'SUBSCRIPTIONS_PASSWORD': '',
+                'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
+                'SUBSCRIPTIONS_CLIENT_SECRET': '',
            },
            False,
            None,  # Invalid, no request should be made
@@ -97,8 +97,8 @@ class TestAnalyticsGenericView:
                    'INSIGHTS_TRACKING_STATE': True,
                    'REDHAT_USERNAME': 'redhat_user',
                    'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-                    'SUBSCRIPTIONS_USERNAME': '',
-                    'SUBSCRIPTIONS_PASSWORD': '',
+                    'SUBSCRIPTIONS_CLIENT_ID': '',
+                    'SUBSCRIPTIONS_CLIENT_SECRET': '',
                },
                ('redhat_user', 'redhat_pass'),
                None,
@@ -109,8 +109,8 @@ class TestAnalyticsGenericView:
                    'INSIGHTS_TRACKING_STATE': True,
                    'REDHAT_USERNAME': '',
                    'REDHAT_PASSWORD': '',
-                    'SUBSCRIPTIONS_USERNAME': 'subs_user',
-                    'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
+                    'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
+                    'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
                },
                ('subs_user', 'subs_pass'),
                None,
@@ -121,8 +121,8 @@ class TestAnalyticsGenericView:
                    'INSIGHTS_TRACKING_STATE': True,
                    'REDHAT_USERNAME': '',
                    'REDHAT_PASSWORD': '',
-                    'SUBSCRIPTIONS_USERNAME': '',
-                    'SUBSCRIPTIONS_PASSWORD': '',
+                    'SUBSCRIPTIONS_CLIENT_ID': '',
+                    'SUBSCRIPTIONS_CLIENT_SECRET': '',
                },
                None,
                ERROR_MISSING_USER,
@@ -133,8 +133,8 @@ class TestAnalyticsGenericView:
                    'INSIGHTS_TRACKING_STATE': True,
                    'REDHAT_USERNAME': 'redhat_user',
                    'REDHAT_PASSWORD': 'redhat_pass',  # NOSONAR
-                    'SUBSCRIPTIONS_USERNAME': 'subs_user',
-                    'SUBSCRIPTIONS_PASSWORD': 'subs_pass',  # NOSONAR
+                    'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',
+                    'SUBSCRIPTIONS_CLIENT_SECRET': 'subs_pass',  # NOSONAR
                },
                ('redhat_user', 'redhat_pass'),
                None,
@@ -145,8 +145,8 @@ class TestAnalyticsGenericView:
                    'INSIGHTS_TRACKING_STATE': True,
                    'REDHAT_USERNAME': '',
                    'REDHAT_PASSWORD': '',
-                    'SUBSCRIPTIONS_USERNAME': 'subs_user',  # NOSONAR
-                    'SUBSCRIPTIONS_PASSWORD': '',
+                    'SUBSCRIPTIONS_CLIENT_ID': 'subs_user',  # NOSONAR
+                    'SUBSCRIPTIONS_CLIENT_SECRET': '',
                },
                None,
                ERROR_MISSING_PASSWORD,
@@ -34,40 +34,18 @@ def test_wrapup_does_send_notifications(mocker):
     mock.assert_called_once_with('succeeded')


-class FakeRedis:
-    def keys(self, *args, **kwargs):
-        return []
-
-    def set(self):
-        pass
-
-    def get(self):
-        return None
-
-    @classmethod
-    def from_url(cls, *args, **kwargs):
-        return cls()
-
-    def pipeline(self):
-        return self
-
-
 class TestCallbackBrokerWorker(TransactionTestCase):
     @pytest.fixture(autouse=True)
-    def turn_off_websockets(self):
+    def turn_off_websockets_and_redis(self, fake_redis):
         with mock.patch('awx.main.dispatch.worker.callback.emit_event_detail', lambda *a, **kw: None):
             yield

-    def get_worker(self):
-        with mock.patch('redis.Redis', new=FakeRedis):  # turn off redis stuff
-            return CallbackBrokerWorker()
-
     def event_create_kwargs(self):
         inventory_update = InventoryUpdate.objects.create(source='file', inventory_source=InventorySource.objects.create(source='file'))
         return dict(inventory_update=inventory_update, created=inventory_update.created)

     def test_flush_with_valid_event(self):
-        worker = self.get_worker()
+        worker = CallbackBrokerWorker()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
         worker.buff = {InventoryUpdateEvent: events}
         worker.flush()
@@ -75,7 +53,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1

     def test_flush_with_invalid_event(self):
-        worker = self.get_worker()
+        worker = CallbackBrokerWorker()
         kwargs = self.event_create_kwargs()
         events = [
             InventoryUpdateEvent(uuid=str(uuid4()), stdout='good1', **kwargs),
@@ -90,7 +68,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert worker.buff == {InventoryUpdateEvent: [events[1]]}

     def test_duplicate_key_not_saved_twice(self):
-        worker = self.get_worker()
+        worker = CallbackBrokerWorker()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
         worker.buff = {InventoryUpdateEvent: events.copy()}
         worker.flush()
@@ -104,7 +82,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert worker.buff.get(InventoryUpdateEvent, []) == []

     def test_give_up_on_bad_event(self):
-        worker = self.get_worker()
+        worker = CallbackBrokerWorker()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), counter=-2, **self.event_create_kwargs())]
         worker.buff = {InventoryUpdateEvent: events.copy()}
@@ -117,7 +95,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 0  # sanity

     def test_flush_with_empty_buffer(self):
-        worker = self.get_worker()
+        worker = CallbackBrokerWorker()
         worker.buff = {InventoryUpdateEvent: []}
         with mock.patch.object(InventoryUpdateEvent.objects, 'bulk_create') as flush_mock:
             worker.flush()
@@ -127,7 +105,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
         # In postgres, text fields reject NUL character, 0x00
         # tests use sqlite3 which will not raise an error
         # but we can still test that it is sanitized before saving
-        worker = self.get_worker()
+        worker = CallbackBrokerWorker()
         kwargs = self.event_create_kwargs()
         events = [InventoryUpdateEvent(uuid=str(uuid4()), stdout="\x00", **kwargs)]
         assert "\x00" in events[0].stdout  # sanity
@@ -63,6 +63,33 @@ def swagger_autogen(requests=__SWAGGER_REQUESTS__):
     return requests


+class FakeRedis:
+    def keys(self, *args, **kwargs):
+        return []
+
+    def set(self):
+        pass
+
+    def get(self):
+        return None
+
+    @classmethod
+    def from_url(cls, *args, **kwargs):
+        return cls()
+
+    def pipeline(self):
+        return self
+
+    def ping(self):
+        return
+
+
+@pytest.fixture
+def fake_redis():
+    with mock.patch('redis.Redis', new=FakeRedis):  # turn off redis stuff
+        yield
+
+
 @pytest.fixture
 def user():
     def u(name, is_superuser=False):
@@ -3,6 +3,10 @@ import pytest
 # AWX
 from awx.main.ha import is_ha_environment
 from awx.main.models.ha import Instance
+from awx.main.dispatch.pool import get_auto_max_workers
+
+# Django
+from django.test.utils import override_settings


 @pytest.mark.django_db
@@ -17,3 +21,25 @@ def test_db_localhost():
     Instance.objects.create(hostname='foo', node_type='hybrid')
     Instance.objects.create(hostname='bar', node_type='execution')
     assert is_ha_environment() is False
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize(
+    'settings',
+    [
+        dict(SYSTEM_TASK_ABS_MEM='16Gi', SYSTEM_TASK_ABS_CPU='24', SYSTEM_TASK_FORKS_MEM=400, SYSTEM_TASK_FORKS_CPU=4),
+        dict(SYSTEM_TASK_ABS_MEM='124Gi', SYSTEM_TASK_ABS_CPU='2', SYSTEM_TASK_FORKS_MEM=None, SYSTEM_TASK_FORKS_CPU=None),
+    ],
+    ids=['cpu_dominated', 'memory_dominated'],
+)
+def test_dispatcher_max_workers_reserve(settings, fake_redis):
+    """This tests that the dispatcher max_workers matches instance capacity
+
+    Assumes capacity_adjustment is 1,
+    plus reserve worker count
+    """
+    with override_settings(**settings):
+        i = Instance.objects.create(hostname='test-1', node_type='hybrid')
+        i.local_health_check()
+
+        assert get_auto_max_workers() == i.capacity + 7, (i.cpu, i.memory, i.cpu_capacity, i.mem_capacity)
awx/main/tests/unit/utils/test_licensing.py (new file): 37 lines
@@ -0,0 +1,37 @@
+import json
+from http import HTTPStatus
+from unittest.mock import patch
+
+from requests import Response
+
+from awx.main.utils.licensing import Licenser
+
+
+def test_rhsm_licensing():
+    def mocked_requests_get(*args, **kwargs):
+        assert kwargs['verify'] == True
+        response = Response()
+        subs = json.dumps({'body': []})
+        response.status_code = HTTPStatus.OK
+        response._content = bytes(subs, 'utf-8')
+        return response
+
+    licenser = Licenser()
+    with patch('awx.main.utils.analytics_proxy.OIDCClient.make_request', new=mocked_requests_get):
+        subs = licenser.get_rhsm_subs('localhost', 'admin', 'admin')
+        assert subs == []
+
+
+def test_satellite_licensing():
+    def mocked_requests_get(*args, **kwargs):
+        assert kwargs['verify'] == True
+        response = Response()
+        subs = json.dumps({'results': []})
+        response.status_code = HTTPStatus.OK
+        response._content = bytes(subs, 'utf-8')
+        return response
+
+    licenser = Licenser()
+    with patch('requests.get', new=mocked_requests_get):
+        subs = licenser.get_satellite_subs('localhost', 'admin', 'admin')
+        assert subs == []
@@ -23,7 +23,7 @@ class TokenError(requests.RequestException):
    try:
        client = OIDCClient(...)
        client.make_request(...)
-    except TokenGenerationError as e:
+    except TokenError as e:
        print(f"Token generation failed due to {e.__cause__}")
    except requests.RequestException:
        print("API request failed")
@@ -102,13 +102,15 @@ class OIDCClient:
        self,
        client_id: str,
        client_secret: str,
-        token_url: str,
-        scopes: list[str],
+        token_url: str = DEFAULT_OIDC_TOKEN_ENDPOINT,
+        scopes: list[str] = None,
        base_url: str = '',
    ) -> None:
        self.client_id: str = client_id
        self.client_secret: str = client_secret
        self.token_url: str = token_url
+        if scopes is None:
+            scopes = ['api.console']
        self.scopes = scopes
        self.base_url: str = base_url
        self.token: Optional[Token] = None
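The net effect of the signature change: with `client_id`/`client_secret` defined in the caller, the two calls below now construct equivalent clients, which is what lets every call site in this diff drop the endpoint and scope arguments.

```python
# Before this change, callers had to spell out the defaults:
client = OIDCClient(client_id, client_secret, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])

# After it, __init__ fills them in:
client = OIDCClient(client_id, client_secret)
```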
@@ -38,6 +38,7 @@ from django.utils.translation import gettext_lazy as _
 from awx_plugins.interfaces._temporary_private_licensing_api import detect_server_product_name

 from awx.main.constants import SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS
+from awx.main.utils.analytics_proxy import OIDCClient

 MAX_INSTANCES = 9999999
@@ -228,37 +229,38 @@ class Licenser(object):
        host = getattr(settings, 'REDHAT_CANDLEPIN_HOST', None)

        if not user:
-            raise ValueError('subscriptions_username is required')
+            raise ValueError('subscriptions_client_id is required')

        if not pw:
-            raise ValueError('subscriptions_password is required')
+            raise ValueError('subscriptions_client_secret is required')

        if host and user and pw:
            if 'subscription.rhsm.redhat.com' in host:
-                json = self.get_rhsm_subs(host, user, pw)
+                json = self.get_rhsm_subs(settings.SUBSCRIPTIONS_RHSM_URL, user, pw)
            else:
                json = self.get_satellite_subs(host, user, pw)
            return self.generate_license_options_from_entitlements(json)
        return []

-    def get_rhsm_subs(self, host, user, pw):
-        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
-        json = []
-        try:
-            subs = requests.get('/'.join([host, 'subscription/users/{}/owners'.format(user)]), verify=verify, auth=(user, pw))
-        except requests.exceptions.ConnectionError as error:
-            raise error
-        except OSError as error:
-            raise OSError(
-                'Unable to open certificate bundle {}. Check that the service is running on Red Hat Enterprise Linux.'.format(verify)
-            ) from error  # noqa
-        subs.raise_for_status()
+    def get_rhsm_subs(self, host, client_id, client_secret):
+        client = OIDCClient(client_id, client_secret)
+        subs = client.make_request(
+            'GET',
+            host,
+            verify=True,
+            timeout=(31, 31),
+        )

-        for sub in subs.json():
-            resp = requests.get('/'.join([host, 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])]), verify=verify, auth=(user, pw))
-            resp.raise_for_status()
-            json.extend(resp.json())
-        return json
+        subs.raise_for_status()
+        subs_formatted = []
+        for sku in subs.json()['body']:
+            sku_data = {k: v for k, v in sku.items() if k != 'subscriptions'}
+            for sub in sku['subscriptions']:
+                sub_data = sku_data.copy()
+                sub_data['subscriptions'] = sub
+                subs_formatted.append(sub_data)
+
+        return subs_formatted

    def get_satellite_subs(self, host, user, pw):
        port = None
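A worked example of the flattening loop in the new `get_rhsm_subs`, run against a made-up payload in the shape the code expects (one SKU record carrying a list of subscriptions; all field values are illustrative):

```python
# Made-up console.redhat.com-style payload; values are illustrative only.
payload = {
    'body': [
        {
            'productId': 'MCT3695',
            'name': 'Red Hat Ansible Automation Platform',
            'subscriptions': [
                {'number': '111', 'endDate': '2026-01-01T00:00:00.000Z', 'quantity': '100'},
                {'number': '222', 'endDate': '2027-01-01T00:00:00.000Z', 'quantity': '50'},
            ],
        }
    ]
}

subs_formatted = []
for sku in payload['body']:
    sku_data = {k: v for k, v in sku.items() if k != 'subscriptions'}
    for sub in sku['subscriptions']:
        sub_data = sku_data.copy()
        sub_data['subscriptions'] = sub
        subs_formatted.append(sub_data)

# One flat record per subscription, each repeating the SKU-level fields.
assert len(subs_formatted) == 2
assert subs_formatted[0]['subscriptions']['number'] == '111'
```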
@@ -267,7 +269,7 @@ class Licenser(object):
            port = str(self.config.get("server", "port"))
        except Exception as e:
            logger.exception('Unable to read rhsm config to get ca_cert location. {}'.format(str(e)))
-        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
+        verify = True
        if port:
            host = ':'.join([host, port])
        json = []
@@ -314,20 +316,11 @@ class Licenser(object):
            return False
        return True

-    def is_appropriate_sub(self, sub):
-        if sub['activeSubscription'] is False:
-            return False
-        # Products that contain Ansible Tower
-        products = sub.get('providedProducts', [])
-        if any(product.get('productId') == '480' for product in products):
-            return True
-        return False
-
    def generate_license_options_from_entitlements(self, json):
        from dateutil.parser import parse

        ValidSub = collections.namedtuple(
-            'ValidSub', 'sku name support_level end_date trial developer_license quantity pool_id satellite subscription_id account_number usage'
+            'ValidSub', 'sku name support_level end_date trial developer_license quantity satellite subscription_id account_number usage'
        )
        valid_subs = []
        for sub in json:
@@ -335,10 +328,14 @@ class Licenser(object):
            if satellite:
                is_valid = self.is_appropriate_sat_sub(sub)
            else:
-                is_valid = self.is_appropriate_sub(sub)
+                # the list of subs from console.redhat.com are already valid based on the query params we provided
+                is_valid = True
            if is_valid:
                try:
-                    end_date = parse(sub.get('endDate'))
+                    if satellite:
+                        end_date = parse(sub.get('endDate'))
+                    else:
+                        end_date = parse(sub['subscriptions']['endDate'])
                except Exception:
                    continue
                now = datetime.utcnow()
@@ -346,44 +343,50 @@ class Licenser(object):
                if end_date < now:
                    # If the sub has a past end date, skip it
                    continue
-                try:
-                    quantity = int(sub['quantity'])
-                    if quantity == -1:
-                        # effectively, unlimited
-                        quantity = MAX_INSTANCES
-                except Exception:
-                    continue
-
-                sku = sub['productId']
-                trial = sku.startswith('S')  # i.e.,, SER/SVC
                developer_license = False
                support_level = ''
                usage = ''
-                pool_id = sub['id']
-                subscription_id = sub['subscriptionId']
-                account_number = sub['accountNumber']
+                account_number = ''
+                usage = sub.get('usage', '')
+                if satellite:
+                    try:
+                        quantity = int(sub['quantity'])
+                    except Exception:
+                        continue
+                    sku = sub['productId']
+                    subscription_id = sub['subscriptionId']
+                    sub_name = sub['productName']
+                    support_level = sub['support_level']
+                    usage = sub['usage']
+                    account_number = sub['accountNumber']
+                else:
+                    for attr in sub.get('productAttributes', []):
+                        if attr.get('name') == 'support_level':
+                            support_level = attr.get('value')
+                        elif attr.get('name') == 'usage':
+                            usage = attr.get('value')
+                        elif attr.get('name') == 'ph_product_name' and attr.get('value') == 'RHEL Developer':
+                            developer_license = True
+                    try:
+                        if sub['capacity']['name'] == "Nodes":
+                            quantity = int(sub['capacity']['quantity']) * int(sub['subscriptions']['quantity'])
+                        else:
+                            continue
+                    except Exception:
+                        continue
+                    sku = sub['sku']
+                    sub_name = sub['name']
+                    support_level = sub['serviceLevel']
+                    subscription_id = sub['subscriptions']['number']
+                    if sub.get('name') == 'RHEL Developer':
+                        developer_license = True
+
+                if quantity == -1:
+                    # effectively, unlimited
+                    quantity = MAX_INSTANCES
+                trial = sku.startswith('S')  # i.e., SER/SVC

                valid_subs.append(
                    ValidSub(
                        sku,
-                        sub['productName'],
+                        sub_name,
                        support_level,
                        end_date,
                        trial,
                        developer_license,
                        quantity,
-                        pool_id,
                        satellite,
                        subscription_id,
                        account_number,
@@ -414,7 +417,6 @@ class Licenser(object):
            license._attrs['satellite'] = satellite
            license._attrs['valid_key'] = True
            license.update(license_date=int(sub.end_date.strftime('%s')))
-            license.update(pool_id=sub.pool_id)
            license.update(subscription_id=sub.subscription_id)
            license.update(account_number=sub.account_number)
            licenses.append(license._attrs.copy())
@@ -964,6 +964,9 @@ CLUSTER_HOST_ID = socket.gethostname()
 # - 'unique_managed_hosts': Compliant = automated - deleted hosts (using /api/v2/host_metrics/)
 SUBSCRIPTION_USAGE_MODEL = ''

+# Default URL and query params for obtaining valid AAP subscriptions
+SUBSCRIPTIONS_RHSM_URL = 'https://console.redhat.com/api/rhsm/v2/products?include=providedProducts&oids=480&status=Active'
+
 # Host metrics cleanup - last time of the task/command run
 CLEANUP_HOST_METRICS_LAST_TS = None
 # Host metrics cleanup - minimal interval between two cleanups in days
@@ -31,9 +31,9 @@ options:
      unlicensed or trial licensed. When force=true, the license is always applied.
    type: bool
    default: 'False'
-  pool_id:
+  subscription_id:
    description:
-      - Red Hat or Red Hat Satellite pool_id to attach to
+      - Red Hat or Red Hat Satellite subscription_id to attach to
    required: False
    type: str
  state:
@@ -57,9 +57,9 @@ EXAMPLES = '''
    username: "my_satellite_username"
    password: "my_satellite_password"

-- name: Attach to a pool (requires fetching subscriptions at least once before)
+- name: Attach to a subscription (requires fetching subscriptions at least once before)
  license:
-    pool_id: 123456
+    subscription_id: 123456

- name: Remove license
  license:
@@ -75,14 +75,14 @@ def main():
    module = ControllerAPIModule(
        argument_spec=dict(
            manifest=dict(type='str', required=False),
-            pool_id=dict(type='str', required=False),
+            subscription_id=dict(type='str', required=False),
            force=dict(type='bool', default=False),
            state=dict(choices=['present', 'absent'], default='present'),
        ),
        required_if=[
-            ['state', 'present', ['manifest', 'pool_id'], True],
+            ['state', 'present', ['manifest', 'subscription_id'], True],
        ],
-        mutually_exclusive=[("manifest", "pool_id")],
+        mutually_exclusive=[("manifest", "subscription_id")],
    )

    json_output = {'changed': False}
@@ -124,7 +124,7 @@ def main():
    if module.params.get('manifest', None):
        module.post_endpoint('config', data={'manifest': manifest.decode()})
    else:
-        module.post_endpoint('config/attach', data={'pool_id': module.params.get('pool_id')})
+        module.post_endpoint('config/attach', data={'subscription_id': module.params.get('subscription_id')})

    module.exit_json(**json_output)
@@ -20,15 +20,15 @@ description:
  - Get subscriptions available to Automation Platform Controller. See
    U(https://www.ansible.com/tower) for an overview.
options:
-  username:
+  client_id:
    description:
-      - Red Hat or Red Hat Satellite username to get available subscriptions.
+      - Red Hat service account client ID or Red Hat Satellite username to get available subscriptions.
      - The credentials you use will be stored for future use in retrieving renewal or expanded subscriptions
    required: True
    type: str
-  password:
+  client_secret:
    description:
-      - Red Hat or Red Hat Satellite password to get available subscriptions.
+      - Red Hat service account client secret or Red Hat Satellite password to get available subscriptions.
      - The credentials you use will be stored for future use in retrieving renewal or expanded subscriptions
    required: True
    type: str
@@ -53,13 +53,13 @@ subscriptions:
EXAMPLES = '''
- name: Get subscriptions
  subscriptions:
-    username: "my_username"
-    password: "My Password"
+    client_id: "c6bd7594-d776-46e5-8156-6d17af147479"
+    client_secret: "MO9QUvoOZ5fc5JQKXoTch1AsTLI7nFsZ"

- name: Get subscriptions with a filter
  subscriptions:
-    username: "my_username"
-    password: "My Password"
+    client_id: "c6bd7594-d776-46e5-8156-6d17af147479"
+    client_secret: "MO9QUvoOZ5fc5JQKXoTch1AsTLI7nFsZ"
    filters:
      product_name: "Red Hat Ansible Automation Platform"
      support_level: "Self-Support"
@@ -72,8 +72,8 @@ def main():

    module = ControllerAPIModule(
        argument_spec=dict(
-            username=dict(type='str', required=True),
-            password=dict(type='str', no_log=True, required=True),
+            client_id=dict(type='str', required=True),
+            client_secret=dict(type='str', no_log=True, required=True),
            filters=dict(type='dict', required=False, default={}),
        ),
    )
@@ -82,8 +82,8 @@ def main():

    # Check if Tower is already licensed
    post_data = {
-        'subscriptions_password': module.params.get('password'),
-        'subscriptions_username': module.params.get('username'),
+        'subscriptions_client_secret': module.params.get('client_secret'),
+        'subscriptions_client_id': module.params.get('client_id'),
    }
    all_subscriptions = module.post_endpoint('config/subscriptions', data=post_data)['json']
    json_output['subscriptions'] = []
licenses/portalocker.txt (new file): 11 lines
@@ -0,0 +1,11 @@
+Copyright 2022 Rick van Hattem
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
licenses/psycopg-3.2.3.tar.gz (new binary file): binary file not shown
licenses/uwsgi-2.0.28.tar.gz (new binary file): binary file not shown
@@ -54,7 +54,7 @@ python-tss-sdk>=1.2.1
 pyyaml>=6.0.2  # require packing fix for cython 3 or higher
 pyzstd  # otel collector log file compression library
 receptorctl
-sqlparse>=0.5.2
+sqlparse>=0.4.4  # Required by django https://github.com/ansible/awx/security/dependabot/96
 redis[hiredis]
 requests
 slack-sdk
@@ -1,13 +1,13 @@
|
||||
adal==1.2.7
|
||||
# via msrestazure
|
||||
aiohappyeyeballs==2.6.1
|
||||
aiohappyeyeballs==2.4.4
|
||||
# via aiohttp
|
||||
aiohttp==3.11.16
|
||||
aiohttp==3.11.11
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# aiohttp-retry
|
||||
# twilio
|
||||
aiohttp-retry==2.9.1
|
||||
aiohttp-retry==2.8.3
|
||||
# via twilio
|
||||
aiosignal==1.3.2
|
||||
# via aiohttp
|
||||
@@ -25,9 +25,9 @@ asgiref==3.8.1
|
||||
# django
|
||||
# django-ansible-base
|
||||
# django-cors-headers
|
||||
asn1==3.0.0
|
||||
asn1==2.7.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
attrs==25.3.0
|
||||
attrs==24.3.0
|
||||
# via
|
||||
# aiohttp
|
||||
# jsonschema
|
||||
@@ -46,14 +46,14 @@ awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.g
# via
# -r /awx_devel/requirements/requirements_git.txt
# awx-plugins-core
azure-core==1.33.0
azure-core==1.32.0
# via
# azure-identity
# azure-keyvault-certificates
# azure-keyvault-keys
# azure-keyvault-secrets
# msrest
azure-identity==1.21.0
azure-identity==1.19.0
# via -r /awx_devel/requirements/requirements.in
azure-keyvault==4.2.0
# via -r /awx_devel/requirements/requirements.in
@@ -65,14 +65,14 @@ azure-keyvault-secrets==4.9.0
# via azure-keyvault
backports-tarfile==1.2.0
# via jaraco-context
boto3==1.37.34
boto3==1.35.96
# via -r /awx_devel/requirements/requirements.in
botocore==1.37.34
botocore==1.35.96
# via
# -r /awx_devel/requirements/requirements.in
# boto3
# s3transfer
cachetools==5.5.2
cachetools==5.5.0
# via google-auth
# git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
# via
@@ -84,7 +84,7 @@ cffi==1.17.1
# via
# cryptography
# pynacl
channels==4.2.2
channels==4.2.0
# via
# -r /awx_devel/requirements/requirements.in
# channels-redis
@@ -109,11 +109,11 @@ cryptography==41.0.7
# pyjwt
# pyopenssl
# service-identity
cython==3.0.12
cython==3.0.11
# via -r /awx_devel/requirements/requirements.in
daphne==4.1.2
# via -r /awx_devel/requirements/requirements.in
deprecated==1.2.18
deprecated==1.2.15
# via
# opentelemetry-api
# opentelemetry-exporter-otlp-proto-grpc
@@ -138,19 +138,19 @@ django==4.2.20
# djangorestframework
# django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
# via -r /awx_devel/requirements/requirements_git.txt
django-cors-headers==4.7.0
django-cors-headers==4.6.0
# via -r /awx_devel/requirements/requirements.in
django-crum==0.7.9
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
django-extensions==4.1
django-extensions==3.2.3
# via -r /awx_devel/requirements/requirements.in
django-flags==5.0.13
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
django-guid==3.5.1
django-guid==3.5.0
# via -r /awx_devel/requirements/requirements.in
django-oauth-toolkit==1.7.1
# via -r /awx_devel/requirements/requirements.in
@@ -158,7 +158,7 @@ django-polymorphic==3.1.0
# via -r /awx_devel/requirements/requirements.in
django-solo==2.4.0
# via -r /awx_devel/requirements/requirements.in
djangorestframework==3.16.0
djangorestframework==3.15.2
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
@@ -172,7 +172,7 @@ dynaconf==3.2.10
# django-ansible-base
enum-compat==0.0.3
# via asn1
filelock==3.18.0
filelock==3.16.1
# via -r /awx_devel/requirements/requirements.in
frozenlist==1.5.0
# via
@@ -182,13 +182,13 @@ gitdb==4.0.12
# via gitpython
gitpython==3.1.44
# via -r /awx_devel/requirements/requirements.in
google-auth==2.39.0
google-auth==2.37.0
# via kubernetes
googleapis-common-protos==1.70.0
googleapis-common-protos==1.66.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
grpcio==1.71.0
grpcio==1.69.0
# via
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -204,7 +204,7 @@ idna==3.10
# requests
# twisted
# yarl
importlib-metadata==8.6.1
importlib-metadata==8.5.0
# via opentelemetry-api
importlib-resources==6.5.2
# via irc
@@ -237,7 +237,7 @@ jaraco-text==4.0.0
# via
# irc
# jaraco-collections
jinja2==3.1.6
jinja2==3.1.5
# via -r /awx_devel/requirements/requirements.in
jmespath==1.0.1
# via
@@ -245,7 +245,7 @@ jmespath==1.0.1
# botocore
jq==1.8.0
# via -r /awx_devel/requirements/requirements.in
json-log-formatter==1.1.1
json-log-formatter==1.1
# via -r /awx_devel/requirements/requirements.in
jsonschema==4.23.0
# via -r /awx_devel/requirements/requirements.in
@@ -253,27 +253,27 @@ jsonschema-specifications==2024.10.1
# via jsonschema
jwcrypto==1.5.6
# via django-oauth-toolkit
kubernetes==32.0.1
kubernetes==31.0.0
# via openshift
lockfile==0.12.2
# via python-daemon
markdown==3.8
markdown==3.7
# via -r /awx_devel/requirements/requirements.in
markupsafe==3.0.2
# via jinja2
maturin==1.8.3
maturin==1.8.1
# via -r /awx_devel/requirements/requirements.in
more-itertools==10.6.0
more-itertools==10.5.0
# via
# irc
# jaraco-functools
# jaraco-stream
# jaraco-text
msal==1.32.0
msal==1.31.1
# via
# azure-identity
# msal-extensions
msal-extensions==1.3.1
msal-extensions==1.2.0
# via azure-identity
msgpack==1.1.0
# via
@@ -283,7 +283,7 @@ msrest==0.7.1
# via msrestazure
msrestazure==0.6.4.post1
# via -r /awx_devel/requirements/requirements.in
multidict==6.4.3
multidict==6.1.0
# via
# aiohttp
# yarl
@@ -294,7 +294,7 @@ oauthlib==3.2.2
# requests-oauthlib
openshift==0.13.2
# via -r /awx_devel/requirements/requirements.in
opentelemetry-api==1.32.0
opentelemetry-api==1.29.0
# via
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -303,31 +303,31 @@ opentelemetry-api==1.32.0
# opentelemetry-instrumentation-logging
# opentelemetry-sdk
# opentelemetry-semantic-conventions
opentelemetry-exporter-otlp==1.32.0
opentelemetry-exporter-otlp==1.29.0
# via -r /awx_devel/requirements/requirements.in
opentelemetry-exporter-otlp-proto-common==1.32.0
opentelemetry-exporter-otlp-proto-common==1.29.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.32.0
opentelemetry-exporter-otlp-proto-grpc==1.29.0
# via opentelemetry-exporter-otlp
opentelemetry-exporter-otlp-proto-http==1.32.0
opentelemetry-exporter-otlp-proto-http==1.29.0
# via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.53b0
opentelemetry-instrumentation==0.50b0
# via opentelemetry-instrumentation-logging
opentelemetry-instrumentation-logging==0.53b0
opentelemetry-instrumentation-logging==0.50b0
# via -r /awx_devel/requirements/requirements.in
opentelemetry-proto==1.32.0
opentelemetry-proto==1.29.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.32.0
opentelemetry-sdk==1.29.0
# via
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.53b0
opentelemetry-semantic-conventions==0.50b0
# via
# opentelemetry-instrumentation
# opentelemetry-sdk
@@ -342,19 +342,21 @@ pexpect==4.7.0
# ansible-runner
pkgconfig==1.5.5
# via -r /awx_devel/requirements/requirements.in
portalocker==2.10.1
# via msal-extensions
prometheus-client==0.21.1
# via -r /awx_devel/requirements/requirements.in
propcache==0.3.1
propcache==0.2.1
# via
# aiohttp
# yarl
protobuf==5.29.4
protobuf==5.29.3
# via
# googleapis-common-protos
# opentelemetry-proto
psutil==7.0.0
psutil==6.1.1
# via -r /awx_devel/requirements/requirements.in
psycopg==3.2.6
psycopg==3.2.3
# via -r /awx_devel/requirements/requirements.in
ptyprocess==0.7.0
# via pexpect
@@ -363,7 +365,7 @@ pyasn1==0.6.1
# pyasn1-modules
# rsa
# service-identity
pyasn1-modules==0.4.2
pyasn1-modules==0.4.1
# via
# google-auth
# service-identity
@@ -382,7 +384,7 @@ pyjwt[crypto]==2.10.1
# twilio
pynacl==1.5.0
# via pygithub
pyopenssl==25.0.0
pyopenssl==24.3.0
# via
# -r /awx_devel/requirements/requirements.in
# twisted
@@ -405,7 +407,7 @@ python-string-utils==1.0.0
# via openshift
python-tss-sdk==1.2.3
# via -r /awx_devel/requirements/requirements.in
pytz==2025.2
pytz==2024.2
# via irc
pyyaml==6.0.2
# via
@@ -416,13 +418,13 @@ pyyaml==6.0.2
# receptorctl
pyzstd==0.16.2
# via -r /awx_devel/requirements/requirements.in
receptorctl==1.5.4
receptorctl==1.5.2
# via -r /awx_devel/requirements/requirements.in
redis[hiredis]==5.2.1
# via
# -r /awx_devel/requirements/requirements.in
# channels-redis
referencing==0.36.2
referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
@@ -446,21 +448,21 @@ requests-oauthlib==2.0.0
# via
# kubernetes
# msrest
rpds-py==0.24.0
rpds-py==0.22.3
# via
# jsonschema
# referencing
rsa==4.9
# via google-auth
s3transfer==0.11.4
s3transfer==0.10.4
# via boto3
semantic-version==2.10.0
# via setuptools-rust
service-identity==24.2.0
# via twisted
setuptools-rust==1.11.1
setuptools-rust==1.10.2
# via -r /awx_devel/requirements/requirements.in
setuptools-scm[toml]==8.2.0
setuptools-scm[toml]==8.1.0
# via -r /awx_devel/requirements/requirements.in
six==1.17.0
# via
@@ -470,7 +472,7 @@ six==1.17.0
# openshift
# pygerduty
# python-dateutil
slack-sdk==3.35.0
slack-sdk==3.34.0
# via -r /awx_devel/requirements/requirements.in
smmap==5.0.2
# via gitdb
@@ -483,7 +485,7 @@ tempora==5.8.0
# via
# irc
# jaraco-logging
twilio==9.5.2
twilio==9.4.2
# via -r /awx_devel/requirements/requirements.in
twisted[tls]==24.11.0
# via
@@ -491,7 +493,7 @@ twisted[tls]==24.11.0
# daphne
txaio==23.1.1
# via autobahn
typing-extensions==4.13.2
typing-extensions==4.12.2
# via
# azure-core
# azure-identity
@@ -502,17 +504,15 @@ typing-extensions==4.13.2
# opentelemetry-sdk
# psycopg
# pygithub
# pyopenssl
# referencing
# twisted
urllib3==2.4.0
urllib3==2.3.0
# via
# botocore
# django-ansible-base
# kubernetes
# pygithub
# requests
uwsgi==2.0.29
uwsgi==2.0.28
# via -r /awx_devel/requirements/requirements.in
uwsgitop==0.12
# via -r /awx_devel/requirements/requirements.in
@@ -520,11 +520,11 @@ websocket-client==1.8.0
# via kubernetes
wheel==0.45.1
# via -r /awx_devel/requirements/requirements.in
wrapt==1.17.2
wrapt==1.17.0
# via
# deprecated
# opentelemetry-instrumentation
yarl==1.19.0
yarl==1.18.3
# via aiohttp
zipp==3.21.0
# via importlib-metadata
16
tools/scripts/firehose_tasks.py
Executable file
@@ -0,0 +1,16 @@
#!/usr/bin/env python

from django import setup

from awx import prepare_env

prepare_env()

setup()

# Keeping this in test folder allows it to be importable
from awx.main.tests.data.sleep_task import sleep_task


for i in range(634):
    sleep_task.delay()
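Note on the script above: it prepares the AWX environment before Django initializes, then fires a fixed burst of 634 sleep tasks at the dispatcher via .delay(), which enqueues work without waiting for results. As a hedged illustration only (not part of this commit), the same firehose could take the burst size from the command line; the argparse handling and --count flag below are assumptions added for the sketch:

#!/usr/bin/env python
# Illustrative variant of firehose_tasks.py; the argument parsing here is an
# assumption for the sketch and is not part of the committed script.
import argparse

from django import setup

from awx import prepare_env

prepare_env()  # prepare AWX settings/environment before Django loads
setup()

# Keeping this in test folder allows it to be importable
from awx.main.tests.data.sleep_task import sleep_task

parser = argparse.ArgumentParser(description='Enqueue a burst of sleep tasks')
parser.add_argument('--count', type=int, default=634, help='number of tasks to enqueue')
args = parser.parse_args()

for _ in range(args.count):
    sleep_task.delay()  # fire-and-forget: submit to the dispatcher, do not block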