Compare commits

..

4 Commits

Author SHA1 Message Date
Peter Braun
1417b1e33e improve error handling 2025-04-23 11:09:50 +02:00
Peter Braun
73187c61f4 use server certificates for checking rhsm and satellite subscriptions 2025-04-23 10:41:54 +02:00
Alan Rominger
db6e8b9bad AAP-40782 Fix too-low max_workers value, dump running at capacity (#15873)
* Dump running tasks when running out of capacity

* Use same logic for max_workers and capacity

* Address case where CPU capacity is the constraint

* Add a test for correspondence

* Fake redis to make tests work
2025-04-16 16:43:21 -04:00
Hao Liu
483417762f Git ignore legacy UI files (#15946) 2025-04-16 14:40:12 -04:00
16 changed files with 235 additions and 119 deletions

2
.gitignore vendored
View File

@@ -150,6 +150,8 @@ use_dev_supervisor.txt
awx/ui/src
awx/ui/build
awx/ui/.ui-built
awx/ui_next
# Docs build stuff
docs/docsite/build/

View File

@@ -7,6 +7,7 @@ import time
import traceback
from datetime import datetime
from uuid import uuid4
import json
import collections
from multiprocessing import Process
@@ -25,7 +26,10 @@ from ansible_base.lib.logging.runtime import log_excess_runtime
from awx.main.models import UnifiedJob
from awx.main.dispatch import reaper
from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity
from awx.main.utils.common import get_mem_effective_capacity, get_corrected_memory, get_corrected_cpu, get_cpu_effective_capacity
# ansible-runner
from ansible_runner.utils.capacity import get_mem_in_bytes, get_cpu_count
if 'run_callback_receiver' in sys.argv:
logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -307,6 +311,41 @@ class WorkerPool(object):
logger.exception('could not kill {}'.format(worker.pid))
def get_auto_max_workers():
    """Return the worker count the dispatcher pool should use for max_workers.

    Uses almost the same logic as Instance.local_health_check.
    The important thing is to be MORE than Instance.capacity
    so that the task-manager does not over-schedule this node.
    Ideally we would just use the capacity from the database plus reserve workers,
    but this poses some bootstrap problems where OCP task containers
    register themselves after startup.
    """
    # Get memory from ansible-runner. NOTE: despite the old local name
    # here, get_mem_in_bytes() returns bytes, not GB.
    total_memory_bytes = get_mem_in_bytes()
    # This may replace the measured memory with a user override
    corrected_memory = get_corrected_memory(total_memory_bytes)
    # Get same number as max forks based on memory, this function takes memory as bytes
    mem_capacity = get_mem_effective_capacity(corrected_memory, is_control_node=True)
    # Follow same process for the CPU capacity constraint
    cpu_count = get_cpu_count()
    corrected_cpu = get_corrected_cpu(cpu_count)
    cpu_capacity = get_cpu_effective_capacity(corrected_cpu, is_control_node=True)
    # Here is what differs from health checks: take the LARGER of the two
    # constraints so max_workers is never below the node's advertised capacity
    auto_max = max(mem_capacity, cpu_capacity)
    # add magic number of extra workers to ensure
    # we have a few extra workers to run the heartbeat
    auto_max += 7
    return auto_max
class AutoscalePool(WorkerPool):
"""
An extended pool implementation that automatically scales workers up and
@@ -320,19 +359,7 @@ class AutoscalePool(WorkerPool):
super(AutoscalePool, self).__init__(*args, **kwargs)
if self.max_workers is None:
settings_absmem = getattr(settings, 'SYSTEM_TASK_ABS_MEM', None)
if settings_absmem is not None:
# There are 1073741824 bytes in a gigabyte. Convert bytes to gigabytes by dividing by 2**30
total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30
else:
total_memory_gb = (psutil.virtual_memory().total >> 30) + 1 # noqa: round up
# Get same number as max forks based on memory, this function takes memory as bytes
self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30)
# add magic prime number of extra workers to ensure
# we have a few extra workers to run the heartbeat
self.max_workers += 7
self.max_workers = get_auto_max_workers()
# max workers can't be less than min_workers
self.max_workers = max(self.min_workers, self.max_workers)
@@ -346,6 +373,9 @@ class AutoscalePool(WorkerPool):
self.scale_up_ct = 0
self.worker_count_max = 0
# last time we wrote current tasks, to avoid too much log spam
self.last_task_list_log = time.monotonic()
def produce_subsystem_metrics(self, metrics_object):
metrics_object.set('dispatcher_pool_scale_up_events', self.scale_up_ct)
metrics_object.set('dispatcher_pool_active_task_count', sum(len(w.managed_tasks) for w in self.workers))
@@ -463,6 +493,14 @@ class AutoscalePool(WorkerPool):
self.worker_count_max = new_worker_ct
return ret
@staticmethod
def fast_task_serialization(current_task):
    """Build a cheap, stable string slug for a running-task dict.

    Used only to aggregate running tasks for logging; any failure falls
    back to str() of the whole dict so logging can never break dispatch.
    """
    try:
        name_part = str(current_task.get('task'))
        args_part = str(sorted(current_task.get('args', [])))
        kwargs_part = str(sorted(current_task.get('kwargs', {})))
        return name_part + ' - ' + args_part + ' - ' + kwargs_part
    except Exception:
        # just make sure this does not make things worse
        return str(current_task)
def write(self, preferred_queue, body):
if 'guid' in body:
set_guid(body['guid'])
@@ -484,6 +522,15 @@ class AutoscalePool(WorkerPool):
if isinstance(body, dict):
task_name = body.get('task')
logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
# Once every 10 seconds write out task list for debugging
if time.monotonic() - self.last_task_list_log >= 10.0:
task_counts = {}
for worker in self.workers:
task_slug = self.fast_task_serialization(worker.current_task)
task_counts.setdefault(task_slug, 0)
task_counts[task_slug] += 1
logger.info(f'Running tasks by count:\n{json.dumps(task_counts, indent=2)}')
self.last_task_list_log = time.monotonic()
return super(AutoscalePool, self).write(preferred_queue, body)
except Exception:
for conn in connections.all():

View File

@@ -238,7 +238,7 @@ class AWXConsumerPG(AWXConsumerBase):
def run(self, *args, **kwargs):
super(AWXConsumerPG, self).run(*args, **kwargs)
logger.info(f"Running worker {self.name} listening to queues {self.queues}")
logger.info(f"Running {self.name}, workers min={self.pool.min_workers} max={self.pool.max_workers}, listening to queues {self.queues}")
init = False
while True:

View File

@@ -0,0 +1,17 @@
import time
import logging
from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.publish import task
logger = logging.getLogger(__name__)
@task(queue=get_task_queuename)
def sleep_task(seconds=10, log=False):
    # Trivial dispatcher task used to generate load in tests and scripts.
    # seconds: how long to occupy a pool worker; log: emit start/finish lines.
    if log:
        logger.info('starting sleep_task')
    time.sleep(seconds)
    if log:
        logger.info('finished sleep_task')

View File

@@ -34,40 +34,18 @@ def test_wrapup_does_send_notifications(mocker):
mock.assert_called_once_with('succeeded')
class FakeRedis:
def keys(self, *args, **kwargs):
return []
def set(self):
pass
def get(self):
return None
@classmethod
def from_url(cls, *args, **kwargs):
return cls()
def pipeline(self):
return self
class TestCallbackBrokerWorker(TransactionTestCase):
@pytest.fixture(autouse=True)
def turn_off_websockets(self):
def turn_off_websockets_and_redis(self, fake_redis):
with mock.patch('awx.main.dispatch.worker.callback.emit_event_detail', lambda *a, **kw: None):
yield
def get_worker(self):
with mock.patch('redis.Redis', new=FakeRedis): # turn off redis stuff
return CallbackBrokerWorker()
def event_create_kwargs(self):
inventory_update = InventoryUpdate.objects.create(source='file', inventory_source=InventorySource.objects.create(source='file'))
return dict(inventory_update=inventory_update, created=inventory_update.created)
def test_flush_with_valid_event(self):
worker = self.get_worker()
worker = CallbackBrokerWorker()
events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
worker.buff = {InventoryUpdateEvent: events}
worker.flush()
@@ -75,7 +53,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1
def test_flush_with_invalid_event(self):
worker = self.get_worker()
worker = CallbackBrokerWorker()
kwargs = self.event_create_kwargs()
events = [
InventoryUpdateEvent(uuid=str(uuid4()), stdout='good1', **kwargs),
@@ -90,7 +68,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
assert worker.buff == {InventoryUpdateEvent: [events[1]]}
def test_duplicate_key_not_saved_twice(self):
worker = self.get_worker()
worker = CallbackBrokerWorker()
events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
worker.buff = {InventoryUpdateEvent: events.copy()}
worker.flush()
@@ -104,7 +82,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
assert worker.buff.get(InventoryUpdateEvent, []) == []
def test_give_up_on_bad_event(self):
worker = self.get_worker()
worker = CallbackBrokerWorker()
events = [InventoryUpdateEvent(uuid=str(uuid4()), counter=-2, **self.event_create_kwargs())]
worker.buff = {InventoryUpdateEvent: events.copy()}
@@ -117,7 +95,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 0 # sanity
def test_flush_with_empty_buffer(self):
worker = self.get_worker()
worker = CallbackBrokerWorker()
worker.buff = {InventoryUpdateEvent: []}
with mock.patch.object(InventoryUpdateEvent.objects, 'bulk_create') as flush_mock:
worker.flush()
@@ -127,7 +105,7 @@ class TestCallbackBrokerWorker(TransactionTestCase):
# In postgres, text fields reject NUL character, 0x00
# tests use sqlite3 which will not raise an error
# but we can still test that it is sanitized before saving
worker = self.get_worker()
worker = CallbackBrokerWorker()
kwargs = self.event_create_kwargs()
events = [InventoryUpdateEvent(uuid=str(uuid4()), stdout="\x00", **kwargs)]
assert "\x00" in events[0].stdout # sanity

View File

@@ -63,6 +63,33 @@ def swagger_autogen(requests=__SWAGGER_REQUESTS__):
return requests
class FakeRedis:
    """Minimal in-memory stand-in for redis.Redis used to keep tests offline.

    Only the handful of methods the code under test touches are stubbed;
    everything returns an inert value.
    """

    @classmethod
    def from_url(cls, *args, **kwargs):
        # redis.Redis.from_url is the usual construction path; accept anything
        return cls()

    def keys(self, *args, **kwargs):
        return []

    def set(self):
        pass

    def get(self):
        return None

    def pipeline(self):
        # a real pipeline proxies the client API, so returning self suffices
        return self

    def ping(self):
        return
@pytest.fixture
def fake_redis():
with mock.patch('redis.Redis', new=FakeRedis): # turn off redis stuff
yield
@pytest.fixture
def user():
def u(name, is_superuser=False):

View File

@@ -3,6 +3,10 @@ import pytest
# AWX
from awx.main.ha import is_ha_environment
from awx.main.models.ha import Instance
from awx.main.dispatch.pool import get_auto_max_workers
# Django
from django.test.utils import override_settings
@pytest.mark.django_db
@@ -17,3 +21,25 @@ def test_db_localhost():
Instance.objects.create(hostname='foo', node_type='hybrid')
Instance.objects.create(hostname='bar', node_type='execution')
assert is_ha_environment() is False
@pytest.mark.django_db
@pytest.mark.parametrize(
    'settings',
    [
        # case where CPU-derived capacity should dominate
        dict(SYSTEM_TASK_ABS_MEM='16Gi', SYSTEM_TASK_ABS_CPU='24', SYSTEM_TASK_FORKS_MEM=400, SYSTEM_TASK_FORKS_CPU=4),
        # case where memory-derived capacity should dominate
        dict(SYSTEM_TASK_ABS_MEM='124Gi', SYSTEM_TASK_ABS_CPU='2', SYSTEM_TASK_FORKS_MEM=None, SYSTEM_TASK_FORKS_CPU=None),
    ],
    ids=['cpu_dominated', 'memory_dominated'],
)
def test_dispatcher_max_workers_reserve(settings, fake_redis):
    """This tests that the dispatcher max_workers matches instance capacity
    Assumes capacity_adjustment is 1,
    plus reserve worker count
    """
    with override_settings(**settings):
        i = Instance.objects.create(hostname='test-1', node_type='hybrid')
        # populate i.capacity and related fields from the overridden settings
        i.local_health_check()
        # 7 is the dispatcher's reserve-worker constant added by get_auto_max_workers
        assert get_auto_max_workers() == i.capacity + 7, (i.cpu, i.memory, i.cpu_capacity, i.mem_capacity)

View File

@@ -242,20 +242,12 @@ class Licenser(object):
return []
def get_rhsm_subs(self, host, user, pw):
verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
json = []
try:
subs = requests.get('/'.join([host, 'subscription/users/{}/owners'.format(user)]), verify=verify, auth=(user, pw))
except requests.exceptions.ConnectionError as error:
raise error
except OSError as error:
raise OSError(
'Unable to open certificate bundle {}. Check that the service is running on Red Hat Enterprise Linux.'.format(verify)
) from error # noqa
subs = requests.get('/'.join([host, 'subscription/users/{}/owners'.format(user)]), verify=True, auth=(user, pw))
subs.raise_for_status()
for sub in subs.json():
resp = requests.get('/'.join([host, 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])]), verify=verify, auth=(user, pw))
resp = requests.get('/'.join([host, 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])]), verify=True, auth=(user, pw))
resp.raise_for_status()
json.extend(resp.json())
return json
@@ -266,8 +258,8 @@ class Licenser(object):
verify = str(self.config.get("rhsm", "repo_ca_cert"))
port = str(self.config.get("server", "port"))
except Exception as e:
verify = True
logger.exception('Unable to read rhsm config to get ca_cert location. {}'.format(str(e)))
verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
if port:
host = ':'.join([host, port])
json = []

11
licenses/portalocker.txt Normal file
View File

@@ -0,0 +1,11 @@
Copyright 2022 Rick van Hattem
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -54,7 +54,7 @@ python-tss-sdk>=1.2.1
pyyaml>=6.0.2 # require packing fix for cython 3 or higher
pyzstd # otel collector log file compression library
receptorctl
sqlparse>=0.5.2
sqlparse>=0.4.4 # Required by django https://github.com/ansible/awx/security/dependabot/96
redis[hiredis]
requests
slack-sdk

View File

@@ -1,13 +1,13 @@
adal==1.2.7
# via msrestazure
aiohappyeyeballs==2.6.1
aiohappyeyeballs==2.4.4
# via aiohttp
aiohttp==3.11.16
aiohttp==3.11.11
# via
# -r /awx_devel/requirements/requirements.in
# aiohttp-retry
# twilio
aiohttp-retry==2.9.1
aiohttp-retry==2.8.3
# via twilio
aiosignal==1.3.2
# via aiohttp
@@ -25,9 +25,9 @@ asgiref==3.8.1
# django
# django-ansible-base
# django-cors-headers
asn1==3.0.0
asn1==2.7.1
# via -r /awx_devel/requirements/requirements.in
attrs==25.3.0
attrs==24.3.0
# via
# aiohttp
# jsonschema
@@ -46,14 +46,14 @@ awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.g
# via
# -r /awx_devel/requirements/requirements_git.txt
# awx-plugins-core
azure-core==1.33.0
azure-core==1.32.0
# via
# azure-identity
# azure-keyvault-certificates
# azure-keyvault-keys
# azure-keyvault-secrets
# msrest
azure-identity==1.21.0
azure-identity==1.19.0
# via -r /awx_devel/requirements/requirements.in
azure-keyvault==4.2.0
# via -r /awx_devel/requirements/requirements.in
@@ -65,14 +65,14 @@ azure-keyvault-secrets==4.9.0
# via azure-keyvault
backports-tarfile==1.2.0
# via jaraco-context
boto3==1.37.34
boto3==1.35.96
# via -r /awx_devel/requirements/requirements.in
botocore==1.37.34
botocore==1.35.96
# via
# -r /awx_devel/requirements/requirements.in
# boto3
# s3transfer
cachetools==5.5.2
cachetools==5.5.0
# via google-auth
# git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
# via
@@ -84,7 +84,7 @@ cffi==1.17.1
# via
# cryptography
# pynacl
channels==4.2.2
channels==4.2.0
# via
# -r /awx_devel/requirements/requirements.in
# channels-redis
@@ -109,11 +109,11 @@ cryptography==41.0.7
# pyjwt
# pyopenssl
# service-identity
cython==3.0.12
cython==3.0.11
# via -r /awx_devel/requirements/requirements.in
daphne==4.1.2
# via -r /awx_devel/requirements/requirements.in
deprecated==1.2.18
deprecated==1.2.15
# via
# opentelemetry-api
# opentelemetry-exporter-otlp-proto-grpc
@@ -138,19 +138,19 @@ django==4.2.20
# djangorestframework
# django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
# via -r /awx_devel/requirements/requirements_git.txt
django-cors-headers==4.7.0
django-cors-headers==4.6.0
# via -r /awx_devel/requirements/requirements.in
django-crum==0.7.9
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
django-extensions==4.1
django-extensions==3.2.3
# via -r /awx_devel/requirements/requirements.in
django-flags==5.0.13
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
django-guid==3.5.1
django-guid==3.5.0
# via -r /awx_devel/requirements/requirements.in
django-oauth-toolkit==1.7.1
# via -r /awx_devel/requirements/requirements.in
@@ -158,7 +158,7 @@ django-polymorphic==3.1.0
# via -r /awx_devel/requirements/requirements.in
django-solo==2.4.0
# via -r /awx_devel/requirements/requirements.in
djangorestframework==3.16.0
djangorestframework==3.15.2
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
@@ -172,7 +172,7 @@ dynaconf==3.2.10
# django-ansible-base
enum-compat==0.0.3
# via asn1
filelock==3.18.0
filelock==3.16.1
# via -r /awx_devel/requirements/requirements.in
frozenlist==1.5.0
# via
@@ -182,13 +182,13 @@ gitdb==4.0.12
# via gitpython
gitpython==3.1.44
# via -r /awx_devel/requirements/requirements.in
google-auth==2.39.0
google-auth==2.37.0
# via kubernetes
googleapis-common-protos==1.70.0
googleapis-common-protos==1.66.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
grpcio==1.71.0
grpcio==1.69.0
# via
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -204,7 +204,7 @@ idna==3.10
# requests
# twisted
# yarl
importlib-metadata==8.6.1
importlib-metadata==8.5.0
# via opentelemetry-api
importlib-resources==6.5.2
# via irc
@@ -237,7 +237,7 @@ jaraco-text==4.0.0
# via
# irc
# jaraco-collections
jinja2==3.1.6
jinja2==3.1.5
# via -r /awx_devel/requirements/requirements.in
jmespath==1.0.1
# via
@@ -245,7 +245,7 @@ jmespath==1.0.1
# botocore
jq==1.8.0
# via -r /awx_devel/requirements/requirements.in
json-log-formatter==1.1.1
json-log-formatter==1.1
# via -r /awx_devel/requirements/requirements.in
jsonschema==4.23.0
# via -r /awx_devel/requirements/requirements.in
@@ -253,27 +253,27 @@ jsonschema-specifications==2024.10.1
# via jsonschema
jwcrypto==1.5.6
# via django-oauth-toolkit
kubernetes==32.0.1
kubernetes==31.0.0
# via openshift
lockfile==0.12.2
# via python-daemon
markdown==3.8
markdown==3.7
# via -r /awx_devel/requirements/requirements.in
markupsafe==3.0.2
# via jinja2
maturin==1.8.3
maturin==1.8.1
# via -r /awx_devel/requirements/requirements.in
more-itertools==10.6.0
more-itertools==10.5.0
# via
# irc
# jaraco-functools
# jaraco-stream
# jaraco-text
msal==1.32.0
msal==1.31.1
# via
# azure-identity
# msal-extensions
msal-extensions==1.3.1
msal-extensions==1.2.0
# via azure-identity
msgpack==1.1.0
# via
@@ -283,7 +283,7 @@ msrest==0.7.1
# via msrestazure
msrestazure==0.6.4.post1
# via -r /awx_devel/requirements/requirements.in
multidict==6.4.3
multidict==6.1.0
# via
# aiohttp
# yarl
@@ -294,7 +294,7 @@ oauthlib==3.2.2
# requests-oauthlib
openshift==0.13.2
# via -r /awx_devel/requirements/requirements.in
opentelemetry-api==1.32.0
opentelemetry-api==1.29.0
# via
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -303,31 +303,31 @@ opentelemetry-api==1.32.0
# opentelemetry-instrumentation-logging
# opentelemetry-sdk
# opentelemetry-semantic-conventions
opentelemetry-exporter-otlp==1.32.0
opentelemetry-exporter-otlp==1.29.0
# via -r /awx_devel/requirements/requirements.in
opentelemetry-exporter-otlp-proto-common==1.32.0
opentelemetry-exporter-otlp-proto-common==1.29.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.32.0
opentelemetry-exporter-otlp-proto-grpc==1.29.0
# via opentelemetry-exporter-otlp
opentelemetry-exporter-otlp-proto-http==1.32.0
opentelemetry-exporter-otlp-proto-http==1.29.0
# via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.53b0
opentelemetry-instrumentation==0.50b0
# via opentelemetry-instrumentation-logging
opentelemetry-instrumentation-logging==0.53b0
opentelemetry-instrumentation-logging==0.50b0
# via -r /awx_devel/requirements/requirements.in
opentelemetry-proto==1.32.0
opentelemetry-proto==1.29.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.32.0
opentelemetry-sdk==1.29.0
# via
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.53b0
opentelemetry-semantic-conventions==0.50b0
# via
# opentelemetry-instrumentation
# opentelemetry-sdk
@@ -342,19 +342,21 @@ pexpect==4.7.0
# ansible-runner
pkgconfig==1.5.5
# via -r /awx_devel/requirements/requirements.in
portalocker==2.10.1
# via msal-extensions
prometheus-client==0.21.1
# via -r /awx_devel/requirements/requirements.in
propcache==0.3.1
propcache==0.2.1
# via
# aiohttp
# yarl
protobuf==5.29.4
protobuf==5.29.3
# via
# googleapis-common-protos
# opentelemetry-proto
psutil==7.0.0
psutil==6.1.1
# via -r /awx_devel/requirements/requirements.in
psycopg==3.2.6
psycopg==3.2.3
# via -r /awx_devel/requirements/requirements.in
ptyprocess==0.7.0
# via pexpect
@@ -363,7 +365,7 @@ pyasn1==0.6.1
# pyasn1-modules
# rsa
# service-identity
pyasn1-modules==0.4.2
pyasn1-modules==0.4.1
# via
# google-auth
# service-identity
@@ -382,7 +384,7 @@ pyjwt[crypto]==2.10.1
# twilio
pynacl==1.5.0
# via pygithub
pyopenssl==25.0.0
pyopenssl==24.3.0
# via
# -r /awx_devel/requirements/requirements.in
# twisted
@@ -405,7 +407,7 @@ python-string-utils==1.0.0
# via openshift
python-tss-sdk==1.2.3
# via -r /awx_devel/requirements/requirements.in
pytz==2025.2
pytz==2024.2
# via irc
pyyaml==6.0.2
# via
@@ -416,13 +418,13 @@ pyyaml==6.0.2
# receptorctl
pyzstd==0.16.2
# via -r /awx_devel/requirements/requirements.in
receptorctl==1.5.4
receptorctl==1.5.2
# via -r /awx_devel/requirements/requirements.in
redis[hiredis]==5.2.1
# via
# -r /awx_devel/requirements/requirements.in
# channels-redis
referencing==0.36.2
referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
@@ -446,21 +448,21 @@ requests-oauthlib==2.0.0
# via
# kubernetes
# msrest
rpds-py==0.24.0
rpds-py==0.22.3
# via
# jsonschema
# referencing
rsa==4.9
# via google-auth
s3transfer==0.11.4
s3transfer==0.10.4
# via boto3
semantic-version==2.10.0
# via setuptools-rust
service-identity==24.2.0
# via twisted
setuptools-rust==1.11.1
setuptools-rust==1.10.2
# via -r /awx_devel/requirements/requirements.in
setuptools-scm[toml]==8.2.0
setuptools-scm[toml]==8.1.0
# via -r /awx_devel/requirements/requirements.in
six==1.17.0
# via
@@ -470,7 +472,7 @@ six==1.17.0
# openshift
# pygerduty
# python-dateutil
slack-sdk==3.35.0
slack-sdk==3.34.0
# via -r /awx_devel/requirements/requirements.in
smmap==5.0.2
# via gitdb
@@ -483,7 +485,7 @@ tempora==5.8.0
# via
# irc
# jaraco-logging
twilio==9.5.2
twilio==9.4.2
# via -r /awx_devel/requirements/requirements.in
twisted[tls]==24.11.0
# via
@@ -491,7 +493,7 @@ twisted[tls]==24.11.0
# daphne
txaio==23.1.1
# via autobahn
typing-extensions==4.13.2
typing-extensions==4.12.2
# via
# azure-core
# azure-identity
@@ -502,17 +504,15 @@ typing-extensions==4.13.2
# opentelemetry-sdk
# psycopg
# pygithub
# pyopenssl
# referencing
# twisted
urllib3==2.4.0
urllib3==2.3.0
# via
# botocore
# django-ansible-base
# kubernetes
# pygithub
# requests
uwsgi==2.0.29
uwsgi==2.0.28
# via -r /awx_devel/requirements/requirements.in
uwsgitop==0.12
# via -r /awx_devel/requirements/requirements.in
@@ -520,11 +520,11 @@ websocket-client==1.8.0
# via kubernetes
wheel==0.45.1
# via -r /awx_devel/requirements/requirements.in
wrapt==1.17.2
wrapt==1.17.0
# via
# deprecated
# opentelemetry-instrumentation
yarl==1.19.0
yarl==1.18.3
# via aiohttp
zipp==3.21.0
# via importlib-metadata

16
tools/scripts/firehose_tasks.py Executable file
View File

@@ -0,0 +1,16 @@
#!/usr/bin/env python
# Flood the dispatcher with sleep tasks so queueing/capacity behavior can
# be observed (e.g. the "Workers maxed" log path).
from django import setup

from awx import prepare_env

# AWX/Django settings must be configured before importing task modules
prepare_env()
setup()

# Keeping this in test folder allows it to be importable
from awx.main.tests.data.sleep_task import sleep_task

# NOTE(review): 634 presumably exceeds typical node capacity so tasks queue — confirm
for i in range(634):
    sleep_task.delay()