AAP-60452 Remove the dynamic log level filter for the dispatcherd main process (#16200)

* Remove the dynamic filter on dispatcher startup

Configure the dynamic logging level only on startup

* Special case for log level on settings change

* Add unit test for new behavior

* Add test for initial config

* Mark test django DB

* Do necessary requirement bump

* Delete cache in live test fixture
Alan Rominger 2026-01-02 15:45:06 -05:00 committed by GitHub
parent 40059512d8
commit 48c7534b57
5 changed files with 103 additions and 2 deletions
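
Before the diffs, a note on terminology: the "dynamic log level filter" is a logging filter that re-reads the desired threshold for every record, rather than relying on a logger's static level. A minimal stand-alone sketch of that idea follows; the class name, the get_level callable, and the handler wiring are illustrative assumptions, not AWX's actual filter.

# Stand-alone sketch (not AWX code): a filter that re-reads its threshold per record.
import logging


class IllustrativeDynamicLevelFilter(logging.Filter):
    def __init__(self, get_level=lambda: 'INFO'):
        super().__init__()
        self.get_level = get_level  # callable returning the current level name

    def filter(self, record):
        # logging.getLevelName() maps a level name back to its numeric value
        return record.levelno >= logging.getLevelName(self.get_level())


handler = logging.StreamHandler()
handler.addFilter(IllustrativeDynamicLevelFilter(get_level=lambda: 'WARNING'))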

View File

@@ -1,12 +1,16 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
import logging.config
import yaml
import copy

import redis

from django.conf import settings
from django.db import connection
from django.core.management.base import BaseCommand, CommandError
from django.core.cache import cache as django_cache

from flags.state import flag_enabled
@@ -104,6 +108,12 @@ class Command(BaseCommand):
            return

        if flag_enabled('FEATURE_DISPATCHERD_ENABLED'):
            self.configure_dispatcher_logging()

            # Close the connection, because the pg_notify broker will create new async connection
            connection.close()
            django_cache.close()

            dispatcher_setup(get_dispatcherd_config(for_service=True))
            run_service()
        else:
@@ -122,3 +132,18 @@ class Command(BaseCommand):
                logger.debug('Terminating Task Dispatcher')
                if consumer:
                    consumer.stop()

    def configure_dispatcher_logging(self):
        # Apply special log rule for the parent process
        special_logging = copy.deepcopy(settings.LOGGING)
        for handler_name, handler_config in special_logging.get('handlers', {}).items():
            filters = handler_config.get('filters', [])
            if 'dynamic_level_filter' in filters:
                handler_config['filters'] = [flt for flt in filters if flt != 'dynamic_level_filter']
                logger.info(f'Dispatcherd main process replaced log level filter for {handler_name} handler')

        # Apply the custom logging level here, before the asyncio code starts
        special_logging.setdefault('loggers', {}).setdefault('dispatcherd', {})
        special_logging['loggers']['dispatcherd']['level'] = settings.LOG_AGGREGATOR_LEVEL

        logging.config.dictConfig(special_logging)
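
For context, the startup-time pattern that configure_dispatcher_logging applies can be reproduced outside Django in a few lines. In this sketch only the 'dispatcherd' logger name comes from the diff above; the handler layout and the 'WARNING' value are illustrative.

import logging
import logging.config

LOG_AGGREGATOR_LEVEL = 'WARNING'  # stand-in for settings.LOG_AGGREGATOR_LEVEL

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {'console': {'class': 'logging.StreamHandler'}},
    'loggers': {'dispatcherd': {'handlers': ['console'], 'level': 'INFO', 'propagate': False}},
}

# Pin the level once, before the asyncio service starts, instead of
# consulting a dynamic filter on every record.
LOGGING['loggers']['dispatcherd']['level'] = LOG_AGGREGATOR_LEVEL
logging.config.dictConfig(LOGGING)

logging.getLogger('dispatcherd').info('suppressed: below WARNING')
logging.getLogger('dispatcherd').warning('emitted: at or above WARNING')

Once pinned this way, later LOG_AGGREGATOR_LEVEL changes are not picked up automatically by this process; the settings-change hook in the next file broadcasts a set_log_level control command to cover runtime changes.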

View File

@@ -13,6 +13,9 @@ from datetime import datetime
from distutils.version import LooseVersion as Version
from io import StringIO

# dispatcherd
from dispatcherd.factories import get_control_from_settings

# Runner
import ansible_runner.cleanup
import psycopg
@@ -354,6 +357,11 @@ def clear_setting_cache(setting_keys):
    logger.debug('cache delete_many(%r)', cache_keys)
    cache.delete_many(cache_keys)

    if 'LOG_AGGREGATOR_LEVEL' in setting_keys:
        ctl = get_control_from_settings()
        ctl.queuename = get_task_queuename()
        ctl.control('set_log_level', data={'level': settings.LOG_AGGREGATOR_LEVEL})


@task_awx(queue='tower_broadcast_all', timeout=600)
def delete_project_files(project_path):
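
The same control path can be exercised by hand, for example from a Django shell on an AWX node, to push a log-level change at runtime. The sketch below mirrors the calls in the hunk above; the awx.main.dispatch import path for get_task_queuename is an assumption, since this hunk does not show where it is imported from.

# Ad-hoc sketch: broadcast the same 'set_log_level' command the settings-change hook sends.
from django.conf import settings
from dispatcherd.factories import get_control_from_settings
from awx.main.dispatch import get_task_queuename  # assumed import path

ctl = get_control_from_settings()
ctl.queuename = get_task_queuename()
ctl.control('set_log_level', data={'level': settings.LOG_AGGREGATOR_LEVEL})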

View File

@@ -1,10 +1,15 @@
import copy
import json
import logging
import os
import tempfile
import shutil

from unittest import mock

import pytest

-from awx.main.tasks.system import CleanupImagesAndFiles, execution_node_health_check, inspect_established_receptor_connections
+from awx.main.tasks.system import CleanupImagesAndFiles, execution_node_health_check, inspect_established_receptor_connections, clear_setting_cache
from awx.main.management.commands.run_dispatcher import Command
from awx.main.models import Instance, Job, ReceptorAddress, InstanceLink
@@ -98,3 +103,64 @@ def test_folder_cleanup_multiple_running_jobs(job_folder_factory, me_inst):
    CleanupImagesAndFiles.run(grace_period=0)
    assert [os.path.exists(d) for d in dirs] == [True for i in range(num_jobs)]


@pytest.mark.django_db
def test_clear_setting_cache_log_level_branch(settings):
    settings.LOG_AGGREGATOR_LEVEL = 'DEBUG'
    settings.CLUSTER_HOST_ID = 'control-node'

    published_messages = []

    class DummyBroker:
        def publish_message(self, channel, message):
            published_messages.append((channel, message))

        def close(self):
            pass

    dummy_broker = DummyBroker()

    with mock.patch('dispatcherd.control.get_broker', return_value=dummy_broker) as mock_get_broker:
        clear_setting_cache(['LOG_AGGREGATOR_LEVEL'])

    mock_get_broker.assert_called_once()
    assert published_messages, 'control command was not sent through the broker'

    queue, payload = published_messages[-1]
    assert queue == 'control-node'

    body = json.loads(payload)
    assert body['control'] == 'set_log_level'
    assert body['control_data'] == {'level': 'DEBUG'}


@pytest.mark.django_db
def test_configure_dispatcher_logging_updates_level(settings):
    original_logging_settings = copy.deepcopy(settings.LOGGING)
    settings.LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'filters': {
            'dynamic_level_filter': {
                '()': 'logging.Filter',
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'filters': ['dynamic_level_filter'],
                'stream': 'ext://sys.stdout',
            }
        },
        'loggers': {
            'dispatcherd': {
                'handlers': ['console'],
                'level': 'INFO',
                'propagate': False,
            }
        },
    }
    settings.LOG_AGGREGATOR_LEVEL = 'WARNING'

    Command().configure_dispatcher_logging()

    assert logging.getLogger('dispatcherd').level == logging.WARNING

    settings.LOGGING = original_logging_settings

View File

@@ -8,6 +8,7 @@ import logging
import pytest

from django.conf import settings
from django.core.cache import cache

from awx.api.versioning import reverse
@@ -60,6 +61,7 @@ def live_tmp_folder():
        subprocess.run(GIT_COMMANDS, cwd=source_dir, shell=True)

    if path not in settings.AWX_ISOLATION_SHOW_PATHS:
        settings.AWX_ISOLATION_SHOW_PATHS = settings.AWX_ISOLATION_SHOW_PATHS + [path]
        cache.delete_many(['AWX_ISOLATION_SHOW_PATHS'])

    return path

View File

@@ -120,7 +120,7 @@ cython==3.1.3
    # via -r /awx_devel/requirements/requirements.in
daphne==4.2.1
    # via -r /awx_devel/requirements/requirements.in
-dispatcherd[pg_notify]==2025.12.10
+dispatcherd[pg_notify]==2025.12.12
    # via -r /awx_devel/requirements/requirements.in
distro==1.9.0
    # via -r /awx_devel/requirements/requirements.in