Mirror of https://github.com/ansible/awx.git (synced 2026-01-12 10:30:03 -03:30)

Merge pull request #6108 from rooftopcellist/rsyslog

Replace our external logging feature with Rsyslog

Reviewed-by: Ryan Petrello (https://github.com/ryanpetrello)

This commit is contained in:
commit 98a4e85db4

.gitignore (vendored, 1 change)
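For orientation before the file-by-file diff: this change drops AWX's in-process log shipping (the HTTPS/TCP/UDP handlers deleted below) in favor of a locally supervised rsyslogd. Django logging writes records to a unix socket, and rsyslog forwards them to the external aggregator. A minimal sketch of the wiring, assuming the handler name ('external_logger') and socket path ('/var/run/rsyslog/rsyslog.sock') that appear in the diff; the dict itself is illustrative, not the literal AWX settings file:

    # Illustrative Django LOGGING entry: records flow to a local unix
    # socket, where the rsyslogd configured by this PR picks them up.
    LOGGING = {
        'version': 1,
        'handlers': {
            'external_logger': {
                'class': 'awx.main.utils.handlers.RSysLogHandler',
                'address': '/var/run/rsyslog/rsyslog.sock',  # path from the diff
            },
        },
    }
    # reconfigure_rsyslog() (added below in awx/main/utils/external_logging.py)
    # then renders /var/lib/awx/rsyslog/rsyslog.conf from the LOG_AGGREGATOR_*
    # settings and restarts the awx-rsyslogd supervisor service.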
@@ -31,6 +31,7 @@ awx/ui/templates/ui/installing.html
awx/ui_next/node_modules/
awx/ui_next/coverage/
awx/ui_next/build/locales/_build
+rsyslog.pid
/tower-license
/tower-license/**
tools/prometheus/data

@@ -1 +1,2 @@
# Test Logging Configuration
@@ -325,17 +325,3 @@ def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting
    )
    assert response.data['FOO_BAR'] == 23

-
-@pytest.mark.django_db
-def test_setting_logging_test(api_request):
-    with mock.patch('awx.conf.views.AWXProxyHandler.perform_test') as mock_func:
-        api_request(
-            'post',
-            reverse('api:setting_logging_test'),
-            data={'LOG_AGGREGATOR_HOST': 'http://foobar', 'LOG_AGGREGATOR_TYPE': 'logstash'}
-        )
-        call = mock_func.call_args_list[0]
-        args, kwargs = call
-        given_settings = kwargs['custom_settings']
-        assert given_settings.LOG_AGGREGATOR_HOST == 'http://foobar'
-        assert given_settings.LOG_AGGREGATOR_TYPE == 'logstash'
@@ -3,7 +3,11 @@

# Python
import collections
+import logging
+import subprocess
import sys
+import socket
+from socket import SHUT_RDWR

# Django
from django.conf import settings
@@ -11,7 +15,7 @@ from django.http import Http404
from django.utils.translation import ugettext_lazy as _

# Django REST Framework
-from rest_framework.exceptions import PermissionDenied, ValidationError
+from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from rest_framework import serializers
from rest_framework import status
@@ -26,7 +30,6 @@ from awx.api.generics import (
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
-from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.models import Setting
from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
@@ -161,40 +164,47 @@ class SettingLoggingTest(GenericAPIView):
    filter_backends = []

    def post(self, request, *args, **kwargs):
-        defaults = dict()
-        for key in settings_registry.get_registered_settings(category_slug='logging'):
-            try:
-                defaults[key] = settings_registry.get_setting_field(key).get_default()
-            except serializers.SkipField:
-                defaults[key] = None
-        obj = type('Settings', (object,), defaults)()
-        serializer = self.get_serializer(obj, data=request.data)
-        serializer.is_valid(raise_exception=True)
-        # Special validation specific to logging test.
-        errors = {}
-        for key in ['LOG_AGGREGATOR_TYPE', 'LOG_AGGREGATOR_HOST']:
-            if not request.data.get(key, ''):
-                errors[key] = 'This field is required.'
-        if errors:
-            raise ValidationError(errors)
-
-        if request.data.get('LOG_AGGREGATOR_PASSWORD', '').startswith('$encrypted$'):
-            serializer.validated_data['LOG_AGGREGATOR_PASSWORD'] = getattr(
-                settings, 'LOG_AGGREGATOR_PASSWORD', ''
-            )
+        # Error if logging is not enabled
+        enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False)
+        if not enabled:
+            return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT)
+
+        # Send test message to configured logger based on db settings
+        logging.getLogger('awx').error('AWX Connection Test Message')
+
+        hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
+        protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None)
+
        try:
-            class MockSettings:
-                pass
-            mock_settings = MockSettings()
-            for k, v in serializer.validated_data.items():
-                setattr(mock_settings, k, v)
-            AWXProxyHandler().perform_test(custom_settings=mock_settings)
-            if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
-                return Response(status=status.HTTP_201_CREATED)
-        except LoggingConnectivityException as e:
-            return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
-        return Response(status=status.HTTP_200_OK)
+            subprocess.check_output(
+                ['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'],
+                stderr=subprocess.STDOUT
+            )
+        except subprocess.CalledProcessError as exc:
+            return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST)
+
+        # Check to ensure port is open at host
+        if protocol in ['udp', 'tcp']:
+            port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
+            # Error if port is not set when using UDP/TCP
+            if not port:
+                return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST)
+        else:
+            # if http/https by this point, domain is reachable
+            return Response(status=status.HTTP_202_ACCEPTED)
+
+        if protocol == 'udp':
+            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        else:
+            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        try:
+            s.settimeout(.5)
+            s.connect((hostname, int(port)))
+            s.shutdown(SHUT_RDWR)
+            s.close()
+            return Response(status=status.HTTP_202_ACCEPTED)
+        except Exception as e:
+            return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)


# Create view functions for all of the class-based views to simplify inclusion
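The reworked endpoint can be exercised by hand. A hedged sketch (the /api/v2/settings/logging/test/ path corresponds to the api:setting_logging_test route; the host and credentials are placeholders), with status codes matching the view logic above:

    import requests

    # Hypothetical smoke test against a local AWX install.
    resp = requests.post(
        'https://awx.example.com/api/v2/settings/logging/test/',
        auth=('admin', 'password'),
        json={},
    )
    # 409: LOG_AGGREGATOR_ENABLED is off
    # 400: rsyslogd rejected the generated config, or the udp/tcp port is
    #      missing or unreachable
    # 202: config validated (and, for udp/tcp, the socket connect succeeded)
    print(resp.status_code)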
@@ -667,7 +667,7 @@ register(
    allow_blank=True,
    default='',
    label=_('Logging Aggregator Username'),
-    help_text=_('Username for external log aggregator (if required).'),
+    help_text=_('Username for external log aggregator (if required; HTTP/s only).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,
@@ -679,7 +679,7 @@ register(
    default='',
    encrypted=True,
    label=_('Logging Aggregator Password/Token'),
-    help_text=_('Password or authentication token for external log aggregator (if required).'),
+    help_text=_('Password or authentication token for external log aggregator (if required; HTTP/s only).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,

@@ -38,7 +38,7 @@ ENV_BLACKLIST = frozenset((
    'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
    'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
    'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS',
-    'AWX_HOST', 'PROJECT_REVISION'
+    'AWX_HOST', 'PROJECT_REVISION', 'SUPERVISOR_WEB_CONFIG_PATH'
))

# loggers that may be called in process of emitting a log
@@ -7,7 +7,6 @@ from django.core.cache import cache as django_cache
from django.core.management.base import BaseCommand
from django.db import connection as django_connection

-from awx.main.utils.handlers import AWXProxyHandler
from awx.main.dispatch import get_local_queuename, reaper
from awx.main.dispatch.control import Control
from awx.main.dispatch.pool import AutoscalePool
@@ -56,11 +55,6 @@ class Command(BaseCommand):
        reaper.reap()
        consumer = None

-        # don't ship external logs inside the dispatcher's parent process
-        # this exists to work around a race condition + deadlock bug on fork
-        # in cpython itself:
-        # https://bugs.python.org/issue37429
-        AWXProxyHandler.disable()
        try:
            queues = ['tower_broadcast_all', get_local_queuename()]
            consumer = AWXConsumerPG(
@@ -73,6 +73,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url,
                            get_awx_version)
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.common import _get_ansible_version, get_custom_venv_choices
+from awx.main.utils.external_logging import reconfigure_rsyslog
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import stop_local_services
from awx.main.utils.pglock import advisory_lock
@@ -140,6 +141,9 @@ def dispatch_startup():
    # and Tower fall out of use/support, we can probably just _assume_ that
    # everybody has moved to bigint, and remove this code entirely
    enforce_bigint_pk_migration()

+    # Update Tower's rsyslog.conf file based on logging settings in the db
+    reconfigure_rsyslog()


def inform_cluster_of_shutdown():
@@ -280,6 +284,12 @@ def handle_setting_changes(setting_keys):
    logger.debug('cache delete_many(%r)', cache_keys)
    cache.delete_many(cache_keys)

+    if any([
+        setting.startswith('LOG_AGGREGATOR')
+        for setting in setting_keys
+    ]):
+        connection.on_commit(reconfigure_rsyslog)


@task(queue='tower_broadcast_all')
def delete_project_files(project_path):

@@ -5,17 +5,13 @@
# Python
import pytest
import os
import time

from django.conf import settings

# Mock
from unittest import mock

# AWX
from awx.api.versioning import reverse
from awx.conf.models import Setting
from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
from awx.conf.registry import settings_registry


TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
@@ -237,73 +233,95 @@ def test_ui_settings(get, put, patch, delete, admin):


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_requires_superuser(get, post, alice):
def test_logging_aggregator_connection_test_requires_superuser(post, alice):
    url = reverse('api:setting_logging_test')
    post(url, {}, user=alice, expect=403)


@pytest.mark.parametrize('key', [
    'LOG_AGGREGATOR_TYPE',
    'LOG_AGGREGATOR_HOST',
@pytest.mark.django_db
def test_logging_aggregator_connection_test_not_enabled(post, admin):
    url = reverse('api:setting_logging_test')
    resp = post(url, {}, user=admin, expect=409)
    assert 'Logging not enabled' in resp.data.get('error')


def _mock_logging_defaults():
    # Pre-populate settings obj with defaults
    class MockSettings:
        pass
    mock_settings_obj = MockSettings()
    mock_settings_json = dict()
    for key in settings_registry.get_registered_settings(category_slug='logging'):
        value = settings_registry.get_setting_field(key).get_default()
        setattr(mock_settings_obj, key, value)
        mock_settings_json[key] = value
    setattr(mock_settings_obj, 'MAX_EVENT_RES_DATA', 700000)
    return mock_settings_obj, mock_settings_json


@pytest.mark.parametrize('key, value, error', [
    ['LOG_AGGREGATOR_TYPE', 'logstash', 'Cannot enable log aggregator without providing host.'],
    ['LOG_AGGREGATOR_HOST', 'https://logstash', 'Cannot enable log aggregator without providing type.']
])
@pytest.mark.django_db
def test_logging_aggregrator_connection_test_bad_request(get, post, admin, key):
    url = reverse('api:setting_logging_test')
    resp = post(url, {}, user=admin, expect=400)
    assert 'This field is required.' in resp.data.get(key, [])


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_valid(mocker, get, post, admin):
    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
        url = reverse('api:setting_logging_test')
        user_data = {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080,
            'LOG_AGGREGATOR_USERNAME': 'logger',
            'LOG_AGGREGATOR_PASSWORD': 'mcstash'
        }
        post(url, user_data, user=admin, expect=200)
        args, kwargs = perform_test.call_args_list[0]
        create_settings = kwargs['custom_settings']
        for k, v in user_data.items():
            assert hasattr(create_settings, k)
            assert getattr(create_settings, k) == v


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_with_masked_password(mocker, patch, post, admin):
def test_logging_aggregator_missing_settings(put, post, admin, key, value, error):
    _, mock_settings = _mock_logging_defaults()
    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
    mock_settings[key] = value
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    patch(url, user=admin, data={'LOG_AGGREGATOR_PASSWORD': 'password123'}, expect=200)
    time.sleep(1)  # log settings are cached slightly
    response = put(url, data=mock_settings, user=admin, expect=400)
    assert error in str(response.data)

    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
        url = reverse('api:setting_logging_test')
        user_data = {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080,
            'LOG_AGGREGATOR_USERNAME': 'logger',
            'LOG_AGGREGATOR_PASSWORD': '$encrypted$'
        }
        post(url, user_data, user=admin, expect=200)
        args, kwargs = perform_test.call_args_list[0]
        create_settings = kwargs['custom_settings']
        assert getattr(create_settings, 'LOG_AGGREGATOR_PASSWORD') == 'password123'

@pytest.mark.parametrize('type, host, port, username, password', [
    ['logstash', 'localhost', 8080, 'logger', 'mcstash'],
    ['loggly', 'http://logs-01.loggly.com/inputs/1fd38090-hash-h4a$h-8d80-t0k3n71/tag/http/', None, None, None],
    ['splunk', 'https://yoursplunk:8088/services/collector/event', None, None, None],
    ['other', '97.221.40.41', 9000, 'logger', 'mcstash'],
    ['sumologic', 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/Zagnw_f9XGr_zZgd-_EPM0hb8_rUU7_RU8Q==',
     None, None, None]
])
@pytest.mark.django_db
def test_logging_aggregator_valid_settings(put, post, admin, type, host, port, username, password):
    _, mock_settings = _mock_logging_defaults()
    # type = 'splunk'
    # host = 'https://yoursplunk:8088/services/collector/event'
    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
    mock_settings['LOG_AGGREGATOR_TYPE'] = type
    mock_settings['LOG_AGGREGATOR_HOST'] = host
    if port:
        mock_settings['LOG_AGGREGATOR_PORT'] = port
    if username:
        mock_settings['LOG_AGGREGATOR_USERNAME'] = username
    if password:
        mock_settings['LOG_AGGREGATOR_PASSWORD'] = password
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    response = put(url, data=mock_settings, user=admin, expect=200)
    assert type in response.data.get('LOG_AGGREGATOR_TYPE')
    assert host in response.data.get('LOG_AGGREGATOR_HOST')
    if port:
        assert port == response.data.get('LOG_AGGREGATOR_PORT')
    if username:
        assert username in response.data.get('LOG_AGGREGATOR_USERNAME')
    if password:  # Note: password should be encrypted
        assert '$encrypted$' in response.data.get('LOG_AGGREGATOR_PASSWORD')


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_invalid(mocker, get, post, admin):
    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
        perform_test.side_effect = LoggingConnectivityException('404: Not Found')
        url = reverse('api:setting_logging_test')
        resp = post(url, {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080
        }, user=admin, expect=500)
        assert resp.data == {'error': '404: Not Found'}
def test_logging_aggregator_connection_test_valid(put, post, admin):
    _, mock_settings = _mock_logging_defaults()
    type = 'other'
    host = 'https://localhost'
    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
    mock_settings['LOG_AGGREGATOR_TYPE'] = type
    mock_settings['LOG_AGGREGATOR_HOST'] = host
    # POST to save these mock settings
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    put(url, data=mock_settings, user=admin, expect=200)
    # "Test" the logger
    url = reverse('api:setting_logging_test')
    post(url, {}, user=admin, expect=202)


@pytest.mark.django_db
awx/main/tests/unit/api/test_logger.py (new file, 160 lines)
@@ -0,0 +1,160 @@
import pytest

from django.conf import settings

from awx.main.utils.external_logging import construct_rsyslog_conf_template
from awx.main.tests.functional.api.test_settings import _mock_logging_defaults

'''
# Example User Data
data_logstash = {
    "LOG_AGGREGATOR_TYPE": "logstash",
    "LOG_AGGREGATOR_HOST": "localhost",
    "LOG_AGGREGATOR_PORT": 8080,
    "LOG_AGGREGATOR_PROTOCOL": "tcp",
    "LOG_AGGREGATOR_USERNAME": "logger",
    "LOG_AGGREGATOR_PASSWORD": "mcstash"
}

data_netcat = {
    "LOG_AGGREGATOR_TYPE": "other",
    "LOG_AGGREGATOR_HOST": "localhost",
    "LOG_AGGREGATOR_PORT": 9000,
    "LOG_AGGREGATOR_PROTOCOL": "udp",
}

data_loggly = {
    "LOG_AGGREGATOR_TYPE": "loggly",
    "LOG_AGGREGATOR_HOST": "http://logs-01.loggly.com/inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/",
    "LOG_AGGREGATOR_PORT": 8080,
    "LOG_AGGREGATOR_PROTOCOL": "https"
}
'''


# Test reconfigure logging settings function
# name this whatever you want
@pytest.mark.parametrize(
    'enabled, type, host, port, protocol, expected_config', [
        (
            True,
            'loggly',
            'http://logs-01.loggly.com/inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/',
            None,
            'https',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")',  # noqa
            ])
        ),
        (
            True,  # localhost w/ custom UDP port
            'other',
            'localhost',
            9000,
            'udp',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")',
                'action(type="omfwd" target="localhost" port="9000" protocol="udp" action.resumeRetryCount="-1" template="awx")',  # noqa
            ])
        ),
        (
            True,  # localhost w/ custom TCP port
            'other',
            'localhost',
            9000,
            'tcp',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")',
                'action(type="omfwd" target="localhost" port="9000" protocol="tcp" action.resumeRetryCount="-1" template="awx")',  # noqa
            ])
        ),
        (
            True,  # https, default port 443
            'splunk',
            'https://yoursplunk/services/collector/event',
            None,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # http, default port 80
            'splunk',
            'http://yoursplunk/services/collector/event',
            None,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # https, custom port in URL string
            'splunk',
            'https://yoursplunk:8088/services/collector/event',
            None,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # https, custom port explicitly specified
            'splunk',
            'https://yoursplunk/services/collector/event',
            8088,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # no scheme specified in URL, default to https, respect custom port
            'splunk',
            'yoursplunk.org/services/collector/event',
            8088,
            'https',
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")',  # noqa
            ])
        ),
        (
            True,  # respect custom http-only port
            'splunk',
            'http://yoursplunk.org/services/collector/event',
            8088,
            None,
            '\n'.join([
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")',  # noqa
            ])
        ),
    ]
)
def test_rsyslog_conf_template(enabled, type, host, port, protocol, expected_config):

    mock_settings, _ = _mock_logging_defaults()

    # Set test settings
    logging_defaults = getattr(settings, 'LOGGING')
    setattr(mock_settings, 'LOGGING', logging_defaults)
    setattr(mock_settings, 'LOGGING["handlers"]["external_logger"]["address"]', '/var/run/rsyslog/rsyslog.sock')
    setattr(mock_settings, 'LOG_AGGREGATOR_ENABLED', enabled)
    setattr(mock_settings, 'LOG_AGGREGATOR_TYPE', type)
    setattr(mock_settings, 'LOG_AGGREGATOR_HOST', host)
    if port:
        setattr(mock_settings, 'LOG_AGGREGATOR_PORT', port)
    if protocol:
        setattr(mock_settings, 'LOG_AGGREGATOR_PROTOCOL', protocol)

    # create rsyslog conf template
    tmpl = construct_rsyslog_conf_template(mock_settings)

    # check validity of created template
    assert expected_config in tmpl
@@ -1,393 +0,0 @@
# -*- coding: utf-8 -*-
import base64
import logging
import socket
import datetime
from dateutil.tz import tzutc
from io import StringIO
from uuid import uuid4

from unittest import mock

from django.conf import LazySettings
from django.utils.encoding import smart_str
import pytest
import requests
from requests_futures.sessions import FuturesSession

from awx.main.utils.handlers import (BaseHandler, BaseHTTPSHandler as HTTPSHandler,
                                     TCPHandler, UDPHandler, _encode_payload_for_socket,
                                     PARAM_NAMES, LoggingConnectivityException,
                                     AWXProxyHandler)
from awx.main.utils.formatters import LogstashFormatter


@pytest.fixture()
def https_adapter():
    class FakeHTTPSAdapter(requests.adapters.HTTPAdapter):
        requests = []
        status = 200
        reason = None

        def send(self, request, **kwargs):
            self.requests.append(request)
            resp = requests.models.Response()
            resp.status_code = self.status
            resp.reason = self.reason
            resp.request = request
            return resp

    return FakeHTTPSAdapter()


@pytest.fixture()
def connection_error_adapter():
    class ConnectionErrorAdapter(requests.adapters.HTTPAdapter):

        def send(self, request, **kwargs):
            err = requests.packages.urllib3.exceptions.SSLError()
            raise requests.exceptions.ConnectionError(err, request=request)

    return ConnectionErrorAdapter()


@pytest.fixture
def fake_socket(tmpdir_factory, request):
    sok = socket.socket
    sok.send = mock.MagicMock()
    sok.connect = mock.MagicMock()
    sok.setblocking = mock.MagicMock()
    sok.close = mock.MagicMock()
    return sok


def test_https_logging_handler_requests_async_implementation():
    handler = HTTPSHandler()
    assert isinstance(handler.session, FuturesSession)


def test_https_logging_handler_has_default_http_timeout():
    handler = TCPHandler()
    assert handler.tcp_timeout == 5


@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts'])
def test_base_logging_handler_defaults(param):
    handler = BaseHandler()
    assert hasattr(handler, param) and getattr(handler, param) is None


@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts'])
def test_base_logging_handler_kwargs(param):
    handler = BaseHandler(**{param: 'EXAMPLE'})
    assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE'


@pytest.mark.parametrize('params', [
    {
        'LOG_AGGREGATOR_HOST': 'https://server.invalid',
        'LOG_AGGREGATOR_PORT': 22222,
        'LOG_AGGREGATOR_TYPE': 'loggly',
        'LOG_AGGREGATOR_USERNAME': 'foo',
        'LOG_AGGREGATOR_PASSWORD': 'bar',
        'LOG_AGGREGATOR_INDIVIDUAL_FACTS': True,
        'LOG_AGGREGATOR_TCP_TIMEOUT': 96,
        'LOG_AGGREGATOR_VERIFY_CERT': False,
        'LOG_AGGREGATOR_PROTOCOL': 'https'
    },
    {
        'LOG_AGGREGATOR_HOST': 'https://server.invalid',
        'LOG_AGGREGATOR_PORT': 22222,
        'LOG_AGGREGATOR_PROTOCOL': 'udp'
    }
])
def test_real_handler_from_django_settings(params):
    settings = LazySettings()
    settings.configure(**params)
    handler = AWXProxyHandler().get_handler(custom_settings=settings)
    # need the _reverse_ dictionary from PARAM_NAMES
    attr_lookup = {}
    for attr_name, setting_name in PARAM_NAMES.items():
        attr_lookup[setting_name] = attr_name
    for setting_name, val in params.items():
        attr_name = attr_lookup[setting_name]
        if attr_name == 'protocol':
            continue
        assert hasattr(handler, attr_name)


def test_invalid_kwarg_to_real_handler():
    settings = LazySettings()
    settings.configure(**{
        'LOG_AGGREGATOR_HOST': 'https://server.invalid',
        'LOG_AGGREGATOR_PORT': 22222,
        'LOG_AGGREGATOR_PROTOCOL': 'udp',
        'LOG_AGGREGATOR_VERIFY_CERT': False  # setting not valid for UDP handler
    })
    handler = AWXProxyHandler().get_handler(custom_settings=settings)
    assert not hasattr(handler, 'verify_cert')


def test_protocol_not_specified():
    settings = LazySettings()
    settings.configure(**{
        'LOG_AGGREGATOR_HOST': 'https://server.invalid',
        'LOG_AGGREGATOR_PORT': 22222,
        'LOG_AGGREGATOR_PROTOCOL': None  # awx/settings/defaults.py
    })
    handler = AWXProxyHandler().get_handler(custom_settings=settings)
    assert isinstance(handler, logging.NullHandler)


def test_base_logging_handler_emit_system_tracking(dummy_log_record):
    handler = BaseHandler(host='127.0.0.1', indv_facts=True)
    handler.setFormatter(LogstashFormatter())
    dummy_log_record.name = 'awx.analytics.system_tracking'
    dummy_log_record.msg = None
    dummy_log_record.inventory_id = 11
    dummy_log_record.host_name = 'my_lucky_host'
    dummy_log_record.job_id = 777
    dummy_log_record.ansible_facts = {
        "ansible_kernel": "4.4.66-boot2docker",
        "ansible_machine": "x86_64",
        "ansible_swapfree_mb": 4663,
    }
    dummy_log_record.ansible_facts_modified = datetime.datetime.now(tzutc()).isoformat()
    sent_payloads = handler.emit(dummy_log_record)

    assert len(sent_payloads) == 1
    assert sent_payloads[0]['ansible_facts'] == dummy_log_record.ansible_facts
    assert sent_payloads[0]['ansible_facts_modified'] == dummy_log_record.ansible_facts_modified
    assert sent_payloads[0]['level'] == 'INFO'
    assert sent_payloads[0]['logger_name'] == 'awx.analytics.system_tracking'
    assert sent_payloads[0]['job_id'] == dummy_log_record.job_id
    assert sent_payloads[0]['inventory_id'] == dummy_log_record.inventory_id
    assert sent_payloads[0]['host_name'] == dummy_log_record.host_name


@pytest.mark.parametrize('host, port, normalized, hostname_only', [
    ('http://localhost', None, 'http://localhost', False),
    ('http://localhost', 8080, 'http://localhost:8080', False),
    ('https://localhost', 443, 'https://localhost:443', False),
    ('ftp://localhost', 443, 'ftp://localhost:443', False),
    ('https://localhost:550', 443, 'https://localhost:550', False),
    ('https://localhost:yoho/foobar', 443, 'https://localhost:443/foobar', False),
    ('https://localhost:yoho/foobar', None, 'https://localhost:yoho/foobar', False),
    ('http://splunk.server:8088/services/collector/event', 80,
     'http://splunk.server:8088/services/collector/event', False),
    ('http://splunk.server/services/collector/event', 8088,
     'http://splunk.server:8088/services/collector/event', False),
    ('splunk.server:8088/services/collector/event', 80,
     'http://splunk.server:8088/services/collector/event', False),
    ('splunk.server/services/collector/event', 8088,
     'http://splunk.server:8088/services/collector/event', False),
    ('localhost', None, 'http://localhost', False),
    ('localhost', 8080, 'http://localhost:8080', False),
    ('localhost', 4399, 'localhost', True),
    ('tcp://localhost:4399/foo/bar', 4399, 'localhost', True),
])
def test_base_logging_handler_host_format(host, port, normalized, hostname_only):
    handler = BaseHandler(host=host, port=port)
    assert handler._get_host(scheme='http', hostname_only=hostname_only) == normalized


@pytest.mark.parametrize(
    'status, reason, exc',
    [(200, '200 OK', None), (404, 'Not Found', LoggingConnectivityException)]
)
@pytest.mark.parametrize('protocol', ['http', 'https', None])
def test_https_logging_handler_connectivity_test(https_adapter, status, reason, exc, protocol):
    host = 'example.org'
    if protocol:
        host = '://'.join([protocol, host])
    https_adapter.status = status
    https_adapter.reason = reason
    settings = LazySettings()
    settings.configure(**{
        'LOG_AGGREGATOR_HOST': host,
        'LOG_AGGREGATOR_PORT': 8080,
        'LOG_AGGREGATOR_TYPE': 'logstash',
        'LOG_AGGREGATOR_USERNAME': 'user',
        'LOG_AGGREGATOR_PASSWORD': 'password',
        'LOG_AGGREGATOR_LOGGERS': ['awx', 'activity_stream', 'job_events', 'system_tracking'],
        'LOG_AGGREGATOR_PROTOCOL': 'https',
        'CLUSTER_HOST_ID': '',
        'LOG_AGGREGATOR_TOWER_UUID': str(uuid4()),
        'LOG_AGGREGATOR_LEVEL': 'DEBUG',
    })

    class FakeHTTPSHandler(HTTPSHandler):

        def __init__(self, *args, **kwargs):
            super(FakeHTTPSHandler, self).__init__(*args, **kwargs)
            self.session.mount('{}://'.format(protocol or 'https'), https_adapter)

        def emit(self, record):
            return super(FakeHTTPSHandler, self).emit(record)

    with mock.patch.object(AWXProxyHandler, 'get_handler_class') as mock_get_class:
        mock_get_class.return_value = FakeHTTPSHandler
        if exc:
            with pytest.raises(exc) as e:
                AWXProxyHandler().perform_test(settings)
            assert str(e).endswith('%s: %s' % (status, reason))
        else:
            assert AWXProxyHandler().perform_test(settings) is None


def test_https_logging_handler_logstash_auth_info():
    handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible')
    handler._add_auth_information()
    assert isinstance(handler.session.auth, requests.auth.HTTPBasicAuth)
    assert handler.session.auth.username == 'bob'
    assert handler.session.auth.password == 'ansible'


def test_https_logging_handler_splunk_auth_info():
    handler = HTTPSHandler(message_type='splunk', password='ansible')
    handler._add_auth_information()
    assert handler.session.headers['Authorization'] == 'Splunk ansible'
    assert handler.session.headers['Content-Type'] == 'application/json'


def test_https_logging_handler_connection_error(connection_error_adapter,
                                                dummy_log_record):
    handler = HTTPSHandler(host='127.0.0.1', message_type='logstash')
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('http://', connection_error_adapter)

    buff = StringIO()
    logging.getLogger('awx.main.utils.handlers').addHandler(
        logging.StreamHandler(buff)
    )

    async_futures = handler.emit(dummy_log_record)
    with pytest.raises(requests.exceptions.ConnectionError):
        [future.result() for future in async_futures]
    assert 'failed to emit log to external aggregator\nTraceback' in buff.getvalue()

    # we should only log failures *periodically*, so causing *another*
    # immediate failure shouldn't report a second ConnectionError
    buff.truncate(0)
    async_futures = handler.emit(dummy_log_record)
    with pytest.raises(requests.exceptions.ConnectionError):
        [future.result() for future in async_futures]
    assert buff.getvalue() == ''


@pytest.mark.parametrize('message_type', ['logstash', 'splunk'])
def test_https_logging_handler_emit_without_cred(https_adapter, dummy_log_record,
                                                 message_type):
    handler = HTTPSHandler(host='127.0.0.1', message_type=message_type)
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('https://', https_adapter)
    async_futures = handler.emit(dummy_log_record)
    [future.result() for future in async_futures]

    assert len(https_adapter.requests) == 1
    request = https_adapter.requests[0]
    assert request.url == 'https://127.0.0.1/'
    assert request.method == 'POST'

    if message_type == 'logstash':
        # A username + password weren't used, so this header should be missing
        assert 'Authorization' not in request.headers

    if message_type == 'splunk':
        assert request.headers['Authorization'] == 'Splunk None'


def test_https_logging_handler_emit_logstash_with_creds(https_adapter,
                                                        dummy_log_record):
    handler = HTTPSHandler(host='127.0.0.1',
                           username='user', password='pass',
                           message_type='logstash')
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('https://', https_adapter)
    async_futures = handler.emit(dummy_log_record)
    [future.result() for future in async_futures]

    assert len(https_adapter.requests) == 1
    request = https_adapter.requests[0]
    assert request.headers['Authorization'] == 'Basic %s' % smart_str(base64.b64encode(b"user:pass"))


def test_https_logging_handler_emit_splunk_with_creds(https_adapter,
                                                      dummy_log_record):
    handler = HTTPSHandler(host='127.0.0.1',
                           password='pass', message_type='splunk')
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('https://', https_adapter)
    async_futures = handler.emit(dummy_log_record)
    [future.result() for future in async_futures]

    assert len(https_adapter.requests) == 1
    request = https_adapter.requests[0]
    assert request.headers['Authorization'] == 'Splunk pass'


@pytest.mark.parametrize('payload, encoded_payload', [
    ('foobar', 'foobar'),
    ({'foo': 'bar'}, '{"foo": "bar"}'),
    ({u'测试键': u'测试值'}, '{"测试键": "测试值"}'),
])
def test_encode_payload_for_socket(payload, encoded_payload):
    assert _encode_payload_for_socket(payload).decode('utf-8') == encoded_payload


def test_udp_handler_create_socket_at_init():
    handler = UDPHandler(host='127.0.0.1', port=4399)
    assert hasattr(handler, 'socket')
    assert isinstance(handler.socket, socket.socket)
    assert handler.socket.family == socket.AF_INET
    assert handler.socket.type == socket.SOCK_DGRAM


def test_udp_handler_send(dummy_log_record):
    handler = UDPHandler(host='127.0.0.1', port=4399)
    handler.setFormatter(LogstashFormatter())
    with mock.patch('awx.main.utils.handlers._encode_payload_for_socket', return_value="des") as encode_mock,\
            mock.patch.object(handler, 'socket') as socket_mock:
        handler.emit(dummy_log_record)
        encode_mock.assert_called_once_with(handler.format(dummy_log_record))
        socket_mock.sendto.assert_called_once_with("des", ('127.0.0.1', 4399))


def test_tcp_handler_send(fake_socket, dummy_log_record):
    handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
    handler.setFormatter(LogstashFormatter())
    with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
            mock.patch('select.select', return_value=([], [fake_socket], [])):
        handler.emit(dummy_log_record)
        sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
        fake_socket.connect.assert_called_once_with(('127.0.0.1', 4399))
        fake_socket.setblocking.assert_called_once_with(0)
        fake_socket.send.assert_called_once_with(handler.format(dummy_log_record))
        fake_socket.close.assert_called_once()


def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record):
    handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
    handler.setFormatter(LogstashFormatter())
    with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
            mock.patch('select.select', return_value=([], [], [])):
        handler.emit(dummy_log_record)
        sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
        fake_socket.connect.assert_called_once_with(('127.0.0.1', 4399))
        fake_socket.setblocking.assert_called_once_with(0)
        assert not fake_socket.send.called
        fake_socket.close.assert_called_once()


def test_tcp_handler_log_exception(fake_socket, dummy_log_record):
    handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
    handler.setFormatter(LogstashFormatter())
    with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
            mock.patch('select.select', return_value=([], [], [])),\
            mock.patch('awx.main.utils.handlers.logger') as logger_mock:
        fake_socket.connect.side_effect = Exception("foo")
        handler.emit(dummy_log_record)
        sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
        logger_mock.exception.assert_called_once()
        fake_socket.close.assert_called_once()
        assert not fake_socket.send.called
@@ -8,7 +8,7 @@ def test_produce_supervisor_command(mocker):
    mock_process.communicate = communicate_mock
    Popen_mock = mocker.MagicMock(return_value=mock_process)
    with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
-        reload._supervisor_service_command("restart")
+        reload.supervisor_service_command("restart")
        reload.subprocess.Popen.assert_called_once_with(
            ['supervisorctl', 'restart', 'tower-processes:*',],
            stderr=-1, stdin=-1, stdout=-1)
awx/main/utils/external_logging.py (new file, 84 lines)
@@ -0,0 +1,84 @@
import urllib.parse as urlparse

from django.conf import settings

from awx.main.utils.reload import supervisor_service_command


def construct_rsyslog_conf_template(settings=settings):
    tmpl = ''
    parts = []
    host = getattr(settings, 'LOG_AGGREGATOR_HOST', '')
    port = getattr(settings, 'LOG_AGGREGATOR_PORT', '')
    protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', '')
    timeout = str(getattr(settings, 'LOG_AGGREGATOR_TCP_TIMEOUT', 5) * 1000)
    if protocol.startswith('http'):
        scheme = 'https'
        # urlparse requires '//' to be provided if scheme is not specified
        original_parsed = urlparse.urlsplit(host)
        if (not original_parsed.scheme and not host.startswith('//')) or original_parsed.hostname is None:
            host = '%s://%s' % (scheme, host) if scheme else '//%s' % host
        parsed = urlparse.urlsplit(host)

        host = parsed.hostname
        try:
            if parsed.port:
                port = parsed.port
        except ValueError:
            port = settings.LOG_AGGREGATOR_PORT
    max_bytes = settings.MAX_EVENT_RES_DATA
    parts.extend([
        '$WorkDirectory /var/lib/awx/rsyslog',
        f'$MaxMessageSize {max_bytes}',
        '$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf',
        '$ModLoad imuxsock',
        'input(type="imuxsock" Socket="' + settings.LOGGING['handlers']['external_logger']['address'] + '" unlink="on")',
        'template(name="awx" type="string" string="%rawmsg-after-pri%")',
    ])
    if protocol.startswith('http'):
        # https://github.com/rsyslog/rsyslog-doc/blob/master/source/configuration/modules/omhttp.rst
        ssl = "on" if parsed.scheme == 'https' else "off"
        skip_verify = "off" if settings.LOG_AGGREGATOR_VERIFY_CERT else "on"
        if not port:
            port = 443 if parsed.scheme == 'https' else 80

        params = [
            'type="omhttp"',
            f'server="{host}"',
            f'serverport="{port}"',
            f'usehttps="{ssl}"',
            f'skipverifyhost="{skip_verify}"',
            'action.resumeRetryCount="-1"',
            'template="awx"',
            'errorfile="/var/log/tower/rsyslog.err"',
            f'healthchecktimeout="{timeout}"',
        ]
        if parsed.path:
            path = urlparse.quote(parsed.path[1:])
            if parsed.query:
                path = f'{path}?{urlparse.quote(parsed.query)}'
            params.append(f'restpath="{path}"')
        username = getattr(settings, 'LOG_AGGREGATOR_USERNAME', '')
        password = getattr(settings, 'LOG_AGGREGATOR_PASSWORD', '')
        if username:
            params.append(f'uid="{username}"')
        if username and password:
            # you can only have a basic auth password if there's a username
            params.append(f'pwd="{password}"')
        params = ' '.join(params)
        parts.extend(['module(load="omhttp")', f'action({params})'])
    elif protocol and host and port:
        parts.append(
            f'action(type="omfwd" target="{host}" port="{port}" protocol="{protocol}" action.resumeRetryCount="-1" template="awx")'  # noqa
        )
    else:
        parts.append(f'action(type="omfile" file="/dev/null")')  # rsyslog needs *at least* one valid action to start
    tmpl = '\n'.join(parts)
    return tmpl


def reconfigure_rsyslog():
    tmpl = construct_rsyslog_conf_template()
    with open('/var/lib/awx/rsyslog/rsyslog.conf', 'w') as f:
        f.write(tmpl + '\n')
    supervisor_service_command(command='restart', service='awx-rsyslogd')
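To make the template behavior concrete, a sketch of driving construct_rsyslog_conf_template directly with a hand-built settings object; MockSettings mirrors the pattern used in the tests, and the expected omfwd line is taken from the test_rsyslog_conf_template cases above:

    from awx.main.utils.external_logging import construct_rsyslog_conf_template

    class MockSettings:
        pass

    mock_settings = MockSettings()
    mock_settings.LOGGING = {
        'handlers': {'external_logger': {'address': '/var/run/rsyslog/rsyslog.sock'}}
    }
    mock_settings.MAX_EVENT_RES_DATA = 700000
    mock_settings.LOG_AGGREGATOR_ENABLED = True
    mock_settings.LOG_AGGREGATOR_TYPE = 'other'
    mock_settings.LOG_AGGREGATOR_HOST = 'localhost'
    mock_settings.LOG_AGGREGATOR_PORT = 9000
    mock_settings.LOG_AGGREGATOR_PROTOCOL = 'tcp'

    conf = construct_rsyslog_conf_template(mock_settings)
    # non-http protocols take the omfwd branch above
    assert 'action(type="omfwd" target="localhost" port="9000" protocol="tcp" action.resumeRetryCount="-1" template="awx")' in conf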
@@ -97,7 +97,7 @@ class LogstashFormatterBase(logging.Formatter):

    @classmethod
    def serialize(cls, message):
-        return bytes(json.dumps(message, cls=DjangoJSONEncoder), 'utf-8')
+        return json.dumps(message, cls=DjangoJSONEncoder) + '\n'


class LogstashFormatter(LogstashFormatterBase):
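Two things change in serialize(): it returns str instead of bytes, and it appends a newline. The str return matters because logging.handlers.SysLogHandler (which the new RSysLogHandler below subclasses) encodes the formatted message to bytes itself; the trailing newline presumably delimits records on their way to rsyslog. A tiny illustration with made-up values:

    import json
    from django.core.serializers.json import DjangoJSONEncoder

    message = {'message': 'AWX Connection Test Message', 'logger_name': 'awx'}
    # old: bytes(json.dumps(message, cls=DjangoJSONEncoder), 'utf-8')
    # new:
    serialized = json.dumps(message, cls=DjangoJSONEncoder) + '\n'
    assert isinstance(serialized, str) and serialized.endswith('\n')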
@ -3,404 +3,29 @@
|
||||
|
||||
# Python
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
import time
|
||||
import threading
|
||||
import socket
|
||||
import select
|
||||
from urllib import parse as urlparse
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from requests.exceptions import RequestException
|
||||
import os.path
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
|
||||
# requests futures, a dependency used by these handlers
|
||||
from requests_futures.sessions import FuturesSession
|
||||
import cachetools
|
||||
|
||||
# AWX
|
||||
from awx.main.utils.formatters import LogstashFormatter
|
||||
class RSysLogHandler(logging.handlers.SysLogHandler):
|
||||
|
||||
append_nul = False
|
||||
|
||||
__all__ = ['BaseHTTPSHandler', 'TCPHandler', 'UDPHandler',
|
||||
'AWXProxyHandler']
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.utils.handlers')
|
||||
|
||||
|
||||
# Translation of parameter names to names in Django settings
|
||||
# logging settings category, only those related to handler / log emission
|
||||
PARAM_NAMES = {
|
||||
'host': 'LOG_AGGREGATOR_HOST',
|
||||
'port': 'LOG_AGGREGATOR_PORT',
|
||||
'message_type': 'LOG_AGGREGATOR_TYPE',
|
||||
'username': 'LOG_AGGREGATOR_USERNAME',
|
||||
'password': 'LOG_AGGREGATOR_PASSWORD',
|
||||
'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS',
|
||||
'tcp_timeout': 'LOG_AGGREGATOR_TCP_TIMEOUT',
|
||||
'verify_cert': 'LOG_AGGREGATOR_VERIFY_CERT',
|
||||
'protocol': 'LOG_AGGREGATOR_PROTOCOL'
|
||||
}
|
||||
|
||||
|
||||
def unused_callback(sess, resp):
|
||||
pass
|
||||
|
||||
|
||||
class LoggingConnectivityException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class VerboseThreadPoolExecutor(ThreadPoolExecutor):
|
||||
|
||||
last_log_emit = 0
|
||||
|
||||
def submit(self, func, *args, **kwargs):
|
||||
def _wrapped(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
# If an exception occurs in a concurrent thread worker (like
|
||||
# a ConnectionError or a read timeout), periodically log
|
||||
# that failure.
|
||||
#
|
||||
# This approach isn't really thread-safe, so we could
|
||||
# potentially log once per thread every 10 seconds, but it
|
||||
# beats logging *every* failed HTTP request in a scenario where
|
||||
# you've typo'd your log aggregator hostname.
|
||||
now = time.time()
|
||||
if now - self.last_log_emit > 10:
|
||||
logger.exception('failed to emit log to external aggregator')
|
||||
self.last_log_emit = now
|
||||
raise
|
||||
return super(VerboseThreadPoolExecutor, self).submit(_wrapped, *args,
|
||||
**kwargs)
|
||||
|
||||
|
||||
class SocketResult:
|
||||
'''
|
||||
A class to be the return type of methods that send data over a socket
|
||||
allows object to be used in the same way as a request futures object
|
||||
'''
|
||||
def __init__(self, ok, reason=None):
|
||||
self.ok = ok
|
||||
self.reason = reason
|
||||
|
||||
def result(self):
|
||||
return self
|
||||
|
||||
|
||||
class BaseHandler(logging.Handler):
|
||||
def __init__(self, host=None, port=None, indv_facts=None, **kwargs):
|
||||
super(BaseHandler, self).__init__()
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.indv_facts = indv_facts
|
||||
|
||||
def _send(self, payload):
|
||||
"""Actually send message to log aggregator.
|
||||
"""
|
||||
return payload
|
||||
|
||||
def _format_and_send_record(self, record):
|
||||
if self.indv_facts:
|
||||
return [self._send(json.loads(self.format(record)))]
|
||||
return [self._send(self.format(record))]
|
||||
|
||||
def emit(self, record):
|
||||
"""
|
||||
Emit a log record. Returns a list of zero or more
|
||||
implementation-specific objects for tests.
|
||||
"""
|
||||
def emit(self, msg):
|
||||
if not settings.LOG_AGGREGATOR_ENABLED:
|
||||
return
|
||||
if not os.path.exists(settings.LOGGING['handlers']['external_logger']['address']):
|
||||
return
|
||||
try:
|
||||
return self._format_and_send_record(record)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except Exception:
|
||||
self.handleError(record)
|
||||
|
||||
def _get_host(self, scheme='', hostname_only=False):
|
||||
"""Return the host name of log aggregator.
|
||||
"""
|
||||
host = self.host or ''
|
||||
# urlparse requires '//' to be provided if scheme is not specified
|
||||
original_parsed = urlparse.urlsplit(host)
|
||||
if (not original_parsed.scheme and not host.startswith('//')) or original_parsed.hostname is None:
|
||||
host = '%s://%s' % (scheme, host) if scheme else '//%s' % host
|
||||
parsed = urlparse.urlsplit(host)
|
||||
|
||||
if hostname_only:
|
||||
return parsed.hostname
|
||||
|
||||
try:
|
||||
port = parsed.port or self.port
|
||||
except ValueError:
|
||||
port = self.port
|
||||
netloc = parsed.netloc if port is None else '%s:%s' % (parsed.hostname, port)
|
||||
|
||||
url_components = list(parsed)
|
||||
url_components[1] = netloc
|
||||
ret = urlparse.urlunsplit(url_components)
|
||||
return ret.lstrip('/')
|
||||
|
||||
|
||||
class BaseHTTPSHandler(BaseHandler):
|
||||
'''
|
||||
Originally derived from python-logstash library
|
||||
Non-blocking request accomplished by FuturesSession, similar
|
||||
to the loggly-python-handler library
|
||||
'''
|
||||
def _add_auth_information(self):
|
||||
if self.message_type == 'logstash':
|
||||
if not self.username:
|
||||
# Logstash authentication not enabled
|
||||
return
|
||||
logstash_auth = requests.auth.HTTPBasicAuth(self.username, self.password)
|
||||
self.session.auth = logstash_auth
|
||||
elif self.message_type == 'splunk':
|
||||
auth_header = "Splunk %s" % self.password
|
||||
headers = {
|
||||
"Authorization": auth_header,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
self.session.headers.update(headers)
|
||||
|
||||
def __init__(self, fqdn=False, message_type=None, username=None, password=None,
|
||||
tcp_timeout=5, verify_cert=True, **kwargs):
|
||||
self.fqdn = fqdn
|
||||
self.message_type = message_type
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.tcp_timeout = tcp_timeout
|
||||
self.verify_cert = verify_cert
|
||||
super(BaseHTTPSHandler, self).__init__(**kwargs)
|
||||
self.session = FuturesSession(executor=VerboseThreadPoolExecutor(
|
||||
max_workers=2 # this is the default used by requests_futures
|
||||
))
|
||||
self._add_auth_information()
|
||||
|
||||
def _get_post_kwargs(self, payload_input):
|
||||
if self.message_type == 'splunk':
|
||||
# Splunk needs data nested under key "event"
|
||||
if not isinstance(payload_input, dict):
|
||||
payload_input = json.loads(payload_input)
|
||||
payload_input = {'event': payload_input}
|
||||
if isinstance(payload_input, dict):
|
||||
payload_str = json.dumps(payload_input)
|
||||
else:
|
||||
payload_str = payload_input
|
||||
kwargs = dict(data=payload_str, background_callback=unused_callback,
|
||||
timeout=self.tcp_timeout)
|
||||
if self.verify_cert is False:
|
||||
kwargs['verify'] = False
|
||||
return kwargs
|
||||
|
||||
|
||||
def _send(self, payload):
|
||||
"""See:
|
||||
https://docs.python.org/3/library/concurrent.futures.html#future-objects
|
||||
http://pythonhosted.org/futures/
|
||||
"""
|
||||
return self.session.post(self._get_host(scheme='https'),
|
||||
**self._get_post_kwargs(payload))
|
||||
|
||||
|
||||
def _encode_payload_for_socket(payload):
|
||||
encoded_payload = payload
|
||||
if isinstance(encoded_payload, dict):
|
||||
encoded_payload = json.dumps(encoded_payload, ensure_ascii=False)
|
||||
if isinstance(encoded_payload, str):
|
||||
encoded_payload = encoded_payload.encode('utf-8')
|
||||
return encoded_payload
|
||||
|
||||
|
||||

class TCPHandler(BaseHandler):
    def __init__(self, tcp_timeout=5, **kwargs):
        self.tcp_timeout = tcp_timeout
        super(TCPHandler, self).__init__(**kwargs)

    def _send(self, payload):
        payload = _encode_payload_for_socket(payload)
        sok = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sok.connect((self._get_host(hostname_only=True), self.port or 0))
            sok.setblocking(0)
            _, ready_to_send, _ = select.select([], [sok], [], float(self.tcp_timeout))
            if len(ready_to_send) == 0:
                ret = SocketResult(False, "Socket currently busy, failed to send message")
                logger.warning(ret.reason)
            else:
                sok.send(payload)
                ret = SocketResult(True)  # success!
        except Exception as e:
            ret = SocketResult(False, "Error sending message from %s: %s" %
                               (TCPHandler.__name__,
                                ' '.join(str(arg) for arg in e.args)))
            logger.exception(ret.reason)
        finally:
            sok.close()
        return ret


class UDPHandler(BaseHandler):
    message = "Cannot determine if UDP messages are received."

    def __init__(self, **kwargs):
        super(UDPHandler, self).__init__(**kwargs)
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def _send(self, payload):
        payload = _encode_payload_for_socket(payload)
        self.socket.sendto(payload, (self._get_host(hostname_only=True), self.port or 0))
        return SocketResult(True, reason=self.message)

class AWXNullHandler(logging.NullHandler):
    '''
    The only additional thing this does is accept arbitrary __init__ params,
    because the proxy handler does not (yet) work with arbitrary handler classes.
    '''
    def __init__(self, *args, **kwargs):
        super(AWXNullHandler, self).__init__()

HANDLER_MAPPING = {
    'https': BaseHTTPSHandler,
    'tcp': TCPHandler,
    'udp': UDPHandler,
}


TTLCache = cachetools.TTLCache

if 'py.test' in os.environ.get('_', ''):
    # don't cache settings in unit tests
    class TTLCache(TTLCache):

        def __getitem__(self, item):
            raise KeyError()
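
The always-raising __getitem__ above forces a cache miss on every lookup, which effectively disables memoization under py.test. The normal behavior it disables, as a minimal cachetools illustration (names illustrative):

import cachetools

@cachetools.cached(cache=cachetools.TTLCache(maxsize=1, ttl=3), key=lambda *a, **kw: 'k')
def current_config():
    print('recomputed')  # runs at most once per 3-second window
    return {'enabled': True}

current_config()  # prints 'recomputed'
current_config()  # served from the TTL cache; no print
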

class AWXProxyHandler(logging.Handler):
    '''
    Handler specific to the AWX external logging feature

    Will dynamically create a handler specific to the configured
    protocol, and will create a new one automatically on setting change

    Managing parameters:
        All parameters will get their value from settings as a default;
        if a parameter was either provided on init or set manually,
        that value will take precedence.
        Parameters match the same parameters in the actualized handler classes.
    '''

    thread_local = threading.local()
    _auditor = None

    def __init__(self, **kwargs):
        # TODO: process 'level' kwarg
        super(AWXProxyHandler, self).__init__(**kwargs)
        self._handler = None
        self._old_kwargs = {}

    @property
    def auditor(self):
        if not self._auditor:
            self._auditor = logging.handlers.RotatingFileHandler(
                filename='/var/log/tower/external.log',
                maxBytes=1024 * 1024 * 50,  # 50 MB
                backupCount=5,
            )

            class WritableLogstashFormatter(LogstashFormatter):
                @classmethod
                def serialize(cls, message):
                    return json.dumps(message)

            self._auditor.setFormatter(WritableLogstashFormatter())
        return self._auditor

    def get_handler_class(self, protocol):
        return HANDLER_MAPPING.get(protocol, AWXNullHandler)

    @cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'get_handler')
    def get_handler(self, custom_settings=None, force_create=False):
        new_kwargs = {}
        use_settings = custom_settings or settings
        for field_name, setting_name in PARAM_NAMES.items():
            val = getattr(use_settings, setting_name, None)
            if val is None:
                continue
            new_kwargs[field_name] = val
        if new_kwargs == self._old_kwargs and self._handler and (not force_create):
            # avoids re-creating session objects, and other such things
            return self._handler
        self._old_kwargs = new_kwargs.copy()
        # TODO: remove any kwargs not applicable to that particular handler
        protocol = new_kwargs.pop('protocol', None)
        HandlerClass = self.get_handler_class(protocol)
        # cleanup old handler and make new one
        if self._handler:
            self._handler.close()
        logger.debug('Creating external log handler due to startup or settings change.')
        self._handler = HandlerClass(**new_kwargs)
        if self.formatter:
            # self.format(record) is called inside of emit method
            # so not safe to assume this can be handled within self
            self._handler.setFormatter(self.formatter)
        return self._handler

    @cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'should_audit')
    def should_audit(self):
        return settings.LOG_AGGREGATOR_AUDIT

    def emit(self, record):
        if AWXProxyHandler.thread_local.enabled:
            actual_handler = self.get_handler()
            if self.should_audit():
                self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
                self.auditor.emit(record)
            return actual_handler.emit(record)

    def perform_test(self, custom_settings):
        """
        Tests logging connectivity for given settings module.
        @raises LoggingConnectivityException
        """
        handler = self.get_handler(custom_settings=custom_settings, force_create=True)
        handler.setFormatter(LogstashFormatter())
        logger = logging.getLogger(__file__)
        fn, lno, func, _ = logger.findCaller()
        record = logger.makeRecord('awx', 10, fn, lno,
                                   'AWX Connection Test', tuple(),
                                   None, func)
        futures = handler.emit(record)
        for future in futures:
            try:
                resp = future.result()
                if not resp.ok:
                    if isinstance(resp, SocketResult):
                        raise LoggingConnectivityException(
                            'Socket error: {}'.format(resp.reason or '')
                        )
                    else:
                        raise LoggingConnectivityException(
                            ': '.join([str(resp.status_code), resp.reason or ''])
                        )
            except RequestException as e:
                raise LoggingConnectivityException(str(e))

    @classmethod
    def disable(cls):
        cls.thread_local.enabled = False


AWXProxyHandler.thread_local.enabled = True
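
Everything from class AWXProxyHandler down to this line is machinery that this PR deletes in favor of rsyslog. For reference, the pattern it implemented, reduced to a hedged sketch (names here are illustrative, not AWX's actual API):

import logging

class ProxyHandler(logging.Handler):
    # A proxy owns no transport itself: on every emit it resolves the
    # currently-configured concrete handler and delegates, so a settings
    # change swaps transports without reconfiguring the logging tree.
    def __init__(self, resolve):
        super().__init__()
        self._resolve = resolve  # callable returning the live handler

    def emit(self, record):
        self._resolve().emit(record)
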
            return super(RSysLogHandler, self).emit(msg)
        except ConnectionRefusedError:
            # rsyslogd has gone to lunch; this generally means that it's just
            # been restarted (due to a configuration change)
            # unfortunately, we can't log that, because... rsyslogd is down
            # (and logging it would just send us back down this same code path)
            pass


ColorHandler = logging.StreamHandler
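
The hunk above shows only the tail of the new emit(); assuming (as the LOGGING change below indicates) that RSysLogHandler subclasses the stdlib SysLogHandler and writes to the local rsyslog unix socket, its overall shape is roughly:

import logging.handlers

class RSysLogHandler(logging.handlers.SysLogHandler):
    # sketch only; the real class lives in awx.main.utils.handlers
    def emit(self, msg):
        try:
            return super(RSysLogHandler, self).emit(msg)
        except ConnectionRefusedError:
            # rsyslogd is restarting; drop the record rather than recurse
            pass
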

@ -4,25 +4,24 @@
# Python
import subprocess
import logging
import os

# Django
from django.conf import settings

logger = logging.getLogger('awx.main.utils.reload')


def _supervisor_service_command(command, communicate=True):
def supervisor_service_command(command, service='*', communicate=True):
    '''
    example use pattern of supervisorctl:
    # supervisorctl restart tower-processes:receiver tower-processes:factcacher
    '''
    group_name = 'tower-processes'
    if settings.DEBUG:
        group_name = 'awx-processes'
    args = ['supervisorctl']
    if settings.DEBUG:
        args.extend(['-c', '/supervisor.conf'])
    args.extend([command, '{}:*'.format(group_name)])

    supervisor_config_path = os.getenv('SUPERVISOR_WEB_CONFIG_PATH', None)
    if supervisor_config_path:
        args.extend(['-c', supervisor_config_path])

    args.extend([command, ':'.join(['tower-processes', service])])
    logger.debug('Issuing command to {} services, args={}'.format(command, args))
    supervisor_process = subprocess.Popen(args, stdin=subprocess.PIPE,
                                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)

@ -41,4 +40,4 @@ def _supervisor_service_command(command, communicate=True):

def stop_local_services(communicate=True):
    logger.warn('Stopping services on this node in response to user action')
    _supervisor_service_command(command='stop', communicate=communicate)
    supervisor_service_command(command='stop', communicate=communicate)
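
With the new service parameter, callers can target a single supervisor-managed program instead of the whole group; for example (illustrative calls):

# restart only tower-processes:awx-rsyslogd, e.g. after its config is rewritten
supervisor_service_command('restart', service='awx-rsyslogd')

# the '*' default preserves the old stop-everything behavior
supervisor_service_command('stop')
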

@ -1011,8 +1011,9 @@ LOGGING = {
            'formatter': 'simple',
        },
        'external_logger': {
            'class': 'awx.main.utils.handlers.AWXProxyHandler',
            'class': 'awx.main.utils.handlers.RSysLogHandler',
            'formatter': 'json',
            'address': '/var/run/rsyslog/rsyslog.sock',
            'filters': ['external_log_enabled', 'dynamic_level_filter'],
        },
        'tower_warnings': {
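
Note the 'address' key: it is passed through to the handler constructor, and the stdlib logging.handlers.SysLogHandler accepts a filesystem path for a unix socket, which is how records reach the local rsyslogd. Minimal stdlib illustration:

import logging
import logging.handlers

handler = logging.handlers.SysLogHandler(address='/var/run/rsyslog/rsyslog.sock')
logging.getLogger('awx').addHandler(handler)
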

@ -92,6 +92,7 @@ export default [
        var populateFromApi = function() {
            SettingsService.getCurrentValues()
                .then(function(data) {
                    $scope.logAggregatorEnabled = data.LOG_AGGREGATOR_ENABLED;
                    // these two values need to be unnested from the
                    // OAUTH2_PROVIDER key
                    data.ACCESS_TOKEN_EXPIRE_SECONDS = data

@ -538,8 +539,11 @@ export default [
            var payload = {};
            payload[key] = $scope[key];
            SettingsService.patchConfiguration(payload)
                .then(function() {
                .then(function(data) {
                    //TODO consider updating form values with returned data here
                    if (key === 'LOG_AGGREGATOR_ENABLED') {
                        $scope.logAggregatorEnabled = data.LOG_AGGREGATOR_ENABLED;
                    }
                })
                .catch(function(data) {
                    //Change back on unsuccessful update

@ -17,7 +17,7 @@ export default [
    'ProcessErrors',
    'ngToast',
    '$filter',
    function(
    function (
        $rootScope, $scope, $stateParams,
        systemActivityStreamForm,
        systemLoggingForm,

@ -41,8 +41,8 @@ export default [
            formTracker.setCurrentSystem(activeSystemForm);
        }

        var activeForm = function(tab) {
            if(!_.get($scope.$parent, [formTracker.currentFormName(), '$dirty'])) {
        var activeForm = function (tab) {
            if (!_.get($scope.$parent, [formTracker.currentFormName(), '$dirty'])) {
                systemVm.activeSystemForm = tab;
                formTracker.setCurrentSystem(systemVm.activeSystemForm);
            } else {

@ -52,7 +52,7 @@ export default [
                    label: i18n._('Discard changes'),
                    "class": "btn Form-cancelButton",
                    "id": "formmodal-cancel-button",
                    onClick: function() {
                    onClick: function () {
                        $scope.$parent.vm.populateFromApi();
                        $scope.$parent[formTracker.currentFormName()].$setPristine();
                        systemVm.activeSystemForm = tab;

@ -61,15 +61,15 @@ export default [
                    }
                }, {
                    label: i18n._('Save changes'),
                    onClick: function() {
                    onClick: function () {
                        $scope.$parent.vm.formSave()
                            .then(function() {
                                $scope.$parent[formTracker.currentFormName()].$setPristine();
                                $scope.$parent.vm.populateFromApi();
                                systemVm.activeSystemForm = tab;
                                formTracker.setCurrentSystem(systemVm.activeSystemForm);
                                $('#FormModal-dialog').dialog('close');
                            });
                            .then(function () {
                                $scope.$parent[formTracker.currentFormName()].$setPristine();
                                $scope.$parent.vm.populateFromApi();
                                systemVm.activeSystemForm = tab;
                                formTracker.setCurrentSystem(systemVm.activeSystemForm);
                                $('#FormModal-dialog').dialog('close');
                            });
                    },
                    "class": "btn btn-primary",
                    "id": "formmodal-save-button"

@ -80,9 +80,9 @@ export default [
        };

        var dropdownOptions = [
            {label: i18n._('Misc. System'), value: 'misc'},
            {label: i18n._('Activity Stream'), value: 'activity_stream'},
            {label: i18n._('Logging'), value: 'logging'},
            { label: i18n._('Misc. System'), value: 'misc' },
            { label: i18n._('Activity Stream'), value: 'activity_stream' },
            { label: i18n._('Logging'), value: 'logging' },
        ];

        var systemForms = [{

@ -97,14 +97,14 @@ export default [
        }];

        var forms = _.map(systemForms, 'formDef');
        _.each(forms, function(form) {
        _.each(forms, function (form) {
            var keys = _.keys(form.fields);
            _.each(keys, function(key) {
                if($scope.configDataResolve[key].type === 'choice') {
            _.each(keys, function (key) {
                if ($scope.configDataResolve[key].type === 'choice') {
                    // Create options for dropdowns
                    var optionsGroup = key + '_options';
                    $scope.$parent.$parent[optionsGroup] = [];
                    _.each($scope.configDataResolve[key].choices, function(choice){
                    _.each($scope.configDataResolve[key].choices, function (choice) {
                        $scope.$parent.$parent[optionsGroup].push({
                            name: choice[0],
                            label: choice[1],

@ -121,7 +121,7 @@ export default [
        function addFieldInfo(form, key) {
            _.extend(form.fields[key], {
                awPopOver: ($scope.configDataResolve[key].defined_in_file) ?
                    null: $scope.configDataResolve[key].help_text,
                    null : $scope.configDataResolve[key].help_text,
                label: $scope.configDataResolve[key].label,
                name: key,
                toggleSource: key,

@ -138,7 +138,7 @@ export default [

        $scope.$parent.$parent.parseType = 'json';

        _.each(systemForms, function(form) {
        _.each(systemForms, function (form) {
            generator.inject(form.formDef, {
                id: form.id,
                mode: 'edit',

@ -150,37 +150,37 @@ export default [

        var dropdownRendered = false;

        $scope.$on('populated', function() {
        $scope.$on('populated', function () {
            populateLogAggregator(false);
        });

        $scope.$on('LOG_AGGREGATOR_TYPE_populated', function(e, data, flag) {
        $scope.$on('LOG_AGGREGATOR_TYPE_populated', function (e, data, flag) {
            populateLogAggregator(flag);
        });

        $scope.$on('LOG_AGGREGATOR_PROTOCOL_populated', function(e, data, flag) {
        $scope.$on('LOG_AGGREGATOR_PROTOCOL_populated', function (e, data, flag) {
            populateLogAggregator(flag);
        });

        function populateLogAggregator(flag){
        function populateLogAggregator(flag) {

            if($scope.$parent.$parent.LOG_AGGREGATOR_TYPE !== null) {
            if ($scope.$parent.$parent.LOG_AGGREGATOR_TYPE !== null) {
                $scope.$parent.$parent.LOG_AGGREGATOR_TYPE = _.find($scope.$parent.$parent.LOG_AGGREGATOR_TYPE_options, { value: $scope.$parent.$parent.LOG_AGGREGATOR_TYPE });
            }

            if($scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL !== null) {
            if ($scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL !== null) {
                $scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL = _.find($scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL_options, { value: $scope.$parent.$parent.LOG_AGGREGATOR_PROTOCOL });
            }

            if($scope.$parent.$parent.LOG_AGGREGATOR_LEVEL !== null) {
            if ($scope.$parent.$parent.LOG_AGGREGATOR_LEVEL !== null) {
                $scope.$parent.$parent.LOG_AGGREGATOR_LEVEL = _.find($scope.$parent.$parent.LOG_AGGREGATOR_LEVEL_options, { value: $scope.$parent.$parent.LOG_AGGREGATOR_LEVEL });
            }

            if(flag !== undefined){
            if (flag !== undefined) {
                dropdownRendered = flag;
            }

            if(!dropdownRendered) {
            if (!dropdownRendered) {
                dropdownRendered = true;
                CreateSelect2({
                    element: '#configuration_logging_template_LOG_AGGREGATOR_TYPE',

@ -193,33 +193,52 @@ export default [
            }
        }

        $scope.$parent.vm.testLogging = function() {
            Rest.setUrl("/api/v2/settings/logging/test/");
            Rest.post($scope.$parent.vm.getFormPayload())
                .then(() => {
                    ngToast.success({
                        content: `<i class="fa fa-check-circle
                            Toast-successIcon"></i>` +
                            i18n._('Log aggregator test successful.')
                    });
                })
                .catch(({data, status}) => {
                    if (status === 500) {
                        ngToast.danger({
                            content: '<i class="fa fa-exclamation-triangle Toast-successIcon"></i>' +
                                i18n._('Log aggregator test failed. <br> Detail: ') + $filter('sanitize')(data.error),
                            additionalClasses: "LogAggregator-failedNotification"
        $scope.$watchGroup(['configuration_logging_template_form.$pending', 'configuration_logging_template_form.$dirty', '!logAggregatorEnabled'], (vals) => {
            if (vals.some(val => val === true)) {
                $scope.$parent.vm.disableTestButton = true;
                $scope.$parent.vm.testTooltip = i18n._('Save and enable log aggregation before testing the log aggregator.');
            } else {
                $scope.$parent.vm.disableTestButton = false;
                $scope.$parent.vm.testTooltip = i18n._('Send a test log message to the configured log aggregator.');
            }
        });

        $scope.$parent.vm.testLogging = function () {
            if (!$scope.$parent.vm.disableTestButton) {
                $scope.$parent.vm.disableTestButton = true;
                Rest.setUrl("/api/v2/settings/logging/test/");
                Rest.post({})
                    .then(() => {
                        $scope.$parent.vm.disableTestButton = false;
                        ngToast.success({
                            dismissButton: false,
                            dismissOnTimeout: true,
                            content: `<i class="fa fa-check-circle
                                Toast-successIcon"></i>` +
                                i18n._('Log aggregator test sent successfully.')
                        });
                    } else {
                        ProcessErrors($scope, data, status, null,
                            {
                                hdr: i18n._('Error!'),
                                msg: i18n._('There was an error testing the ' +
                                    'log aggregator. Returned status: ') +
                                    status
                            })
                    .catch(({ data, status }) => {
                        $scope.$parent.vm.disableTestButton = false;
                        if (status === 400 || status === 500) {
                            ngToast.danger({
                                dismissButton: false,
                                dismissOnTimeout: true,
                                content: '<i class="fa fa-exclamation-triangle Toast-successIcon"></i>' +
                                    i18n._('Log aggregator test failed. <br> Detail: ') + $filter('sanitize')(data.error),
                                additionalClasses: "LogAggregator-failedNotification"
                            });
                        }
                    });
            } else {
                ProcessErrors($scope, data, status, null,
                    {
                        hdr: i18n._('Error!'),
                        msg: i18n._('There was an error testing the ' +
                            'log aggregator. Returned status: ') +
                            status
                    });
            }
        });
    }
};

        angular.extend(systemVm, {

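The new flow posts an empty body because the endpoint now tests the saved configuration rather than unsaved form values. The equivalent API call, sketched with the Python requests library (host and credentials are illustrative):

import requests

resp = requests.post(
    'https://awx.example.com/api/v2/settings/logging/test/',
    auth=('admin', 'password'),  # illustrative credentials
    json={},
)
print(resp.status_code)  # 2xx: test handed off; 400/500: surfaced as a toast above
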

@ -75,10 +75,13 @@
    class: 'Form-resetAll'
},
testLogging: {
    ngClass: "{'Form-button--disabled': vm.disableTestButton}",
    ngClick: 'vm.testLogging()',
    label: i18n._('Test'),
    class: 'btn-primary',
    ngDisabled: 'configuration_logging_template_form.$invalid'
    class: 'Form-primaryButton',
    awToolTip: '{{vm.testTooltip}}',
    dataTipWatch: 'vm.testTooltip',
    dataPlacement: 'top',
},
cancel: {
    ngClick: 'vm.formCancel()',

@ -1690,6 +1690,9 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
            if (button.ngClick) {
                html += this.attr(button, 'ngClick');
            }
            if (button.ngClass) {
                html += this.attr(button, 'ngClass');
            }
            if (button.ngDisabled) {
                ngDisabled = (button.ngDisabled===true) ? `${this.form.name}_form.$invalid || ${this.form.name}_form.$pending`: button.ngDisabled;
                if (btn !== 'reset') {

@ -1,13 +0,0 @@
Copyright 2013 Ross McFarland

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@ -213,7 +213,6 @@ with the traceback message.

Log messages should be sent outside of the
request-response cycle. For example, Loggly examples use
`requests_futures.sessions.FuturesSession`, which does some
threading work to fire the message without interfering with other
rsyslog, which handles these messages without interfering with other
operations. A timeout on the part of the log aggregation service should
not cause Tower operations to hang.
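
As an illustration of the new behavior described above, a log call returns as soon as the record is written to the local rsyslog socket, regardless of aggregator health (a hedged sketch; the logger name is one of AWX's external-aggregator loggers):

import logging

# emit() only writes to the local unix socket; rsyslogd queues and forwards,
# so this returns immediately even if the remote aggregator is slow or down
logging.getLogger('awx.analytics.job_events').info('job event payload')
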
7
installer/roles/image_build/files/rsyslog.conf
Normal file
@ -0,0 +1,7 @@
$WorkDirectory /var/lib/awx/rsyslog
$MaxMessageSize 700000
$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf
$ModLoad imuxsock
input(type="imuxsock" Socket="/var/run/rsyslog/rsyslog.sock" unlink="on")
template(name="awx" type="string" string="%msg%")
action(type="omfile" file="/dev/null")
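
This base config deliberately writes to /dev/null; per-aggregator forwarding rules are expected to land in conf.d (picked up by $IncludeConfig above) whenever the logging settings change. A hypothetical sketch of generating such a rule, not the actual AWX implementation:

# hypothetical helper; AWX's real config generation lives elsewhere
def write_forwarding_conf(host, port):
    rule = (
        'action(type="omfwd" target="%s" port="%s" protocol="tcp" '
        'template="awx")\n' % (host, port)
    )
    with open('/var/lib/awx/rsyslog/conf.d/forward.conf', 'w') as f:
        f.write(rule)
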
7
installer/roles/image_build/files/rsyslog.repo
Normal file
@ -0,0 +1,7 @@
[rsyslog_v8]
name=Adiscon CentOS-$releasever - local packages for $basearch
baseurl=http://rpms.adiscon.com/v8-stable/epel-$releasever/$basearch
enabled=1
gpgcheck=0
gpgkey=http://rpms.adiscon.com/RPM-GPG-KEY-Adiscon
protect=1

@ -46,8 +46,20 @@ stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

[program:awx-rsyslogd]
command = rsyslogd -n -i /var/run/rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
autostart = true
autorestart = true
stopwaitsecs = 1
stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/stderr
stdout_logfile_maxbytes=0

[group:tower-processes]
programs=nginx,uwsgi,daphne,wsbroadcast
programs=nginx,uwsgi,daphne,wsbroadcast,awx-rsyslogd
priority=5

# TODO: Exit Handler

@ -62,10 +74,10 @@ events=TICK_60
priority=0

[unix_http_server]
file=/tmp/supervisor.sock
file=/var/run/supervisor/supervisor.web.sock

[supervisorctl]
serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
serverurl=unix:///var/run/supervisor/supervisor.web.sock ; use a unix:// URL for a unix socket

[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

@ -26,8 +26,20 @@ stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

[program:awx-rsyslogd]
command = rsyslogd -n -i /awx_devel/rsyslog.pid
autostart = true
autorestart = true
stopwaitsecs = 1
stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/stderr
stdout_logfile_maxbytes=0

[group:tower-processes]
programs=dispatcher,callback-receiver
programs=dispatcher,callback-receiver,awx-rsyslogd
priority=5

# TODO: Exit Handler

@ -137,6 +137,20 @@
    mode: '0700'
  delegate_to: localhost

- name: Stage rsyslog.repo
  copy:
    src: rsyslog.repo
    dest: "{{ docker_base_path }}/rsyslog.repo"
    mode: '0700'
  delegate_to: localhost

- name: Stage rsyslog.conf
  copy:
    src: rsyslog.conf
    dest: "{{ docker_base_path }}/rsyslog.conf"
    mode: '0660'
  delegate_to: localhost

- name: Stage supervisor.conf
  copy:
    src: supervisor.conf

@ -97,16 +97,23 @@ RUN cd /usr/local/bin && \
    curl -L https://github.com/openshift/origin/releases/download/v3.11.0/openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz | \
    tar -xz --strip-components=1 --wildcards --no-anchored 'oc'

ADD rsyslog.repo /etc/yum.repos.d/rsyslog.repo
RUN yum install -y rsyslog-omhttp

# Pre-create things that we need to write to
RUN for dir in /home/awx /var/log/tower /var/log/nginx /var/lib/nginx; \
RUN for dir in /home/awx /var/run/supervisor /var/lib/awx /var/lib/awx/rsyslog /var/lib/awx/rsyslog/conf.d /var/run/rsyslog /var/log/tower /var/log/nginx /var/lib/nginx; \
      do mkdir -p $dir; chmod -R g+rwx $dir; chgrp -R root $dir; done && \
    \
    for file in /etc/passwd /var/run/nginx.pid; \
      do touch $file; chmod -R g+rwx $file; chgrp -R root $file; done

# Create default awx rsyslog config
ADD rsyslog.conf /var/lib/awx/rsyslog/rsyslog.conf

# Fix up permissions
RUN find /var/lib/awx -not -path '/var/lib/awx/venv*' | xargs chgrp root && \
    find /var/lib/awx -not -path '/var/lib/awx/venv*' | xargs chmod g+w && \
    chgrp root /var/lib/awx/rsyslog/rsyslog.conf && \
    chmod +rx /usr/bin/launch_awx.sh && \
    chmod +rx /usr/bin/launch_awx_task.sh && \
    chmod +rx /usr/bin/config-watcher && \

@ -94,6 +94,12 @@ spec:
        ports:
        - containerPort: 8052
        volumeMounts:
          - name: supervisor-socket
            mountPath: "/var/run/supervisor"
          - name: rsyslog-socket
            mountPath: "/var/run/rsyslog"
          - name: rsyslog-dir
            mountPath: "/var/lib/awx/rsyslog"
{% if ca_trust_dir is defined %}
          - name: {{ kubernetes_deployment_name }}-ca-trust-dir
            mountPath: "/etc/pki/ca-trust/source/anchors/"

@ -174,6 +180,12 @@ spec:
          - /usr/bin/launch_awx_task.sh
        imagePullPolicy: Always
        volumeMounts:
          - name: supervisor-socket
            mountPath: "/var/run/supervisor"
          - name: rsyslog-socket
            mountPath: "/var/run/rsyslog"
          - name: rsyslog-dir
            mountPath: "/var/lib/awx/rsyslog"
{% if ca_trust_dir is defined %}
          - name: {{ kubernetes_deployment_name }}-ca-trust-dir
            mountPath: "/etc/pki/ca-trust/source/anchors/"

@ -223,6 +235,8 @@ spec:
          - name: {{ kubernetes_deployment_name }}-memcached-socket
            mountPath: "/var/run/memcached"
        env:
          - name: SUPERVISOR_WEB_CONFIG_PATH
            value: "/supervisor.conf"
          - name: AWX_SKIP_MIGRATIONS
            value: "1"
          - name: MY_POD_UID

@ -313,6 +327,12 @@ spec:
{{ affinity | to_nice_yaml(indent=2) | indent(width=8, indentfirst=True) }}
{% endif %}
      volumes:
        - name: supervisor-socket
          emptyDir: {}
        - name: rsyslog-socket
          emptyDir: {}
        - name: rsyslog-dir
          emptyDir: {}
{% if ca_trust_dir is defined %}
        - name: {{ kubernetes_deployment_name }}-ca-trust-dir
          hostPath:

@ -389,7 +409,6 @@ spec:
          - key: supervisor-task-config
            path: 'supervisor_task.conf'


        - name: {{ kubernetes_deployment_name }}-secret-key
          secret:
            secretName: "{{ kubernetes_deployment_name }}-secrets"

@ -53,8 +53,20 @@ data:
    stderr_logfile=/dev/stderr
    stderr_logfile_maxbytes=0

    [program:awx-rsyslogd]
    command = rsyslogd -n -i /var/run/rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
    autostart = true
    autorestart = true
    stopwaitsecs = 1
    stopsignal=KILL
    stopasgroup=true
    killasgroup=true
    redirect_stderr=true
    stdout_logfile=/dev/stderr
    stdout_logfile_maxbytes=0

    [group:tower-processes]
    programs=nginx,uwsgi,daphne,wsbroadcast
    programs=nginx,uwsgi,daphne,wsbroadcast,awx-rsyslogd
    priority=5

    # TODO: Exit Handler

@ -69,10 +81,10 @@ data:
    priority=0

    [unix_http_server]
    file=/tmp/supervisor.sock
    file=/var/run/supervisor/supervisor.web.sock

    [supervisorctl]
    serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
    serverurl=unix:///var/run/supervisor/supervisor.web.sock ; use a unix:// URL for a unix socket

    [rpcinterface:supervisor]
    supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

@ -128,4 +140,3 @@ data:

    [rpcinterface:supervisor]
    supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

@ -20,6 +20,8 @@ services:
    user: root
    restart: unless-stopped
    volumes:
      - rsyslog-socket:/var/run/rsyslog/
      - rsyslog-config:/var/lib/awx/rsyslog/
      - "{{ docker_compose_dir }}/SECRET_KEY:/etc/tower/SECRET_KEY"
      - "{{ docker_compose_dir }}/environment.sh:/etc/tower/conf.d/environment.sh"
      - "{{ docker_compose_dir }}/credentials.py:/etc/tower/conf.d/credentials.py"

@ -75,6 +77,8 @@ services:
    user: root
    restart: unless-stopped
    volumes:
      - rsyslog-socket:/var/run/rsyslog/
      - rsyslog-config:/var/lib/awx/rsyslog/
      - "{{ docker_compose_dir }}/SECRET_KEY:/etc/tower/SECRET_KEY"
      - "{{ docker_compose_dir }}/environment.sh:/etc/tower/conf.d/environment.sh"
      - "{{ docker_compose_dir }}/credentials.py:/etc/tower/conf.d/credentials.py"

@ -114,6 +118,7 @@ services:
      http_proxy: {{ http_proxy | default('') }}
      https_proxy: {{ https_proxy | default('') }}
      no_proxy: {{ no_proxy | default('') }}
      SUPERVISOR_WEB_CONFIG_PATH: '/supervisor.conf'

  redis:
    image: {{ redis_image }}

@ -157,3 +162,7 @@ services:
      https_proxy: {{ https_proxy | default('') }}
      no_proxy: {{ no_proxy | default('') }}
{% endif %}
volumes:
  supervisor-socket:
  rsyslog-socket:
  rsyslog-config:

@ -4,7 +4,7 @@ The `requirements.txt` and `requirements_ansible.txt` files are generated from `

## How To Use

Commands should from inside `./requirements` directory of the awx repository.
Commands should be run from inside the `./requirements` directory of the awx repository.

Make sure you have `patch, awk, python3, python2, python3-venv, python2-virtualenv, pip2, pip3` installed. The development container image should have all these.

@ -145,7 +145,3 @@ in the top-level Makefile.
Version 4.8 makes us a little bit nervous with changes to `searchwindowsize` https://github.com/pexpect/pexpect/pull/579/files
Pin to `pexpect==4.7.x` until we have more time to move to `4.8` and test.

### requests-futures

This can be removed when a solution for the external log queuing is ready.
https://github.com/ansible/awx/pull/5092

@ -42,7 +42,6 @@ social-auth-core==3.3.1 # see UPGRADE BLOCKERs
social-auth-app-django==3.1.0 # see UPGRADE BLOCKERs
redis
requests
requests-futures # see library notes
slackclient==1.1.2 # see UPGRADE BLOCKERs
tacacs_plus==1.0 # UPGRADE BLOCKER: auth does not work with later versions
twilio

@ -101,9 +101,8 @@ python3-saml==1.9.0 # via -r /awx_devel/requirements/requirements.in
pytz==2019.3 # via django, irc, tempora, twilio
pyyaml==5.3.1 # via -r /awx_devel/requirements/requirements.in, ansible-runner, djangorestframework-yaml, kubernetes
redis==3.4.1 # via -r /awx_devel/requirements/requirements.in
requests-futures==1.0.0 # via -r /awx_devel/requirements/requirements.in
requests-oauthlib==1.3.0 # via kubernetes, msrest, social-auth-core
requests==2.23.0 # via -r /awx_devel/requirements/requirements.in, adal, azure-keyvault, django-oauth-toolkit, kubernetes, msrest, requests-futures, requests-oauthlib, slackclient, social-auth-core, twilio
requests==2.23.0 # via -r /awx_devel/requirements/requirements.in, adal, azure-keyvault, django-oauth-toolkit, kubernetes, msrest, requests-oauthlib, slackclient, social-auth-core, twilio
rsa==4.0 # via google-auth
ruamel.yaml.clib==0.2.0 # via ruamel.yaml
ruamel.yaml==0.16.10 # via openshift

@ -102,6 +102,10 @@ RUN cd /usr/local/bin && \
    curl -L https://github.com/openshift/origin/releases/download/v3.9.0/openshift-origin-client-tools-v3.9.0-191fece-linux-64bit.tar.gz | \
    tar -xz --strip-components=1 --wildcards --no-anchored 'oc'

ADD tools/docker-compose/rsyslog.repo /etc/yum.repos.d/
RUN yum install -y rsyslog-omhttp
RUN mkdir -p /var/lib/awx/rsyslog/ && echo '$IncludeConfig /etc/rsyslog.conf' >> /var/lib/awx/rsyslog/rsyslog.conf

RUN dnf -y clean all && rm -rf /root/.cache

# https://github.com/ansible/awx/issues/5224

@ -119,12 +123,19 @@ ADD tools/docker-compose/entrypoint.sh /
ADD tools/scripts/awx-python /usr/bin/awx-python

# Pre-create things that we need to write to
RUN for dir in /var/lib/awx/ /var/log/tower/ /var/lib/awx/projects /.ansible /var/log/nginx /var/lib/nginx /.local; \
RUN for dir in /var/lib/awx /var/lib/awx/rsyslog /var/lib/awx/rsyslog/conf.d /var/run/rsyslog /var/log/tower/ /var/lib/awx/projects /.ansible /var/log/nginx /var/lib/nginx /.local; \
      do mkdir -p $dir; chmod -R g+rwx $dir; chgrp -R root $dir; done && \
    \
    for file in /etc/passwd /etc/supervisord.conf /venv/awx/lib/python3.6/site-packages/awx.egg-link /var/run/nginx.pid; \
      do touch $file; chmod -R g+rwx $file; chgrp -R root $file; done

RUN chmod -R 0775 /var/lib/awx /var/lib/awx/rsyslog
ADD tools/docker-compose/rsyslog.repo /etc/yum.repos.d/
RUN yum install -y rsyslog-omhttp
ADD tools/docker-compose/rsyslog.conf /var/lib/awx/rsyslog/rsyslog.conf
RUN chmod 0775 /var/lib/awx/rsyslog/rsyslog.conf

ENV HOME /var/lib/awx
ENV PATH="/usr/local/n/versions/node/10.15.0/bin:${PATH}"
ENV PATH="/usr/pgsql-10/bin:${PATH}"
7
tools/docker-compose/rsyslog.conf
Normal file
@ -0,0 +1,7 @@
$WorkDirectory /var/lib/awx/rsyslog
$MaxMessageSize 700000
$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf
$ModLoad imuxsock
input(type="imuxsock" Socket="/var/run/rsyslog/rsyslog.sock" unlink="on")
template(name="awx" type="string" string="%msg%")
action(type="omfile" file="/dev/null")
7
tools/docker-compose/rsyslog.repo
Normal file
@ -0,0 +1,7 @@
[rsyslog_v8]
name=Adiscon CentOS-$releasever - local packages for $basearch
baseurl=http://rpms.adiscon.com/v8-stable/epel-$releasever/$basearch
enabled=1
gpgcheck=0
gpgkey=http://rpms.adiscon.com/RPM-GPG-KEY-Adiscon
protect=1

@ -71,8 +71,20 @@ redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0

[program:awx-rsyslogd]
command = rsyslogd -n -i /var/run/rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
autostart = true
autorestart = true
stopwaitsecs = 1
stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0

[group:tower-processes]
programs=awx-dispatcher,awx-receiver,awx-uwsgi,awx-daphne,awx-nginx,awx-wsbroadcast
programs=awx-dispatcher,awx-receiver,awx-uwsgi,awx-daphne,awx-nginx,awx-wsbroadcast,awx-rsyslogd
priority=5

[unix_http_server]