Mirror of https://github.com/ansible/awx.git, synced 2026-03-13 23:17:32 -02:30.
Merge pull request #6108 from rooftopcellist/rsyslog
Replace our external logging feature with Rsyslog
Reviewed-by: Ryan Petrello
https://github.com/ryanpetrello
This commit is contained in:
@@ -5,17 +5,13 @@
|
||||
# Python
|
||||
import pytest
|
||||
import os
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
# Mock
|
||||
from unittest import mock
|
||||
|
||||
# AWX
|
||||
from awx.api.versioning import reverse
|
||||
from awx.conf.models import Setting
|
||||
from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
|
||||
from awx.conf.registry import settings_registry
|
||||
|
||||
|
||||
TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA
2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
|
||||
@@ -237,73 +233,95 @@ def test_ui_settings(get, put, patch, delete, admin):
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_logging_aggregator_connection_test_requires_superuser(post, alice):
    """Non-superusers must be rejected (403) by the logging test endpoint.

    NOTE(review): the diff rendering interleaved the OLD definition
    (`test_logging_aggregrator_...`, which also took an unused `get`
    fixture) with this corrected one; the corrected spelling and
    signature are kept and the stray duplicate `def` line is dropped.
    """
    url = reverse('api:setting_logging_test')
    post(url, {}, user=alice, expect=403)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('key', [
|
||||
'LOG_AGGREGATOR_TYPE',
|
||||
'LOG_AGGREGATOR_HOST',
|
||||
@pytest.mark.django_db
def test_logging_aggregator_connection_test_not_enabled(post, admin):
    """Hitting the logging test endpoint while the aggregator is disabled
    is rejected with a 409 carrying an explanatory error message."""
    endpoint = reverse('api:setting_logging_test')
    response = post(endpoint, {}, user=admin, expect=409)
    assert 'Logging not enabled' in response.data.get('error')
|
||||
|
||||
|
||||
def _mock_logging_defaults():
    """Build a (settings object, settings dict) pair pre-populated with the
    registry default for every setting in the 'logging' category.

    The object additionally carries MAX_EVENT_RES_DATA; the dict does not.
    """
    class MockSettings:
        pass

    # One registry pass collects every logging default keyed by name.
    defaults = {
        name: settings_registry.get_setting_field(name).get_default()
        for name in settings_registry.get_registered_settings(category_slug='logging')
    }

    settings_obj = MockSettings()
    for name, default in defaults.items():
        setattr(settings_obj, name, default)
    settings_obj.MAX_EVENT_RES_DATA = 700000

    return settings_obj, defaults
|
||||
|
||||
|
||||
|
||||
# NOTE(review): this span interleaves two diff hunks — the NEW
# ('key, value, error') parametrize list and the OLD
# test_logging_aggregrator_connection_test_bad_request function, which only
# accepts `key`. They do not form one coherent test together; kept
# byte-identical pending reconciliation against the real commit.
@pytest.mark.parametrize('key, value, error', [
|
||||
['LOG_AGGREGATOR_TYPE', 'logstash', 'Cannot enable log aggregator without providing host.'],
|
||||
['LOG_AGGREGATOR_HOST', 'https://logstash', 'Cannot enable log aggregator without providing type.']
|
||||
])
|
||||
@pytest.mark.django_db
|
||||
# OLD (superseded) test: posts an empty body and expects a field-required
# validation error for the parametrized key.
def test_logging_aggregrator_connection_test_bad_request(get, post, admin, key):
|
||||
url = reverse('api:setting_logging_test')
|
||||
resp = post(url, {}, user=admin, expect=400)
|
||||
assert 'This field is required.' in resp.data.get(key, [])
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_logging_aggregrator_connection_test_valid(mocker, get, post, admin):
    """POSTing a full aggregator config to the test endpoint forwards every
    submitted value to AWXProxyHandler.perform_test via custom_settings."""
    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
        submitted = {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080,
            'LOG_AGGREGATOR_USERNAME': 'logger',
            'LOG_AGGREGATOR_PASSWORD': 'mcstash'
        }
        post(reverse('api:setting_logging_test'), submitted, user=admin, expect=200)

        # The handler must have been called once with a settings object
        # mirroring exactly what the client submitted.
        _, call_kwargs = perform_test.call_args_list[0]
        forwarded = call_kwargs['custom_settings']
        for setting_name, expected in submitted.items():
            assert hasattr(forwarded, setting_name)
            assert getattr(forwarded, setting_name) == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_logging_aggregator_missing_settings(put, post, admin, key, value, error):
    """Enabling the aggregator while providing only one of TYPE/HOST must be
    rejected with a 400 carrying the matching validation error.

    NOTE(review): reconstructed from interleaved diff hunks; relies on the
    ('key, value, error') parametrize decorator earlier in the file —
    confirm against the real commit.
    """
    _, mock_settings = _mock_logging_defaults()
    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
    mock_settings[key] = value
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    response = put(url, data=mock_settings, user=admin, expect=400)
    assert error in str(response.data)


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_with_masked_password(mocker, patch, post, admin):
    """When the client sends the '$encrypted$' placeholder, the connection
    test must fall back to the previously stored password.

    NOTE(review): reconstructed from interleaved diff hunks — verify the
    statement order against the real commit.
    """
    settings_url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    patch(settings_url, user=admin, data={'LOG_AGGREGATOR_PASSWORD': 'password123'}, expect=200)
    time.sleep(1)  # log settings are cached slightly

    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
        url = reverse('api:setting_logging_test')
        user_data = {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080,
            'LOG_AGGREGATOR_USERNAME': 'logger',
            'LOG_AGGREGATOR_PASSWORD': '$encrypted$'
        }
        post(url, user_data, user=admin, expect=200)
        args, kwargs = perform_test.call_args_list[0]
        create_settings = kwargs['custom_settings']
        # The masked sentinel is replaced by the stored secret.
        assert getattr(create_settings, 'LOG_AGGREGATOR_PASSWORD') == 'password123'
|
||||
|
||||
@pytest.mark.parametrize('type, host, port, username, password', [
    ['logstash', 'localhost', 8080, 'logger', 'mcstash'],
    ['loggly', 'http://logs-01.loggly.com/inputs/1fd38090-hash-h4a$h-8d80-t0k3n71/tag/http/', None, None, None],
    ['splunk', 'https://yoursplunk:8088/services/collector/event', None, None, None],
    ['other', '97.221.40.41', 9000, 'logger', 'mcstash'],
    ['sumologic', 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/Zagnw_f9XGr_zZgd-_EPM0hb8_rUU7_RU8Q==',
     None, None, None]
])
@pytest.mark.django_db
def test_logging_aggregator_valid_settings(put, post, admin, type, host, port, username, password):
    """PUTting a complete, valid aggregator configuration for each supported
    type is accepted (200) and echoed back with the password masked.

    Cleanup: removed two commented-out debug lines hard-coding splunk values.
    """
    _, mock_settings = _mock_logging_defaults()
    mock_settings['LOG_AGGREGATOR_ENABLED'] = True
    mock_settings['LOG_AGGREGATOR_TYPE'] = type
    mock_settings['LOG_AGGREGATOR_HOST'] = host
    if port:
        mock_settings['LOG_AGGREGATOR_PORT'] = port
    if username:
        mock_settings['LOG_AGGREGATOR_USERNAME'] = username
    if password:
        mock_settings['LOG_AGGREGATOR_PASSWORD'] = password
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    response = put(url, data=mock_settings, user=admin, expect=200)
    assert type in response.data.get('LOG_AGGREGATOR_TYPE')
    assert host in response.data.get('LOG_AGGREGATOR_HOST')
    if port:
        assert port == response.data.get('LOG_AGGREGATOR_PORT')
    if username:
        assert username in response.data.get('LOG_AGGREGATOR_USERNAME')
    if password:
        # The API must never echo the plaintext password back.
        assert '$encrypted$' in response.data.get('LOG_AGGREGATOR_PASSWORD')
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_logging_aggregrator_connection_test_invalid(mocker, get, post, admin):
    """A connectivity failure raised by the handler surfaces as a 500 whose
    body carries the underlying error text."""
    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
        perform_test.side_effect = LoggingConnectivityException('404: Not Found')
        payload = {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080
        }
        response = post(reverse('api:setting_logging_test'), payload, user=admin, expect=500)
        assert response.data == {'error': '404: Not Found'}
|
||||
def test_logging_aggregator_connection_test_valid(put, post, admin):
    """Saving a minimal-but-complete aggregator config and then hitting the
    test endpoint is accepted asynchronously (202)."""
    _, logging_settings = _mock_logging_defaults()
    logging_settings['LOG_AGGREGATOR_ENABLED'] = True
    logging_settings['LOG_AGGREGATOR_TYPE'] = 'other'
    logging_settings['LOG_AGGREGATOR_HOST'] = 'https://localhost'
    # Persist the configuration...
    settings_url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    put(settings_url, data=logging_settings, user=admin, expect=200)
    # ...then "test" the logger.
    test_url = reverse('api:setting_logging_test')
    post(test_url, {}, user=admin, expect=202)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
160 lines added — awx/main/tests/unit/api/test_logger.py (new file)
@@ -0,0 +1,160 @@
|
||||
import pytest
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from awx.main.utils.external_logging import construct_rsyslog_conf_template
|
||||
from awx.main.tests.functional.api.test_settings import _mock_logging_defaults
|
||||
|
||||
'''
|
||||
# Example User Data
|
||||
data_logstash = {
|
||||
"LOG_AGGREGATOR_TYPE": "logstash",
|
||||
"LOG_AGGREGATOR_HOST": "localhost",
|
||||
"LOG_AGGREGATOR_PORT": 8080,
|
||||
"LOG_AGGREGATOR_PROTOCOL": "tcp",
|
||||
"LOG_AGGREGATOR_USERNAME": "logger",
|
||||
"LOG_AGGREGATOR_PASSWORD": "mcstash"
|
||||
}
|
||||
|
||||
data_netcat = {
|
||||
"LOG_AGGREGATOR_TYPE": "other",
|
||||
"LOG_AGGREGATOR_HOST": "localhost",
|
||||
"LOG_AGGREGATOR_PORT": 9000,
|
||||
"LOG_AGGREGATOR_PROTOCOL": "udp",
|
||||
}
|
||||
|
||||
data_loggly = {
|
||||
"LOG_AGGREGATOR_TYPE": "loggly",
|
||||
"LOG_AGGREGATOR_HOST": "http://logs-01.loggly.com/inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/",
|
||||
"LOG_AGGREGATOR_PORT": 8080,
|
||||
"LOG_AGGREGATOR_PROTOCOL": "https"
|
||||
}
|
||||
'''
|
||||
|
||||
|
||||
# Test reconfigure logging settings function
|
||||
# name this whatever you want
|
||||
@pytest.mark.parametrize(
|
||||
'enabled, type, host, port, protocol, expected_config', [
|
||||
(
|
||||
True,
|
||||
'loggly',
|
||||
'http://logs-01.loggly.com/inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/',
|
||||
None,
|
||||
'https',
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # localhost w/ custom UDP port
|
||||
'other',
|
||||
'localhost',
|
||||
9000,
|
||||
'udp',
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")',
|
||||
'action(type="omfwd" target="localhost" port="9000" protocol="udp" action.resumeRetryCount="-1" template="awx")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # localhost w/ custom TCP port
|
||||
'other',
|
||||
'localhost',
|
||||
9000,
|
||||
'tcp',
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")',
|
||||
'action(type="omfwd" target="localhost" port="9000" protocol="tcp" action.resumeRetryCount="-1" template="awx")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # https, default port 443
|
||||
'splunk',
|
||||
'https://yoursplunk/services/collector/event',
|
||||
None,
|
||||
None,
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # http, default port 80
|
||||
'splunk',
|
||||
'http://yoursplunk/services/collector/event',
|
||||
None,
|
||||
None,
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # https, custom port in URL string
|
||||
'splunk',
|
||||
'https://yoursplunk:8088/services/collector/event',
|
||||
None,
|
||||
None,
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # https, custom port explicitly specified
|
||||
'splunk',
|
||||
'https://yoursplunk/services/collector/event',
|
||||
8088,
|
||||
None,
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # no scheme specified in URL, default to https, respect custom port
|
||||
'splunk',
|
||||
'yoursplunk.org/services/collector/event',
|
||||
8088,
|
||||
'https',
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")', # noqa
|
||||
])
|
||||
),
|
||||
(
|
||||
True, # respect custom http-only port
|
||||
'splunk',
|
||||
'http://yoursplunk.org/services/collector/event',
|
||||
8088,
|
||||
None,
|
||||
'\n'.join([
|
||||
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
|
||||
'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" healthchecktimeout="5000" restpath="services/collector/event")', # noqa
|
||||
])
|
||||
),
|
||||
]
|
||||
)
|
||||
def test_rsyslog_conf_template(enabled, type, host, port, protocol, expected_config):
    """Render the rsyslog config for each parametrized aggregator setup and
    check the expected template/action lines appear in the output."""
    import copy

    mock_settings, _ = _mock_logging_defaults()

    # Deep-copy so mutating the handler address cannot leak into the real
    # django.conf.settings.LOGGING dict shared by other tests.
    logging_defaults = copy.deepcopy(getattr(settings, 'LOGGING'))
    # BUG FIX: the original did
    #   setattr(mock_settings, 'LOGGING["handlers"]["external_logger"]["address"]', ...)
    # which only creates an attribute with that literal name and never
    # updates the nested dict. Set the socket address for real.
    # (setdefault guards against a missing handler entry in test settings —
    # TODO confirm the real LOGGING dict always defines external_logger.)
    logging_defaults.setdefault('handlers', {}).setdefault('external_logger', {})['address'] = \
        '/var/run/rsyslog/rsyslog.sock'
    setattr(mock_settings, 'LOGGING', logging_defaults)
    setattr(mock_settings, 'LOG_AGGREGATOR_ENABLED', enabled)
    setattr(mock_settings, 'LOG_AGGREGATOR_TYPE', type)
    setattr(mock_settings, 'LOG_AGGREGATOR_HOST', host)
    if port:
        setattr(mock_settings, 'LOG_AGGREGATOR_PORT', port)
    if protocol:
        setattr(mock_settings, 'LOG_AGGREGATOR_PROTOCOL', protocol)

    # Create the rsyslog conf template and check its validity.
    tmpl = construct_rsyslog_conf_template(mock_settings)
    assert expected_config in tmpl
|
||||
@@ -1,393 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import logging
|
||||
import socket
|
||||
import datetime
|
||||
from dateutil.tz import tzutc
|
||||
from io import StringIO
|
||||
from uuid import uuid4
|
||||
|
||||
from unittest import mock
|
||||
|
||||
from django.conf import LazySettings
|
||||
from django.utils.encoding import smart_str
|
||||
import pytest
|
||||
import requests
|
||||
from requests_futures.sessions import FuturesSession
|
||||
|
||||
from awx.main.utils.handlers import (BaseHandler, BaseHTTPSHandler as HTTPSHandler,
|
||||
TCPHandler, UDPHandler, _encode_payload_for_socket,
|
||||
PARAM_NAMES, LoggingConnectivityException,
|
||||
AWXProxyHandler)
|
||||
from awx.main.utils.formatters import LogstashFormatter
|
||||
|
||||
|
||||
@pytest.fixture()
def https_adapter():
    """Transport adapter that records every outgoing request and answers
    with a canned status/reason instead of touching the network."""
    class FakeHTTPSAdapter(requests.adapters.HTTPAdapter):
        # Shared across instances: captured requests + canned reply fields.
        requests = []
        status = 200
        reason = None

        def send(self, request, **kwargs):
            # Capture the request, then fabricate a response entirely in-memory.
            self.requests.append(request)
            response = requests.models.Response()
            response.status_code = self.status
            response.reason = self.reason
            response.request = request
            return response

    return FakeHTTPSAdapter()
|
||||
|
||||
|
||||
@pytest.fixture()
def connection_error_adapter():
    """Transport adapter whose send() always fails with an SSL-flavoured
    ConnectionError, for exercising error paths."""
    class ConnectionErrorAdapter(requests.adapters.HTTPAdapter):

        def send(self, request, **kwargs):
            ssl_error = requests.packages.urllib3.exceptions.SSLError()
            raise requests.exceptions.ConnectionError(ssl_error, request=request)

    return ConnectionErrorAdapter()
|
||||
|
||||
|
||||
@pytest.fixture
def fake_socket(tmpdir_factory, request):
    """Replace socket.socket's network primitives with MagicMocks so
    handlers can 'connect' and 'send' without real I/O.

    NOTE(review): this patches the socket *class* globally, not an
    instance — the mocks persist beyond the requesting test.
    """
    sock_cls = socket.socket
    for method_name in ('send', 'connect', 'setblocking', 'close'):
        setattr(sock_cls, method_name, mock.MagicMock())
    return sock_cls
|
||||
|
||||
|
||||
def test_https_logging_handler_requests_async_implementation():
|
||||
handler = HTTPSHandler()
|
||||
assert isinstance(handler.session, FuturesSession)
|
||||
|
||||
|
||||
def test_https_logging_handler_has_default_http_timeout():
|
||||
handler = TCPHandler()
|
||||
assert handler.tcp_timeout == 5
|
||||
|
||||
|
||||
@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts'])
|
||||
def test_base_logging_handler_defaults(param):
|
||||
handler = BaseHandler()
|
||||
assert hasattr(handler, param) and getattr(handler, param) is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts'])
|
||||
def test_base_logging_handler_kwargs(param):
|
||||
handler = BaseHandler(**{param: 'EXAMPLE'})
|
||||
assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('params', [
|
||||
{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_TYPE': 'loggly',
|
||||
'LOG_AGGREGATOR_USERNAME': 'foo',
|
||||
'LOG_AGGREGATOR_PASSWORD': 'bar',
|
||||
'LOG_AGGREGATOR_INDIVIDUAL_FACTS': True,
|
||||
'LOG_AGGREGATOR_TCP_TIMEOUT': 96,
|
||||
'LOG_AGGREGATOR_VERIFY_CERT': False,
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'https'
|
||||
},
|
||||
{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'udp'
|
||||
}
|
||||
])
|
||||
def test_real_handler_from_django_settings(params):
|
||||
settings = LazySettings()
|
||||
settings.configure(**params)
|
||||
handler = AWXProxyHandler().get_handler(custom_settings=settings)
|
||||
# need the _reverse_ dictionary from PARAM_NAMES
|
||||
attr_lookup = {}
|
||||
for attr_name, setting_name in PARAM_NAMES.items():
|
||||
attr_lookup[setting_name] = attr_name
|
||||
for setting_name, val in params.items():
|
||||
attr_name = attr_lookup[setting_name]
|
||||
if attr_name == 'protocol':
|
||||
continue
|
||||
assert hasattr(handler, attr_name)
|
||||
|
||||
|
||||
def test_invalid_kwarg_to_real_handler():
|
||||
settings = LazySettings()
|
||||
settings.configure(**{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'udp',
|
||||
'LOG_AGGREGATOR_VERIFY_CERT': False # setting not valid for UDP handler
|
||||
})
|
||||
handler = AWXProxyHandler().get_handler(custom_settings=settings)
|
||||
assert not hasattr(handler, 'verify_cert')
|
||||
|
||||
|
||||
def test_protocol_not_specified():
|
||||
settings = LazySettings()
|
||||
settings.configure(**{
|
||||
'LOG_AGGREGATOR_HOST': 'https://server.invalid',
|
||||
'LOG_AGGREGATOR_PORT': 22222,
|
||||
'LOG_AGGREGATOR_PROTOCOL': None # awx/settings/defaults.py
|
||||
})
|
||||
handler = AWXProxyHandler().get_handler(custom_settings=settings)
|
||||
assert isinstance(handler, logging.NullHandler)
|
||||
|
||||
|
||||
def test_base_logging_handler_emit_system_tracking(dummy_log_record):
|
||||
handler = BaseHandler(host='127.0.0.1', indv_facts=True)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
dummy_log_record.name = 'awx.analytics.system_tracking'
|
||||
dummy_log_record.msg = None
|
||||
dummy_log_record.inventory_id = 11
|
||||
dummy_log_record.host_name = 'my_lucky_host'
|
||||
dummy_log_record.job_id = 777
|
||||
dummy_log_record.ansible_facts = {
|
||||
"ansible_kernel": "4.4.66-boot2docker",
|
||||
"ansible_machine": "x86_64",
|
||||
"ansible_swapfree_mb": 4663,
|
||||
}
|
||||
dummy_log_record.ansible_facts_modified = datetime.datetime.now(tzutc()).isoformat()
|
||||
sent_payloads = handler.emit(dummy_log_record)
|
||||
|
||||
assert len(sent_payloads) == 1
|
||||
assert sent_payloads[0]['ansible_facts'] == dummy_log_record.ansible_facts
|
||||
assert sent_payloads[0]['ansible_facts_modified'] == dummy_log_record.ansible_facts_modified
|
||||
assert sent_payloads[0]['level'] == 'INFO'
|
||||
assert sent_payloads[0]['logger_name'] == 'awx.analytics.system_tracking'
|
||||
assert sent_payloads[0]['job_id'] == dummy_log_record.job_id
|
||||
assert sent_payloads[0]['inventory_id'] == dummy_log_record.inventory_id
|
||||
assert sent_payloads[0]['host_name'] == dummy_log_record.host_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize('host, port, normalized, hostname_only', [
|
||||
('http://localhost', None, 'http://localhost', False),
|
||||
('http://localhost', 8080, 'http://localhost:8080', False),
|
||||
('https://localhost', 443, 'https://localhost:443', False),
|
||||
('ftp://localhost', 443, 'ftp://localhost:443', False),
|
||||
('https://localhost:550', 443, 'https://localhost:550', False),
|
||||
('https://localhost:yoho/foobar', 443, 'https://localhost:443/foobar', False),
|
||||
('https://localhost:yoho/foobar', None, 'https://localhost:yoho/foobar', False),
|
||||
('http://splunk.server:8088/services/collector/event', 80,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('http://splunk.server/services/collector/event', 8088,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('splunk.server:8088/services/collector/event', 80,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('splunk.server/services/collector/event', 8088,
|
||||
'http://splunk.server:8088/services/collector/event', False),
|
||||
('localhost', None, 'http://localhost', False),
|
||||
('localhost', 8080, 'http://localhost:8080', False),
|
||||
('localhost', 4399, 'localhost', True),
|
||||
('tcp://localhost:4399/foo/bar', 4399, 'localhost', True),
|
||||
])
|
||||
def test_base_logging_handler_host_format(host, port, normalized, hostname_only):
|
||||
handler = BaseHandler(host=host, port=port)
|
||||
assert handler._get_host(scheme='http', hostname_only=hostname_only) == normalized
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'status, reason, exc',
|
||||
[(200, '200 OK', None), (404, 'Not Found', LoggingConnectivityException)]
|
||||
)
|
||||
@pytest.mark.parametrize('protocol', ['http', 'https', None])
|
||||
def test_https_logging_handler_connectivity_test(https_adapter, status, reason, exc, protocol):
|
||||
host = 'example.org'
|
||||
if protocol:
|
||||
host = '://'.join([protocol, host])
|
||||
https_adapter.status = status
|
||||
https_adapter.reason = reason
|
||||
settings = LazySettings()
|
||||
settings.configure(**{
|
||||
'LOG_AGGREGATOR_HOST': host,
|
||||
'LOG_AGGREGATOR_PORT': 8080,
|
||||
'LOG_AGGREGATOR_TYPE': 'logstash',
|
||||
'LOG_AGGREGATOR_USERNAME': 'user',
|
||||
'LOG_AGGREGATOR_PASSWORD': 'password',
|
||||
'LOG_AGGREGATOR_LOGGERS': ['awx', 'activity_stream', 'job_events', 'system_tracking'],
|
||||
'LOG_AGGREGATOR_PROTOCOL': 'https',
|
||||
'CLUSTER_HOST_ID': '',
|
||||
'LOG_AGGREGATOR_TOWER_UUID': str(uuid4()),
|
||||
'LOG_AGGREGATOR_LEVEL': 'DEBUG',
|
||||
})
|
||||
|
||||
class FakeHTTPSHandler(HTTPSHandler):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FakeHTTPSHandler, self).__init__(*args, **kwargs)
|
||||
self.session.mount('{}://'.format(protocol or 'https'), https_adapter)
|
||||
|
||||
def emit(self, record):
|
||||
return super(FakeHTTPSHandler, self).emit(record)
|
||||
|
||||
with mock.patch.object(AWXProxyHandler, 'get_handler_class') as mock_get_class:
|
||||
mock_get_class.return_value = FakeHTTPSHandler
|
||||
if exc:
|
||||
with pytest.raises(exc) as e:
|
||||
AWXProxyHandler().perform_test(settings)
|
||||
assert str(e).endswith('%s: %s' % (status, reason))
|
||||
else:
|
||||
assert AWXProxyHandler().perform_test(settings) is None
|
||||
|
||||
|
||||
def test_https_logging_handler_logstash_auth_info():
|
||||
handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible')
|
||||
handler._add_auth_information()
|
||||
assert isinstance(handler.session.auth, requests.auth.HTTPBasicAuth)
|
||||
assert handler.session.auth.username == 'bob'
|
||||
assert handler.session.auth.password == 'ansible'
|
||||
|
||||
|
||||
def test_https_logging_handler_splunk_auth_info():
|
||||
handler = HTTPSHandler(message_type='splunk', password='ansible')
|
||||
handler._add_auth_information()
|
||||
assert handler.session.headers['Authorization'] == 'Splunk ansible'
|
||||
assert handler.session.headers['Content-Type'] == 'application/json'
|
||||
|
||||
|
||||
def test_https_logging_handler_connection_error(connection_error_adapter,
|
||||
dummy_log_record):
|
||||
handler = HTTPSHandler(host='127.0.0.1', message_type='logstash')
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('http://', connection_error_adapter)
|
||||
|
||||
buff = StringIO()
|
||||
logging.getLogger('awx.main.utils.handlers').addHandler(
|
||||
logging.StreamHandler(buff)
|
||||
)
|
||||
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
with pytest.raises(requests.exceptions.ConnectionError):
|
||||
[future.result() for future in async_futures]
|
||||
assert 'failed to emit log to external aggregator\nTraceback' in buff.getvalue()
|
||||
|
||||
# we should only log failures *periodically*, so causing *another*
|
||||
# immediate failure shouldn't report a second ConnectionError
|
||||
buff.truncate(0)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
with pytest.raises(requests.exceptions.ConnectionError):
|
||||
[future.result() for future in async_futures]
|
||||
assert buff.getvalue() == ''
|
||||
|
||||
|
||||
@pytest.mark.parametrize('message_type', ['logstash', 'splunk'])
|
||||
def test_https_logging_handler_emit_without_cred(https_adapter, dummy_log_record,
|
||||
message_type):
|
||||
handler = HTTPSHandler(host='127.0.0.1', message_type=message_type)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('https://', https_adapter)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
[future.result() for future in async_futures]
|
||||
|
||||
assert len(https_adapter.requests) == 1
|
||||
request = https_adapter.requests[0]
|
||||
assert request.url == 'https://127.0.0.1/'
|
||||
assert request.method == 'POST'
|
||||
|
||||
if message_type == 'logstash':
|
||||
# A username + password weren't used, so this header should be missing
|
||||
assert 'Authorization' not in request.headers
|
||||
|
||||
if message_type == 'splunk':
|
||||
assert request.headers['Authorization'] == 'Splunk None'
|
||||
|
||||
|
||||
def test_https_logging_handler_emit_logstash_with_creds(https_adapter,
|
||||
dummy_log_record):
|
||||
handler = HTTPSHandler(host='127.0.0.1',
|
||||
username='user', password='pass',
|
||||
message_type='logstash')
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('https://', https_adapter)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
[future.result() for future in async_futures]
|
||||
|
||||
assert len(https_adapter.requests) == 1
|
||||
request = https_adapter.requests[0]
|
||||
assert request.headers['Authorization'] == 'Basic %s' % smart_str(base64.b64encode(b"user:pass"))
|
||||
|
||||
|
||||
def test_https_logging_handler_emit_splunk_with_creds(https_adapter,
|
||||
dummy_log_record):
|
||||
handler = HTTPSHandler(host='127.0.0.1',
|
||||
password='pass', message_type='splunk')
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
handler.session.mount('https://', https_adapter)
|
||||
async_futures = handler.emit(dummy_log_record)
|
||||
[future.result() for future in async_futures]
|
||||
|
||||
assert len(https_adapter.requests) == 1
|
||||
request = https_adapter.requests[0]
|
||||
assert request.headers['Authorization'] == 'Splunk pass'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('payload, encoded_payload', [
    ('foobar', 'foobar'),
    ({'foo': 'bar'}, '{"foo": "bar"}'),
    ({u'测试键': u'测试值'}, '{"测试键": "测试值"}'),
])
def test_encode_payload_for_socket(payload, encoded_payload):
    """str and dict payloads (including non-ASCII keys/values) are
    serialized and UTF-8 encoded for the socket."""
    encoded = _encode_payload_for_socket(payload)
    assert encoded.decode('utf-8') == encoded_payload
|
||||
|
||||
|
||||
def test_udp_handler_create_socket_at_init():
|
||||
handler = UDPHandler(host='127.0.0.1', port=4399)
|
||||
assert hasattr(handler, 'socket')
|
||||
assert isinstance(handler.socket, socket.socket)
|
||||
assert handler.socket.family == socket.AF_INET
|
||||
assert handler.socket.type == socket.SOCK_DGRAM
|
||||
|
||||
|
||||
def test_udp_handler_send(dummy_log_record):
|
||||
handler = UDPHandler(host='127.0.0.1', port=4399)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('awx.main.utils.handlers._encode_payload_for_socket', return_value="des") as encode_mock,\
|
||||
mock.patch.object(handler, 'socket') as socket_mock:
|
||||
handler.emit(dummy_log_record)
|
||||
encode_mock.assert_called_once_with(handler.format(dummy_log_record))
|
||||
socket_mock.sendto.assert_called_once_with("des", ('127.0.0.1', 4399))
|
||||
|
||||
|
||||
def test_tcp_handler_send(fake_socket, dummy_log_record):
|
||||
handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
|
||||
mock.patch('select.select', return_value=([], [fake_socket], [])):
|
||||
handler.emit(dummy_log_record)
|
||||
sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
|
||||
fake_socket.connect.assert_called_once_with(('127.0.0.1', 4399))
|
||||
fake_socket.setblocking.assert_called_once_with(0)
|
||||
fake_socket.send.assert_called_once_with(handler.format(dummy_log_record))
|
||||
fake_socket.close.assert_called_once()
|
||||
|
||||
|
||||
def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record):
|
||||
handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
|
||||
mock.patch('select.select', return_value=([], [], [])):
|
||||
handler.emit(dummy_log_record)
|
||||
sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
|
||||
fake_socket.connect.assert_called_once_with(('127.0.0.1', 4399))
|
||||
fake_socket.setblocking.assert_called_once_with(0)
|
||||
assert not fake_socket.send.called
|
||||
fake_socket.close.assert_called_once()
|
||||
|
||||
|
||||
def test_tcp_handler_log_exception(fake_socket, dummy_log_record):
|
||||
handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5)
|
||||
handler.setFormatter(LogstashFormatter())
|
||||
with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
|
||||
mock.patch('select.select', return_value=([], [], [])),\
|
||||
mock.patch('awx.main.utils.handlers.logger') as logger_mock:
|
||||
fake_socket.connect.side_effect = Exception("foo")
|
||||
handler.emit(dummy_log_record)
|
||||
sok_init_mock.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
|
||||
logger_mock.exception.assert_called_once()
|
||||
fake_socket.close.assert_called_once()
|
||||
assert not fake_socket.send.called
|
||||
@@ -8,7 +8,7 @@ def test_produce_supervisor_command(mocker):
|
||||
mock_process.communicate = communicate_mock
|
||||
Popen_mock = mocker.MagicMock(return_value=mock_process)
|
||||
with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
|
||||
reload._supervisor_service_command("restart")
|
||||
reload.supervisor_service_command("restart")
|
||||
reload.subprocess.Popen.assert_called_once_with(
|
||||
['supervisorctl', 'restart', 'tower-processes:*',],
|
||||
stderr=-1, stdin=-1, stdout=-1)
|
||||
|
||||
Reference in New Issue
Block a user