diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index b7b851a5c5..69f556ddba 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -14,7 +14,6 @@ from requests_futures.sessions import FuturesSession # custom from django.conf import settings as django_settings -from django.utils.log import NullHandler # AWX external logging handler, generally designed to be used # with the accompanying LogstashHandler, derives from python-logstash library @@ -38,7 +37,7 @@ def unused_callback(sess, resp): pass -class HTTPSNullHandler(NullHandler): +class HTTPSNullHandler(logging.NullHandler): "Placeholder null handler to allow loading without database access" def __init__(self, host, **kwargs): diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index cbe05ae83b..9c77b5d381 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -893,16 +893,16 @@ LOGGING = { 'formatter': 'simple', }, 'null': { - 'class': 'django.utils.log.NullHandler', + 'class': 'logging.NullHandler', }, 'file': { - 'class': 'django.utils.log.NullHandler', + 'class': 'logging.NullHandler', 'formatter': 'simple', }, 'syslog': { 'level': 'WARNING', 'filters': ['require_debug_false'], - 'class': 'django.utils.log.NullHandler', + 'class': 'logging.NullHandler', 'formatter': 'simple', }, 'http_receiver': { diff --git a/docs/logging_integration.md b/docs/logging_integration.md index f913122e04..cca7434db4 100644 --- a/docs/logging_integration.md +++ b/docs/logging_integration.md @@ -26,7 +26,7 @@ from the API. These data loggers are the following. These loggers only use log-level of INFO. -Additionally, the standard Tower logs should be deliverable through this +Additionally, the standard Tower logs are deliverable through this same mechanism. 
It should be obvious to the user how to enable to disable each of these 5 sources of data without manipulating a complex dictionary in their local settings file, as well as adjust the log-level consumed @@ -34,16 +34,19 @@ from the standard Tower logs. ## Supported Services -Currently committed to support: +Committed to support: - Splunk - Elastic Stack / ELK Stack / Elastic Cloud -Under consideration for testing: +Have tested: - - Sumo Logic - - Datadog + - Sumologic - Loggly + +Considered, but have not tested: + + - Datadog - Red Hat Common Logging via logstash connector ### Elastic Search Instructions @@ -64,8 +67,8 @@ make docker-compose-elk make docker-compose-cluster-elk ``` -Kibana is the visualization service, and it can be accessed in a web browser -by going to `{server address}:5601`. +For more instructions on getting started with the environment this stands +up, also refer to instructions in `/tools/elastic/README.md`. If you were to start from scratch, standing up your own version the elastic stack, then the only change you should need is to add the following lines @@ -149,6 +152,8 @@ the job model. In addition to the common fields, this will contain a `msg` field with the log message. Errors contain a separate `traceback` field. +These logs can be enabled or disabled in CTiT by adding it to or +removing it from the setting `LOG_AGGREGATOR_LOGGERS`. 
# Configuring Inside of Tower @@ -158,10 +163,12 @@ supported services: - Host - Port - - some kind of token - - enabling sending logs, and selecting which loggers to send - - use fully qualified domain name (fqdn) or not - - flag to use HTTPS or not + - The type of service, allowing service-specific customizations + - Optional username for the connection, used by certain services + - Some kind of token or password + - A flag to indicate how system tracking records will be sent + - Selecting which loggers to send + - Enabling sending logs Some settings for the log handler will not be exposed to the user via this mechanism. In particular, threading (enabled), and connection type diff --git a/tools/elastic/README.md b/tools/elastic/README.md index b98b0b34b6..c872831eaf 100644 --- a/tools/elastic/README.md +++ b/tools/elastic/README.md @@ -1,8 +1,7 @@ # Docker ELK / Elastic Stack Development Tools These are tools to run a containerized version of ELK stack, comprising -of Logstash, Elastic Search, and Kibana. There are also cases where -only a subset of these are needed to run. +of Logstash, Elastic Search, and Kibana. A copy of the license is in `docs/licenses/docker-elk.txt` @@ -12,12 +11,10 @@ Due to complex requirements from the elastic search container upstream, there is a prerequisite to get the containers running. The docker _host_ machine must have the `max_map_count` variable increased. For a developer using docker-machine with something like VirtualBox of VMWare, this can be -done by getting bash in the running Docker machine. Example: +done via bash in the running Docker machine. 
Example: ```bash -$ docker-machine ssh default -docker@default:~$ sudo sysctl -w vm.max_map_count=262144 -vm.max_map_count = 262144 +docker-machine ssh default sudo sysctl -w vm.max_map_count=262144 ``` After this, the containers can be started up with commands like: @@ -32,6 +29,37 @@ make docker-compose-cluster-elk These are ran from the root folder of the ansible-tower repository. +Kibana is the visualization service, and it can be accessed in a web browser +by going to `{server address}:5601`. + + +### Authentication + +The default logstash configuration makes use of basic auth, so a username +and password is needed in the configuration, in addition to the other +parameters. The following settings are supported: + +``` +{ + "LOG_AGGREGATOR_HOST": "logstash", + "LOG_AGGREGATOR_PORT": 8085, + "LOG_AGGREGATOR_TYPE": "logstash", + "LOG_AGGREGATOR_USERNAME": "awx_logger", + "LOG_AGGREGATOR_PASSWORD": "workflows", + "LOG_AGGREGATOR_LOGGERS": [ + "awx", + "activity_stream", + "job_events", + "system_tracking" + ], + "LOG_AGGREGATOR_INDIVIDUAL_FACTS": false, + "LOG_AGGREGATOR_ENABLED": true +} +``` + +These can be entered via Configure-Tower-in-Tower by making a POST to +`/api/v1/settings/logging/`. + ### Connecting Logstash to 3rd Party Receivers In order to send these logs to an external consumer of logstash format