From f3427d1359fc4bcb5fa0c67190fcd1c95d4c2dd0 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 17 Oct 2016 15:00:38 -0400 Subject: [PATCH 01/14] Logging Integration, ELK docker-compose as update file --- Makefile | 15 ++ awx/api/views.py | 17 +- awx/main/conf.py | 33 +++ awx/main/consumers.py | 5 + awx/main/log_utils/__init__.py | 2 + awx/main/log_utils/formatters.py | 59 +++++ awx/main/log_utils/handlers.py | 210 ++++++++++++++++++ awx/main/log_utils/utils.py | 57 +++++ .../commands/run_fact_cache_receiver.py | 3 + .../commands/run_socketio_service.py | 2 +- awx/main/middleware.py | 5 + awx/main/models/jobs.py | 6 +- awx/main/signals.py | 12 +- awx/main/tests/unit/test_log_formatter.py | 31 +++ awx/main/utils.py | 62 ++++++ awx/settings/defaults.py | 78 +++++-- awx/settings/production.py | 1 - docs/licenses/docker-elk.txt | 21 ++ tools/elastic/README.md | 56 +++++ .../docker-compose.elastic-override.yml | 49 ++++ .../docker-compose.logstash-link-cluster.yml | 12 + .../elastic/docker-compose.logstash-link.yml | 6 + tools/elastic/elasticsearch/Dockerfile | 5 + .../elastic/elasticsearch/config/.placeholder | 1 + tools/elastic/kibana/Dockerfile | 1 + tools/elastic/kibana/config/kibana.yml | 92 ++++++++ tools/elastic/logstash/Dockerfile | 4 + tools/elastic/logstash/config/logstash.conf | 19 ++ 28 files changed, 839 insertions(+), 25 deletions(-) create mode 100644 awx/main/log_utils/__init__.py create mode 100644 awx/main/log_utils/formatters.py create mode 100644 awx/main/log_utils/handlers.py create mode 100644 awx/main/log_utils/utils.py create mode 100644 awx/main/tests/unit/test_log_formatter.py create mode 100644 docs/licenses/docker-elk.txt create mode 100644 tools/elastic/README.md create mode 100644 tools/elastic/docker-compose.elastic-override.yml create mode 100644 tools/elastic/docker-compose.logstash-link-cluster.yml create mode 100644 tools/elastic/docker-compose.logstash-link.yml create mode 100644 tools/elastic/elasticsearch/Dockerfile create mode 100644 tools/elastic/elasticsearch/config/.placeholder create mode 100644 tools/elastic/kibana/Dockerfile create mode 100644 tools/elastic/kibana/config/kibana.yml create mode 100644 tools/elastic/logstash/Dockerfile create mode 100644 tools/elastic/logstash/config/logstash.conf diff --git a/Makefile b/Makefile index 9d6bdb1f53..669b5eeeca 100644 --- a/Makefile +++ b/Makefile @@ -878,6 +878,21 @@ docker-clean: docker-refresh: docker-clean docker-compose +# Docker Development Environment with Elastic Stack Connected +docker-compose-elk: docker-auth + TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate + +docker-compose-cluster-elk: docker-auth + TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate + +clean-elk: + docker stop tools_kibana_1 + docker stop tools_logstash_1 + docker stop tools_elasticsearch_1 + docker rm tools_logstash_1 + docker rm tools_elasticsearch_1 + docker rm tools_kibana_1 + mongo-debug-ui: docker run -it --rm --name mongo-express --link tools_mongo_1:mongo -e ME_CONFIG_OPTIONS_EDITORTHEME=ambiance -e ME_CONFIG_BASICAUTH_USERNAME=admin -e ME_CONFIG_BASICAUTH_PASSWORD=password -p 8081:8081 knickers/mongo-express diff --git a/awx/api/views.py b/awx/api/views.py index bea9711da9..deaa8c03a3 100644 --- a/awx/api/views.py +++ b/awx/api/views.py 
@@ -259,14 +259,16 @@ class ApiV1ConfigView(APIView): try: data_actual = json.dumps(request.data) except Exception: - # FIX: Log + logger.info(smart_text(u"Invalid JSON submitted for Tower license."), + extra=dict(actor=request.user.username)) return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) try: from awx.main.task_engine import TaskEnhancer license_data = json.loads(data_actual) license_data_validated = TaskEnhancer(**license_data).validate_enhancements() except Exception: - # FIX: Log + logger.warning(smart_text(u"Invalid Tower license submitted."), + extra=dict(actor=request.user.username)) return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) # If the license is valid, write it to the database. @@ -275,6 +277,8 @@ class ApiV1ConfigView(APIView): settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data_validated) + logger.warning(smart_text(u"Invalid Tower license submitted."), + extra=dict(actor=request.user.username)) return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): @@ -541,12 +545,14 @@ class AuthTokenView(APIView): reason='')[0] token.refresh() if 'username' in request.data: - logger.info(smart_text(u"User {} logged in".format(request.data['username']))) + logger.info(smart_text(u"User {} logged in".format(request.data['username'])), + extra=dict(actor=request.data['username'])) except IndexError: token = AuthToken.objects.create(user=serializer.validated_data['user'], request_hash=request_hash) if 'username' in request.data: - logger.info(smart_text(u"User {} logged in".format(request.data['username']))) + logger.info(smart_text(u"User {} logged in".format(request.data['username'])), + extra=dict(actor=request.data['username'])) # Get user un-expired tokens that are not invalidated that are # over the configured limit. 
# Mark them as invalid and inform the user @@ -564,7 +570,8 @@ class AuthTokenView(APIView): } return Response({'token': token.key, 'expires': token.expires}, headers=headers) if 'username' in request.data: - logger.warning(smart_text(u"Login failed for user {}".format(request.data['username']))) + logger.warning(smart_text(u"Login failed for user {}".format(request.data['username'])), + extra=dict(actor=request.data['username'])) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/awx/main/conf.py b/awx/main/conf.py index 85f1009c3b..df080c046b 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -223,3 +223,36 @@ register( category=_('Jobs'), category_slug='jobs', ) + +register( + 'LOG_AGGREGATOR_HOST', + field_class=fields.CharField, + label=_('Logging Aggregator Receiving Host'), + help_text=_('External host that maintains a log collector to send logs to'), + category=_('Logging'), + category_slug='logging', +) +register( + 'LOG_AGGREGATOR_PORT', + field_class=fields.CharField, + label=_('Logging Aggregator Receiving Port'), + help_text=_('Port that the log collector is listening on'), + category=_('Logging'), + category_slug='logging', +) +register( + 'LOG_AGGREGATOR_TYPE', + field_class=fields.CharField, + label=_('Logging Aggregator Type: Logstash, Loggly, Datadog, etc'), + help_text=_('The type of log aggregator service to format messages for'), + category=_('Logging'), + category_slug='logging', +) +register( + 'LOG_AGGREGATOR_USERNAME', + field_class=fields.CharField, + label=_('Logging Aggregator Receiver Username'), + help_text=_('Username for Logstash or others'), + category=_('Logging'), + category_slug='logging', +)
diff --git a/awx/main/consumers.py b/awx/main/consumers.py index 5e0ec6a376..2e74f05114 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -1,5 +1,6 @@ import json import urlparse +import logging from channels import Group from channels.sessions import channel_session @@ -8,6 +9,8 @@ from django.contrib.auth.models import User from awx.main.models.organization import AuthToken +logger = logging.getLogger('awx.main.consumers') + def discard_groups(message): if 'groups' in message.channel_session: for group in message.channel_session['groups']: @@ -52,11 +55,13 @@ def ws_receive(message): auth_token = validate_token(token) if auth_token is None: + logger.error("Authentication Failure validating user") message.reply_channel.send({"text": json.dumps({"error": "invalid auth token"})}) return None user = user_from_token(auth_token) if user is None: + logger.error("No valid user corresponding to submitted auth_token") message.reply_channel.send({"text": json.dumps({"error": "no valid user"})}) return None
diff --git a/awx/main/log_utils/__init__.py b/awx/main/log_utils/__init__.py new file mode 100644 index 0000000000..6c3f484790 --- /dev/null +++ b/awx/main/log_utils/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2017 Ansible by Red Hat +# All Rights Reserved.
diff --git a/awx/main/log_utils/formatters.py b/awx/main/log_utils/formatters.py new file mode 100644 index 0000000000..17fafca063 --- /dev/null +++ b/awx/main/log_utils/formatters.py @@ -0,0 +1,59 @@ +# Copyright (c) 2017 Ansible Tower by Red Hat +# All Rights Reserved.
+ +from logstash.formatter import LogstashFormatterVersion1 +from django.conf import settings + + +# # Loggly example +# 'json': { +# 'format': '{ +# "loggerName":"%(name)s", +# "asciTime":"%(asctime)s", +# "fileName":"%(filename)s", +# "logRecordCreationTime":"%(created)f", +# "functionName":"%(funcName)s", +# "levelNo":"%(levelno)s", +# "lineNo":"%(lineno)d", +# "time":"%(msecs)d", +# "levelName":"%(levelname)s", +# "message":"%(message)s"}', +# }, + +class LogstashFormatter(LogstashFormatterVersion1): + def __init__(self, **kwargs): + ret = super(LogstashFormatter, self).__init__(**kwargs) + self.host_id = settings.CLUSTER_HOST_ID + return ret + + def get_extra_fields(self, record): + fields = super(LogstashFormatter, self).get_extra_fields(record) + fields['cluster_host_id'] = self.host_id + return fields + + def format(self, record): + # Create message dict + # message = record.getMessage() + # print ' message ' + str(message) + message = { + '@timestamp': self.format_timestamp(record.created), + '@version': '1', + 'message': record.getMessage(), + 'host': self.host, + 'path': record.pathname, + 'tags': self.tags, + 'type': self.message_type, + + # Extra Fields + 'level': record.levelname, + 'logger_name': record.name, + } + + # Add extra fields + message.update(self.get_extra_fields(record)) + + # If exception, add debug info + if record.exc_info: + message.update(self.get_debug_fields(record)) + + return self.serialize(message) diff --git a/awx/main/log_utils/handlers.py b/awx/main/log_utils/handlers.py new file mode 100644 index 0000000000..762b359608 --- /dev/null +++ b/awx/main/log_utils/handlers.py @@ -0,0 +1,210 @@ +# Copyright (c) 2017 Ansible Tower by Red Hat +# All Rights Reserved. + +# common +import socket +import logging + +# Splunk +import urllib +import json +import gzip +import cStringIO + +import requests + +from .utils import parse_config_file, get_config_from_env + +# loggly +import traceback + +from requests_futures.sessions import FuturesSession + +# Logstash +from logstash import formatter + + +ENABLED_LOGS = ['ansible'] + +# Logstash +# https://github.com/vklochan/python-logstash +class TCPLogstashHandler(logging.handlers.SocketHandler, object): + """Python logging handler for Logstash. Sends events over TCP. + :param host: The host of the logstash server. + :param port: The port of the logstash server (default 5959). + :param message_type: The type of the message (default logstash). + :param fqdn; Indicates whether to show fully qualified domain name or not (default False). + :param version: version of logstash event schema (default is 0). + :param tags: list of tags for a logger (default is None). 
+ """ + + def __init__(self, host, port=5959, message_type='logstash', tags=None, fqdn=False, version=0): + super(TCPLogstashHandler, self).__init__(host, port) + if version == 1: + self.formatter = formatter.LogstashFormatterVersion1(message_type, tags, fqdn) + else: + self.formatter = formatter.LogstashFormatterVersion0(message_type, tags, fqdn) + + def makePickle(self, record): + return self.formatter.format(record) + b'\n' + + +# loggly +# https://github.com/varshneyjayant/loggly-python-handler + +session = FuturesSession() + + +def bg_cb(sess, resp): + """ Don't do anything with the response """ + pass + +# add port for a generic handler +class HTTPSHandler(logging.Handler): + def __init__(self, host, port=80, message_type='logstash', fqdn=False): + super(HTTPSHandler, self).__init__() + self.host_saved = host + self.port = port + # self.port = port + self.message_type = message_type + self.fqdn = fqdn + + def get_full_message(self, record): + if record.exc_info: + return '\n'.join(traceback.format_exception(*record.exc_info)) + else: + return record.getMessage() + + def emit(self, record): + try: + payload = self.format(record) + # TODO: move this enablement logic to rely on individual loggers once + # the enablement config variable is hooked up + payload_data = json.loads(payload) + if payload_data['logger_name'].startswith('awx.analytics.system_tracking'): + st_type = None + for fd in ['services', 'packages', 'files', 'ansible']: + if fd in payload_data: + st_type = fd + break + if st_type not in ENABLED_LOGS: + return + host = self.host_saved + if not host.startswith('http'): + host = 'http://%s' % self.host_saved + if self.port != 80: + host = '%s:%s' % (host, str(self.port)) + session.post(host, data=payload, background_callback=bg_cb) + except (KeyboardInterrupt, SystemExit): + raise + except: + self.handleError(record) + + +# splunk +# https://github.com/andresriancho/splunk-logger + +class SplunkLogger(logging.Handler): + """ + A class to send messages to splunk storm using their API + """ + # Required format for splunk storm + INPUT_URL_FMT = 'https://%s/1/inputs/http' + + def __init__(self, access_token=None, project_id=None, api_domain=None): + logging.Handler.__init__(self) + + self._set_auth(access_token, project_id, api_domain) + self.url = self.INPUT_URL_FMT % self.api_domain + + self._set_url_opener() + + # Handle errors in authentication + self._auth_failed = False + + def _set_auth(self, access_token, project_id, api_domain): + # The access token and project id passed as parameter override the ones + # configured in the .splunk_logger file. + if access_token is not None\ + and project_id is not None\ + and api_domain is not None: + self.project_id = project_id + self.access_token = access_token + self.api_domain = api_domain + + else: + # Try to get the credentials form the configuration file + self.project_id, self.access_token, self.api_domain = parse_config_file() + + if self.project_id is None\ + or self.access_token is None\ + or self.api_domain is None: + # Try to get the credentials form the environment variables + self.project_id, self.access_token, self.api_domain = get_config_from_env() + + if self.access_token is None or self.project_id is None: + raise ValueError('Access token, project id and API endpoint domain' + ' need to be set.') + + def _set_url_opener(self): + # We disable the logging of the requests module to avoid some infinite + # recursion errors that might appear. 
+ requests_log = logging.getLogger("requests") + requests_log.setLevel(logging.CRITICAL) + + self.session = requests.Session() + self.session.auth = ('x', self.access_token) + self.session.headers.update({'Content-Encoding': 'gzip'}) + + def usesTime(self): + return False + + def _compress(self, input_str): + """ + Compress the log message in order to send less bytes to the wire. + """ + compressed_bits = cStringIO.StringIO() + + f = gzip.GzipFile(fileobj=compressed_bits, mode='wb') + f.write(input_str) + f.close() + + return compressed_bits.getvalue() + + def emit(self, record): + + if self._auth_failed: + # Don't send anything else once a 401 was returned + return + + try: + response = self._send_to_splunk(record) + except (KeyboardInterrupt, SystemExit): + raise + except: + # All errors end here. + self.handleError(record) + else: + if response.status_code == 401: + self._auth_failed = True + + def _send_to_splunk(self, record): + # http://docs.splunk.com/Documentation/Storm/latest/User/Sourcesandsourcetypes + sourcetype = 'json_no_timestamp' + + host = socket.gethostname() + + event_dict = {'data': self.format(record), + 'level': record.levelname, + 'module': record.module, + 'line': record.lineno} + event = json.dumps(event_dict) + event = self._compress(event) + + params = {'index': self.project_id, + 'sourcetype': sourcetype, + 'host': host} + + url = '%s?%s' % (self.url, urllib.urlencode(params)) + return self.session.post(url, data=event) + diff --git a/awx/main/log_utils/utils.py b/awx/main/log_utils/utils.py new file mode 100644 index 0000000000..81b1d397c7 --- /dev/null +++ b/awx/main/log_utils/utils.py @@ -0,0 +1,57 @@ +import os +import yaml + + +def parse_config_file(): + """ + Find the .splunk_logger config file in the current directory, or in the + user's home and parse it. The one in the current directory has precedence. + + :return: A tuple with: + - project_id + - access_token + """ + for filename in ('.splunk_logger', os.path.expanduser('~/.splunk_logger')): + + project_id, access_token, api_domain = _parse_config_file_impl(filename) + + if project_id is not None\ + and access_token is not None\ + and api_domain is not None: + return project_id, access_token, api_domain + + else: + return None, None, None + + +def _parse_config_file_impl(filename): + """ + Format for the file is: + + credentials: + project_id: ... + access_token: ... + api_domain: ... 
+ + :param filename: The filename to parse + :return: A tuple with: + - project_id + - access_token + - api_domain + """ + try: + doc = yaml.load(file(filename).read()) + + project_id = doc["credentials"]["project_id"] + access_token = doc["credentials"]["access_token"] + api_domain = doc["credentials"]["api_domain"] + + return project_id, access_token, api_domain + except: + return None, None, None + + +def get_config_from_env(): + return (os.environ.get('SPLUNK_PROJECT_ID', None), + os.environ.get('SPLUNK_ACCESS_TOKEN', None), + os.environ.get('SPLUNK_API_DOMAIN', None)) \ No newline at end of file diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py index 9796e1db6c..3d962243f0 100644 --- a/awx/main/management/commands/run_fact_cache_receiver.py +++ b/awx/main/management/commands/run_fact_cache_receiver.py @@ -16,8 +16,10 @@ from django.utils import timezone # AWX from awx.main.models.fact import Fact from awx.main.models.inventory import Host +from awx.main.utils import format_for_log logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver') +data_logger = logging.getLogger('awx.analytics.system_tracking') class FactBrokerWorker(ConsumerMixin): @@ -83,6 +85,7 @@ class FactBrokerWorker(ConsumerMixin): # Create new Fact entry fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts) logger.info('Created new fact <%s, %s>' % (fact_obj.id, module_name)) + data_logger.info('Received message with fact data', extra=format_for_log({module_name: facts}, kind="fact")) return fact_obj diff --git a/awx/main/management/commands/run_socketio_service.py b/awx/main/management/commands/run_socketio_service.py index 9b7e5a61d2..6a6404f2b4 100644 --- a/awx/main/management/commands/run_socketio_service.py +++ b/awx/main/management/commands/run_socketio_service.py @@ -23,7 +23,7 @@ from socketio import socketio_manage from socketio.server import SocketIOServer from socketio.namespace import BaseNamespace -logger = logging.getLogger('awx.main.commands.run_socketio_service') +logger = logging.getLogger('awx.main.consumers') class SocketSession(object): diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 187017ddba..1c8f8fc4d5 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -16,6 +16,7 @@ from awx.api.authentication import TokenAuthentication logger = logging.getLogger('awx.main.middleware') +analytics_logger = logging.getLogger('awx.analytics.activity_stream') class ActivityStreamMiddleware(threading.local): @@ -46,6 +47,10 @@ class ActivityStreamMiddleware(threading.local): instance.actor = drf_user try: instance.save(update_fields=['actor']) + analytics_logger.info('Activity Stream update entry for %s' % str(instance.object1), + extra=dict(changes=instance.changes, relationship=instance.object_relationship_type, + actor=drf_user.username, operation=instance.operation, + object1=instance.object1, object2=instance.object2)) except IntegrityError: logger.debug("Integrity Error saving Activity Stream instance for id : " + str(instance.id)) # else: diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 2688219d00..fb4cd10638 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -32,6 +32,7 @@ from awx.main.models.notifications import ( from awx.main.utils import ( ignore_inventory_computed_fields, parse_yaml_or_json, + format_for_log ) from awx.main.redact import PlainTextCleaner from awx.main.fields import ImplicitRoleField @@ -43,6 +44,7 @@ 
from awx.main.consumers import emit_channel_notification logger = logging.getLogger('awx.main.models.jobs') +event_logger = logging.getLogger('awx.analytics.job_events') __all__ = ['JobTemplate', 'Job', 'JobHostSummary', 'JobEvent', 'SystemJobOptions', 'SystemJobTemplate', 'SystemJob'] @@ -1186,6 +1188,9 @@ class JobEvent(CreatedModifiedModel): if parent_id: kwargs['parent_id'] = parent_id + # event_logger.info('Body: {}'.format(str(data_for_log)), extra=data_for_log) + event_logger.info('Job event data saved.', extra=format_for_log(kwargs, kind='event')) + job_event = JobEvent.objects.create(**kwargs) # Cache this job event ID vs. UUID for future parent lookups. @@ -1196,7 +1201,6 @@ class JobEvent(CreatedModifiedModel): # Save artifact data to parent job (if provided). if artifact_data: artifact_dict = json.loads(artifact_data) - event_data = kwargs.get('event_data', None) if event_data and isinstance(event_data, dict): res = event_data.get('res', None) if res and isinstance(res, dict): diff --git a/awx/main/signals.py b/awx/main/signals.py index ceda8899b1..477a9f2257 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -20,7 +20,7 @@ from crum.signals import current_user_getter from awx.main.models import * # noqa from awx.api.serializers import * # noqa from awx.main.utils import model_instance_diff, model_to_dict, camelcase_to_underscore -from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates +from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates, format_for_log from awx.main.tasks import update_inventory_computed_fields from awx.main.consumers import emit_channel_notification @@ -28,6 +28,7 @@ from awx.main.consumers import emit_channel_notification __all__ = [] logger = logging.getLogger('awx.main.signals') +analytics_logger = logging.getLogger('awx.analytics.activity_stream') # Update has_active_failures for inventory/groups when a Host/Group is deleted, # when a Host-Group or Group-Group relationship is updated, or when a Job is deleted @@ -369,11 +370,15 @@ def activity_stream_create(sender, instance, created, **kwargs): if type(instance) == Job: if 'extra_vars' in changes: changes['extra_vars'] = instance.display_extra_vars() + changes_dict = json.dumps(changes) activity_entry = ActivityStream( operation='create', object1=object1, - changes=json.dumps(changes)) + changes=changes_dict) activity_entry.save() + # analytics_logger.info('Activity Stream create entry for %s' % str(object1), + # extra=format_for_log(changes, kind='activity_stream', + # actor=activity_entry.actor, operation='update', object1=object1)) #TODO: Weird situation where cascade SETNULL doesn't work # it might actually be a good idea to remove all of these FK references since # we don't really use them anyway. 
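(Editorial aside, not part of the patch: the hunks above lean on the standard-library behavior that anything passed to a logger via `extra=dict(...)` is attached to the emitted `LogRecord` as plain attributes, which is what lets the new `LogstashFormatter.get_extra_fields()` pull fields such as `actor`, `changes`, or `operation` back out. Below is a minimal, self-contained sketch of that mechanism; the `ExtraFieldsFormatter` class and the field names are illustrative assumptions, not code from this patch.)

```python
import json
import logging


class ExtraFieldsFormatter(logging.Formatter):
    """Collect attributes added through ``extra=`` and emit them as JSON."""

    # Attribute names every LogRecord already carries, so they are not
    # mistaken for caller-supplied extra fields.
    RESERVED = set(vars(logging.LogRecord('', 0, '', 0, '', (), None))) | {'message', 'asctime'}

    def format(self, record):
        payload = {
            'message': record.getMessage(),
            'level': record.levelname,
            'logger_name': record.name,
        }
        # Anything passed as extra={...} shows up as a plain attribute on
        # the record, so keep whatever is not a built-in field.
        payload.update({key: value for key, value in vars(record).items()
                        if key not in self.RESERVED})
        return json.dumps(payload, default=str)


logger = logging.getLogger('demo.activity_stream')
handler = logging.StreamHandler()
handler.setFormatter(ExtraFieldsFormatter())
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Mirrors the call style used in the middleware and signals hunks.
logger.info('Activity Stream update entry for %s' % 'job_template',
            extra=dict(actor='admin', operation='update'))
```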
@@ -401,6 +406,9 @@ def activity_stream_update(sender, instance, **kwargs): object1=object1, changes=json.dumps(changes)) activity_entry.save() + # analytics_logger.info('Activity Stream update entry for %s' % str(object1), + # extra=format_for_log(changes, kind='activity_stream', + # actor=activity_entry.actor, operation='update', object1=object1)) if instance._meta.model_name != 'setting': # Is not conf.Setting instance getattr(activity_entry, object1).add(instance) diff --git a/awx/main/tests/unit/test_log_formatter.py b/awx/main/tests/unit/test_log_formatter.py new file mode 100644 index 0000000000..afa2a799e4 --- /dev/null +++ b/awx/main/tests/unit/test_log_formatter.py @@ -0,0 +1,31 @@ +from awx.main.utils import format_for_log +import datetime + +# Example data +event_data = { + 'stdout': u'\x1b[0;36mskipping: [host1]\x1b[0m\r\n', u'uuid': u'ffe4858c-ac38-4cab-9192-b07bdbe80502', + u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 376051), 'counter': 17, u'job_id': 209, + u'event': u'runner_on_skipped', 'parent_id': 1937, 'end_line': 24, 'start_line': 23, + u'event_data': {u'play_pattern': u'all', u'play': u'all', u'task': u'Scan files (Windows)', + u'task_args': u'paths={{ scan_file_paths }}, recursive={{ scan_use_recursive }}, get_checksum={{ scan_use_checksum }}', + u'remote_addr': u'host1', u'pid': 1427, u'play_uuid': u'da784361-3811-4ea7-9cc8-46ec758fde66', + u'task_uuid': u'4f9525fd-bc25-4ace-9eb2-adad9fa21a94', u'event_loop': None, + u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', u'playbook': u'scan_facts.yml', + u'task_action': u'win_scan_files', u'host': u'host1', u'task_path': None}} + +event_stats = { + 'stdout': u'asdf', u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 385416), + 'counter': 18, u'job_id': 209, u'event': u'playbook_on_stats', 'parent_id': 1923, + 'end_line': 28, 'start_line': 24, u'event_data': { + u'skipped': {u'host1': 4}, u'ok': {u'host2': 3}, u'changed': {}, + u'pid': 1427, u'dark': {}, u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', + u'playbook': u'scan_facts.yml', u'failures': {}, u'processed': {u'duck': 1} + } +} + + + +def test_format_event(): + log_data = format_for_log(event_data, kind='event') + assert log_data['event_host'] == 'host1' + diff --git a/awx/main/utils.py b/awx/main/utils.py index 00937a84c1..13ece206a9 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -17,6 +17,7 @@ import urlparse import threading import contextlib import tempfile +from copy import copy # Decorator from decorator import decorator @@ -824,3 +825,64 @@ class OutputEventFilter(object): self._current_event_data = next_event_data else: self._current_event_data = None + +def format_for_log(raw_data, kind=None, **kwargs): + ''' + Process dictionaries from various contexts (job events, activity stream + changes, etc.) 
to give meaningful information + Output a dictionary which will be passed in logstash or syslog format + to the logging receiver + ''' + rename_fields = set(( + 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename', + 'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module', + 'msecs', 'msecs', 'message', 'msg', 'name', 'pathname', 'process', + 'processName', 'relativeCreated', 'thread', 'threadName', 'extra', + 'auth_token', 'tags', 'host', 'host_id', 'level', 'port', 'uuid')) + data = copy(raw_data) + if isinstance(data, basestring): + data = json.loads(data) + data.update(kwargs) + skip_fields = ('res', 'password', 'event_data', 'stdout') + data_for_log = {} + + def index_by_name(alist): + """Takes a list of dictionaries with `name` as a key in each dict + and returns a dictionary indexed by those names""" + adict = {} + for item in alist: + subdict = copy(item) + name = subdict.pop('name', None) + if name: + # Logstash v2 can not accept '.' in a name + name = name.replace('.', '_') + adict[name] = subdict + return adict + + if kind == 'event': + data.update(data.get('event_data', {})) + for fd in data: + if fd in skip_fields: + continue + key = fd + if fd in rename_fields: + key = 'event_%s' % fd + if type(data[fd]) is dict: + data_for_log[key] = len(data[fd]) + else: + data_for_log[key] = data[fd] + elif kind == 'fact': + if 'services' in data: + data_for_log['services'] = index_by_name(data['services']) + elif 'packages' in data: + data_for_log['packages'] = index_by_name(data['packages']) + elif 'files' in data: + data_for_log['files'] = index_by_name(data['files']) + elif 'ansible' in data: + data_for_log['ansible'] = data['ansible'] + # Remove sub-keys with data type conflicts in elastic search + data_for_log['ansible'].pop('ansible_python_version', None) + data_for_log['ansible']['ansible_python'].pop('version_info', None) + else: + data_for_log['facts'] = data + return data_for_log diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 040bbb2ee0..fbb5230d38 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -842,6 +842,19 @@ LOGGING = { 'simple': { 'format': '%(asctime)s %(levelname)-8s %(name)s %(message)s', }, + 'logstash': { + '()': 'awx.main.log_utils.formatters.LogstashFormatter' + }, + 'json': { + '()': 'awx.main.log_utils.formatters.LogstashFormatter' + } + # From loggly examples + # 'json': { + # 'format': '{ "loggerName":"%(name)s", "asciTime":"%(asctime)s", "fileName":"%(filename)s", "logRecordCreationTime":"%(created)f", "functionName":"%(funcName)s", "levelNo":"%(levelno)s", "lineNo":"%(lineno)d", "time":"%(msecs)d", "levelName":"%(levelname)s", "message":"%(message)s"}', + # }, + # 'json': { + # 'format': '{"message": %(message)s}', + # }, }, 'handlers': { 'console': { @@ -863,6 +876,24 @@ LOGGING = { 'class': 'django.utils.log.NullHandler', 'formatter': 'simple', }, + 'http_receiver': { + 'class': 'awx.main.log_utils.handlers.HTTPSHandler', + 'level': 'INFO', + 'formatter': 'json', + 'host': '', + }, + 'logstash': { + 'level': 'INFO', + 'class': 'awx.main.log_utils.handlers.HTTPSHandler', + 'host': 'logstash', # IP/name of our Logstash EC2 instance + 'port': 8085, + # 'port': 5000, + # 'version': 1, + 'message_type': 'logstash', + 'fqdn': True, + # 'tags': ['tower'], + 'formatter': 'json' + }, 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], @@ -939,7 +970,6 @@ LOGGING = { 'django.request': { 'handlers': ['mail_admins', 'console', 'file', 'tower_warnings'], 'level': 'WARNING', - 'propagate': 
False, }, 'rest_framework.request': { 'handlers': ['mail_admins', 'console', 'file', 'tower_warnings'], @@ -954,29 +984,30 @@ LOGGING = { 'level': 'DEBUG', }, 'awx.conf': { - 'handlers': ['console', 'file', 'tower_warnings'], + 'handlers': ['null'], 'level': 'WARNING', - 'propagate': False, + }, + 'awx.conf.settings': { + 'handlers': ['null'], + 'level': 'WARNING', + }, + 'awx.main': { + 'handlers': ['null'] }, 'awx.main.commands.run_callback_receiver': { - 'handlers': ['console', 'file', 'callback_receiver'], - 'propagate': False - }, - 'awx.main.commands.run_socketio_service': { - 'handlers': ['console', 'file', 'socketio_service'], - 'propagate': False + 'handlers': ['callback_receiver'], }, 'awx.main.tasks': { - 'handlers': ['console', 'file', 'task_system'], - 'propagate': False + 'handlers': ['task_system'] }, 'awx.main.scheduler': { - 'handlers': ['console', 'file', 'task_system'], - 'propagate': False + 'handlers': ['task_system'], + }, + 'awx.main.consumers': { + 'handlers': ['null'] }, 'awx.main.commands.run_fact_cache_receiver': { - 'handlers': ['console', 'file', 'fact_receiver'], - 'propagate': False + 'handlers': ['fact_receiver'], }, 'awx.main.access': { 'handlers': ['null'], @@ -990,6 +1021,23 @@ LOGGING = { 'handlers': ['null'], 'propagate': False, }, + 'awx.analytics': { + 'handlers': ['null'], + 'level': 'INFO', + 'propagate': False + }, + 'awx.analytics.job_events': { + 'handlers': ['null'], + 'level': 'INFO' + }, + 'awx.analytics.activity_stream': { + 'handlers': ['null'], + 'level': 'INFO' + }, + 'awx.analytics.system_tracking': { + 'handlers': ['null'], + 'level': 'INFO' + }, 'django_auth_ldap': { 'handlers': ['console', 'file', 'tower_warnings'], 'level': 'DEBUG', diff --git a/awx/settings/production.py b/awx/settings/production.py index 7df7953e25..103f775d86 100644 --- a/awx/settings/production.py +++ b/awx/settings/production.py @@ -52,7 +52,6 @@ TOWER_VENV_PATH = "/var/lib/awx/venv/tower" LOGGING['handlers']['tower_warnings']['filename'] = '/var/log/tower/tower.log' LOGGING['handlers']['callback_receiver']['filename'] = '/var/log/tower/callback_receiver.log' -LOGGING['handlers']['socketio_service']['filename'] = '/var/log/tower/socketio_service.log' LOGGING['handlers']['task_system']['filename'] = '/var/log/tower/task_system.log' LOGGING['handlers']['fact_receiver']['filename'] = '/var/log/tower/fact_receiver.log' LOGGING['handlers']['system_tracking_migrations']['filename'] = '/var/log/tower/tower_system_tracking_migrations.log' diff --git a/docs/licenses/docker-elk.txt b/docs/licenses/docker-elk.txt new file mode 100644 index 0000000000..0dbd69f8e2 --- /dev/null +++ b/docs/licenses/docker-elk.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Anthony Lapenna + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
diff --git a/tools/elastic/README.md b/tools/elastic/README.md new file mode 100644 index 0000000000..b98b0b34b6 --- /dev/null +++ b/tools/elastic/README.md @@ -0,0 +1,56 @@ +# Docker ELK / Elastic Stack Development Tools + +These are tools to run a containerized version of the ELK stack, comprising +Logstash, Elasticsearch, and Kibana. There are also cases where +only a subset of these needs to run. + +A copy of the license is in `docs/licenses/docker-elk.txt` + +## Instructions + +Due to complex requirements from the Elasticsearch container upstream, there +is a prerequisite to get the containers running. The docker _host_ machine +must have the `max_map_count` variable increased. For a developer using +docker-machine with something like VirtualBox or VMware, this can be +done by getting a bash shell in the running Docker machine. Example: + +```bash +$ docker-machine ssh default +docker@default:~$ sudo sysctl -w vm.max_map_count=262144 +vm.max_map_count = 262144 +``` + +After this, the containers can be started up with commands like: + +```bash +make docker-compose-elk +``` + +```bash +make docker-compose-cluster-elk +``` + +These are run from the root folder of the ansible-tower repository. + +### Connecting Logstash to 3rd Party Receivers + +In order to send these logs to an external consumer of logstash-format +messages, replace the output variables in the logstash.conf file. + +``` +output { + elasticsearch { + hosts => "elasticsearch:9200" + } +} +``` + +## Changelog + +Current branch point `a776151221182dcfaec7df727459e208c895d25b` +Nov 18, 2016 + + + - Original branch point `b5a4deee142b152d4f9232ebac5bbabb2d2cef3c` + Sep 25, 2016, before X-Pack support +
diff --git a/tools/elastic/docker-compose.elastic-override.yml b/tools/elastic/docker-compose.elastic-override.yml new file mode 100644 index 0000000000..8f16e70cea --- /dev/null +++ b/tools/elastic/docker-compose.elastic-override.yml @@ -0,0 +1,49 @@ +# Structure for the Elastic Stack docker configuration came from docker-elk: +# https://github.com/deviantony/docker-elk +# docker-elk is under the MIT License, +# a copy of its license is provided in docs/licenses/docker-elk.txt +# contents modified + +version: '2' +services: + + # Components of ELK stack for logging + elasticsearch: + build: elastic/elasticsearch/ + ports: + - "9200:9200" + - "9300:9300" + environment: + ES_JAVA_OPTS: "-Xms1g -Xmx1g" + # networks: # add back in when a connection to tower_tools is possible + # - docker_elk + + logstash: + build: elastic/logstash/ + command: -f /etc/logstash/conf.d/ + volumes: + - ./elastic/logstash/config:/etc/logstash/conf.d + ports: + - "8085:8085" + links: + - elasticsearch + # networks: + # - docker_elk + depends_on: + - elasticsearch + kibana: + build: elastic/kibana/ + volumes: + - ./elastic/kibana/config/:/opt/kibana/config/ + ports: + - "5601:5601" + links: + - elasticsearch + # networks: + # - docker_elk + depends_on: + - elasticsearch + +# networks: +# docker_elk: +# driver: bridge
diff --git a/tools/elastic/docker-compose.logstash-link-cluster.yml b/tools/elastic/docker-compose.logstash-link-cluster.yml new file mode 100644 index 0000000000..fcb896c3ca --- /dev/null +++ b/tools/elastic/docker-compose.logstash-link-cluster.yml @@ -0,0 +1,12 @@ +version: '2'
+services: + # Tower Development Cluster + tower_1: + links: + - logstash + tower_2: + links: + - logstash + tower_3: + links: + - logstash \ No newline at end of file diff --git a/tools/elastic/docker-compose.logstash-link.yml b/tools/elastic/docker-compose.logstash-link.yml new file mode 100644 index 0000000000..7092efcffe --- /dev/null +++ b/tools/elastic/docker-compose.logstash-link.yml @@ -0,0 +1,6 @@ +version: '2' +services: + # Primary Tower Development Container + tower: + links: + - logstash \ No newline at end of file diff --git a/tools/elastic/elasticsearch/Dockerfile b/tools/elastic/elasticsearch/Dockerfile new file mode 100644 index 0000000000..3e5abd2217 --- /dev/null +++ b/tools/elastic/elasticsearch/Dockerfile @@ -0,0 +1,5 @@ +FROM elasticsearch:5 + +ENV ES_JAVA_OPTS="-Des.path.conf=/etc/elasticsearch" + +CMD ["-E", "network.host=0.0.0.0", "-E", "discovery.zen.minimum_master_nodes=1"] \ No newline at end of file diff --git a/tools/elastic/elasticsearch/config/.placeholder b/tools/elastic/elasticsearch/config/.placeholder new file mode 100644 index 0000000000..9ad266259c --- /dev/null +++ b/tools/elastic/elasticsearch/config/.placeholder @@ -0,0 +1 @@ +Ensure the existence of the parent folder. diff --git a/tools/elastic/kibana/Dockerfile b/tools/elastic/kibana/Dockerfile new file mode 100644 index 0000000000..3f07deffe2 --- /dev/null +++ b/tools/elastic/kibana/Dockerfile @@ -0,0 +1 @@ +FROM kibana:5 \ No newline at end of file diff --git a/tools/elastic/kibana/config/kibana.yml b/tools/elastic/kibana/config/kibana.yml new file mode 100644 index 0000000000..630004a7b7 --- /dev/null +++ b/tools/elastic/kibana/config/kibana.yml @@ -0,0 +1,92 @@ +# Kibana is served by a back end server. This setting specifies the port to use. +server.port: 5601 + +# This setting specifies the IP address of the back end server. +server.host: "0.0.0.0" + +# Enables you to specify a path to mount Kibana at if you are running behind a proxy. This setting +# cannot end in a slash. +# server.basePath: "" + +# The maximum payload size in bytes for incoming server requests. +# server.maxPayloadBytes: 1048576 + +# The Kibana server's name. This is used for display purposes. +# server.name: "your-hostname" + +# The URL of the Elasticsearch instance to use for all your queries. +elasticsearch.url: "http://elasticsearch:9200" + +# When this setting’s value is true Kibana uses the hostname specified in the server.host +# setting. When the value of this setting is false, Kibana uses the hostname of the host +# that connects to this Kibana instance. +# elasticsearch.preserveHost: true + +# Kibana uses an index in Elasticsearch to store saved searches, visualizations and +# dashboards. Kibana creates a new index if the index doesn’t already exist. +# kibana.index: ".kibana" + +# The default application to load. +# kibana.defaultAppId: "discover" + +# If your Elasticsearch is protected with basic authentication, these settings provide +# the username and password that the Kibana server uses to perform maintenance on the Kibana +# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which +# is proxied through the Kibana server. +# elasticsearch.username: "user" +# elasticsearch.password: "pass" + +# Paths to the PEM-format SSL certificate and SSL key files, respectively. These +# files enable SSL for outgoing requests from the Kibana server to the browser. 
+# server.ssl.cert: /path/to/your/server.crt +# server.ssl.key: /path/to/your/server.key + +# Optional settings that provide the paths to the PEM-format SSL certificate and key files. +# These files validate that your Elasticsearch backend uses the same key files. +# elasticsearch.ssl.cert: /path/to/your/client.crt +# elasticsearch.ssl.key: /path/to/your/client.key + +# Optional setting that enables you to specify a path to the PEM file for the certificate +# authority for your Elasticsearch instance. +# elasticsearch.ssl.ca: /path/to/your/CA.pem + +# To disregard the validity of SSL certificates, change this setting’s value to false. +# elasticsearch.ssl.verify: true + +# Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of +# the elasticsearch.requestTimeout setting. +# elasticsearch.pingTimeout: 1500 + +# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value +# must be a positive integer. +# elasticsearch.requestTimeout: 30000 + +# List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side +# headers, set this value to [] (an empty list). +# elasticsearch.requestHeadersWhitelist: [ authorization ] + +# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable. +# elasticsearch.shardTimeout: 0 + +# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying. +# elasticsearch.startupTimeout: 5000 + +# Specifies the path where Kibana creates the process ID file. +# pid.file: /var/run/kibana.pid + +# Enables you specify a file where Kibana stores log output. +# logging.dest: stdout + +# Set the value of this setting to true to suppress all logging output. +# logging.silent: false + +# Set the value of this setting to true to suppress all logging output other than error messages. +# logging.quiet: false + +# Set the value of this setting to true to log all events, including system usage information +# and all requests. +# logging.verbose: false + +# Set the interval in milliseconds to sample system and process performance +# metrics. Minimum is 100ms. Defaults to 10000. 
+# ops.interval: 10000 \ No newline at end of file diff --git a/tools/elastic/logstash/Dockerfile b/tools/elastic/logstash/Dockerfile new file mode 100644 index 0000000000..a340f51bbb --- /dev/null +++ b/tools/elastic/logstash/Dockerfile @@ -0,0 +1,4 @@ +FROM logstash:5 + +# Add your logstash plugins setup here +# Example: RUN logstash-plugin install logstash-filter-json \ No newline at end of file diff --git a/tools/elastic/logstash/config/logstash.conf b/tools/elastic/logstash/config/logstash.conf new file mode 100644 index 0000000000..1f25b6ac95 --- /dev/null +++ b/tools/elastic/logstash/config/logstash.conf @@ -0,0 +1,19 @@ +input { + http { + port => 8085 + } +} + +## Add your filters / logstash plugins configuration here + +filter { + json { + source => "message" + } +} + +output { + elasticsearch { + hosts => "elasticsearch:9200" + } +} From cad112e8da9f8dd9bb0c6d2e1aad1dbc61e145e9 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 29 Nov 2016 13:29:13 -0500 Subject: [PATCH 02/14] Add python-logstash dependency --- requirements/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/requirements.txt b/requirements/requirements.txt index f0123a123c..be25d3e703 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -145,6 +145,7 @@ python-heatclient==1.6.0 # via shade python-ironicclient==1.8.0 # via shade python-keystoneclient==3.6.0 # via python-glanceclient, python-mistralclient, python-openstackclient, shade python-ldap==2.4.27 # via django-auth-ldap +python-logstash==0.4.6 python-magnumclient==2.3.1 # via shade python-memcached==1.58 python-mistralclient==2.1.1 # via python-troveclient From 7df11c59c0611a00513f49d71431cc0ebfabc855 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 29 Nov 2016 13:40:35 -0500 Subject: [PATCH 03/14] Adding requests-futures dependency --- requirements/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/requirements.txt b/requirements/requirements.txt index be25d3e703..d6f887735e 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -168,6 +168,7 @@ rax-scheduled-images-python-novaclient-ext==0.3.1 # via rackspace-novaclient redbaron==0.6.2 requests-oauthlib==0.7.0 # via msrest, python-social-auth requests==2.12.1 # via azure-servicebus, azure-servicemanagement-legacy, azure-storage, keystoneauth1, msrest, python-cinderclient, python-designateclient, python-glanceclient, python-heatclient, python-ironicclient, python-keystoneclient, python-magnumclient, python-mistralclient, python-neutronclient, python-novaclient, python-social-auth, python-swiftclient, python-troveclient, requests-oauthlib, slackclient +requests-futures==0.9.7 requestsexceptions==1.1.3 # via os-client-config, shade rfc3986==0.4.1 # via oslo.config rply==0.7.4 # via baron From f3805b13170155ccd3c012837deb876410fc7b0d Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 29 Nov 2016 13:59:33 -0500 Subject: [PATCH 04/14] enable Logstash auth and prepare for handler for multiple auth types --- awx/main/log_utils/handlers.py | 31 +++++++++++++++++---- awx/settings/defaults.py | 4 ++- tools/elastic/logstash/config/logstash.conf | 2 ++ 3 files changed, 31 insertions(+), 6 deletions(-) diff --git a/awx/main/log_utils/handlers.py b/awx/main/log_utils/handlers.py index 762b359608..50fb98ff78 100644 --- a/awx/main/log_utils/handlers.py +++ b/awx/main/log_utils/handlers.py @@ -23,6 +23,9 @@ from requests_futures.sessions import FuturesSession # Logstash from logstash import 
formatter +# custom +from requests.auth import HTTPBasicAuth + ENABLED_LOGS = ['ansible'] @@ -61,13 +64,16 @@ def bg_cb(sess, resp): # add port for a generic handler class HTTPSHandler(logging.Handler): - def __init__(self, host, port=80, message_type='logstash', fqdn=False): + def __init__(self, host, fqdn=False, **kwargs): super(HTTPSHandler, self).__init__() self.host_saved = host - self.port = port - # self.port = port - self.message_type = message_type self.fqdn = fqdn + for fd in ['port', 'message_type', 'username', 'password']: + if fd in kwargs: + attr_name = fd + if fd == 'username': + attr_name = 'user' + setattr(self, attr_name, kwargs[fd]) def get_full_message(self, record): if record.exc_info: @@ -75,6 +81,19 @@ class HTTPSHandler(logging.Handler): else: return record.getMessage() + def add_auth_information(self, kwargs): + if self.message_type == 'logstash': + if not self.user: + # Logstash authentication not enabled + return kwargs + logstash_auth = HTTPBasicAuth(self.user, self.password) + kwargs['auth'] = logstash_auth + elif self.message_type == 'splunk': + auth_header = "Splunk %s" % self.token + headers = dict(Authorization=auth_header) + kwargs['headers'] = headers + return kwargs + def emit(self, record): try: payload = self.format(record) @@ -94,7 +113,9 @@ class HTTPSHandler(logging.Handler): host = 'http://%s' % self.host_saved if self.port != 80: host = '%s:%s' % (host, str(self.port)) - session.post(host, data=payload, background_callback=bg_cb) + bare_kwargs = dict(data=payload, background_callback=bg_cb) + kwargs = self.add_auth_information(bare_kwargs) + session.post(host, **kwargs) except (KeyboardInterrupt, SystemExit): raise except: diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index fbb5230d38..89afcc0f26 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -892,7 +892,9 @@ LOGGING = { 'message_type': 'logstash', 'fqdn': True, # 'tags': ['tower'], - 'formatter': 'json' + 'formatter': 'json', + 'username': 'awx_logger', + 'password': 'workflows', }, 'mail_admins': { 'level': 'ERROR', diff --git a/tools/elastic/logstash/config/logstash.conf b/tools/elastic/logstash/config/logstash.conf index 1f25b6ac95..9ad3a94f3c 100644 --- a/tools/elastic/logstash/config/logstash.conf +++ b/tools/elastic/logstash/config/logstash.conf @@ -1,6 +1,8 @@ input { http { port => 8085 + user => awx_logger + password => "workflows" } } From 1921f8cf04bbf13db9703dadf5fef18ad4dc1cc1 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 29 Nov 2016 14:58:10 -0500 Subject: [PATCH 05/14] Re-sync tower and ansible dependencies --- requirements/requirements.in | 2 ++ requirements/requirements.txt | 27 +++++++++++---------------- requirements/requirements_ansible.in | 1 + requirements/requirements_ansible.txt | 12 +++++++----- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/requirements/requirements.in b/requirements/requirements.in index 23ca0537c1..886476fecb 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -33,12 +33,14 @@ psphere==0.5.2 psutil==5.0.0 pygerduty==0.35.1 pyOpenSSL==16.2.0 +python-logstash==0.4.6 python-memcached==1.58 python-radius==1.0 python-saml==2.2.0 python-social-auth==0.2.21 pyzmq==14.5.0 redbaron==0.6.2 +requests-futures==0.9.7 shade==1.13.1 slackclient==1.0.2 twilio==5.6.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index d6f887735e..20319bdb8b 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -4,11 +4,6 @@ # # 
pip-compile --output-file requirements.txt requirements.in # -git+https://github.com/ansible/ansiconv.git@tower_1.0.0#egg=ansiconv -git+https://github.com/ansible/django-qsstats-magic.git@tower_0.7.2#egg=django-qsstats-magic -git+https://github.com/ansible/dm.xmlsec.binding.git@master#egg=dm.xmlsec.binding -git+https://github.com/ansible/django-jsonbfield@master#egg=jsonbfield -git+https://github.com/chrismeyersfsu/pyrax@tower#egg=pyrax amqp==1.4.9 # via kombu anyjson==0.3.3 # via kombu apache-libcloud==1.3.0 @@ -17,7 +12,7 @@ asgi-amqp==0.3.1 asgiref==1.0.0 # via asgi-amqp, channels, daphne autobahn==0.16.1 # via daphne azure-batch==1.0.0 # via azure -azure-common[autorest]==1.1.4 # via azure-batch, azure-mgmt-batch, azure-mgmt-compute, azure-mgmt-keyvault, azure-mgmt-logic, azure-mgmt-network, azure-mgmt-redis, azure-mgmt-resource, azure-mgmt-scheduler, azure-mgmt-storage, azure-servicebus, azure-servicemanagement-legacy, azure-storage +azure-common==1.1.4 # via azure-batch, azure-mgmt-batch, azure-mgmt-compute, azure-mgmt-keyvault, azure-mgmt-logic, azure-mgmt-network, azure-mgmt-redis, azure-mgmt-resource, azure-mgmt-scheduler, azure-mgmt-storage, azure-servicebus, azure-servicemanagement-legacy, azure-storage azure-mgmt-batch==1.0.0 # via azure-mgmt azure-mgmt-compute==0.30.0rc6 # via azure-mgmt azure-mgmt-keyvault==0.30.0rc6 # via azure-mgmt @@ -48,7 +43,7 @@ chardet==2.3.0 # via msrest cliff==2.3.0 # via osc-lib, python-designateclient, python-heatclient, python-mistralclient, python-neutronclient, python-openstackclient cmd2==0.6.9 # via cliff constantly==15.1.0 # via twisted -cryptography==1.5.3 # via azure-storage, pyopenssl, python-magnumclient, secretstorage +cryptography==1.6 # via azure-storage, pyopenssl, python-magnumclient, secretstorage daphne==0.15.0 # via channels debtcollector==1.9.0 # via oslo.config, oslo.utils, python-designateclient, python-keystoneclient, python-neutronclient decorator==4.0.10 # via python-magnumclient, shade @@ -71,7 +66,7 @@ dogpile.cache==0.6.2 # via python-ironicclient, shade enum34==1.1.6 # via cryptography, msrest funcsigs==1.0.2 # via debtcollector, mock, oslo.utils functools32==3.2.3.post2 # via jsonschema -futures==3.0.5 # via azure-storage, python-swiftclient +futures==3.0.5 # via azure-storage, python-swiftclient, requests-futures gevent-websocket==0.9.5 gevent==1.1.2 # via gevent-websocket greenlet==0.4.10 # via gevent @@ -111,7 +106,7 @@ msrestazure==0.4.4 # via azure-common munch==2.0.4 # via shade netaddr==0.7.18 # via oslo.config, oslo.utils, python-neutronclient netifaces==0.10.5 # via oslo.utils, shade -oauthlib==2.0.0 # via python-social-auth, requests-oauthlib +oauthlib==2.0.1 # via python-social-auth, requests-oauthlib openstacksdk==0.9.10 # via python-openstackclient ordereddict==1.1 os-client-config==1.24.0 # via openstacksdk, osc-lib, python-magnumclient, python-neutronclient, shade @@ -141,10 +136,10 @@ python-cinderclient==1.9.0 # via python-openstackclient, shade python-dateutil==2.6.0 # via azure-storage python-designateclient==2.3.0 # via shade python-glanceclient==2.5.0 # via python-openstackclient, shade -python-heatclient==1.6.0 # via shade +python-heatclient==1.6.1 # via shade python-ironicclient==1.8.0 # via shade -python-keystoneclient==3.6.0 # via python-glanceclient, python-mistralclient, python-openstackclient, shade -python-ldap==2.4.27 # via django-auth-ldap +python-keystoneclient==3.7.0 # via python-glanceclient, python-mistralclient, python-openstackclient, shade +python-ldap==2.4.28 # via django-auth-ldap 
python-logstash==0.4.6 python-magnumclient==2.3.1 # via shade python-memcached==1.58 @@ -152,7 +147,7 @@ python-mistralclient==2.1.1 # via python-troveclient python-neutronclient==6.0.0 # via shade python-novaclient==6.0.0 # via ip-associations-python-novaclient-ext, os-diskconfig-python-novaclient-ext, os-networksv2-python-novaclient-ext, os-virtual-interfacesv2-python-novaclient-ext, python-openstackclient, rackspace-auth-openstack, rackspace-novaclient, rax-default-network-flags-python-novaclient-ext, rax-scheduled-images-python-novaclient-ext, shade python-openid==2.2.5 # via python-social-auth -python-openstackclient==3.4.0 # via python-ironicclient +python-openstackclient==3.4.1 # via python-ironicclient python-radius==1.0 python-saml==2.2.0 python-social-auth==0.2.21 @@ -166,9 +161,9 @@ rackspace-novaclient==2.1 rax-default-network-flags-python-novaclient-ext==0.4.0 # via rackspace-novaclient rax-scheduled-images-python-novaclient-ext==0.3.1 # via rackspace-novaclient redbaron==0.6.2 -requests-oauthlib==0.7.0 # via msrest, python-social-auth -requests==2.12.1 # via azure-servicebus, azure-servicemanagement-legacy, azure-storage, keystoneauth1, msrest, python-cinderclient, python-designateclient, python-glanceclient, python-heatclient, python-ironicclient, python-keystoneclient, python-magnumclient, python-mistralclient, python-neutronclient, python-novaclient, python-social-auth, python-swiftclient, python-troveclient, requests-oauthlib, slackclient requests-futures==0.9.7 +requests-oauthlib==0.7.0 # via msrest, python-social-auth +requests==2.12.1 # via azure-servicebus, azure-servicemanagement-legacy, azure-storage, keystoneauth1, msrest, python-cinderclient, python-designateclient, python-glanceclient, python-heatclient, python-ironicclient, python-keystoneclient, python-magnumclient, python-mistralclient, python-neutronclient, python-novaclient, python-social-auth, python-swiftclient, python-troveclient, requests-futures, requests-oauthlib, slackclient requestsexceptions==1.1.3 # via os-client-config, shade rfc3986==0.4.1 # via oslo.config rply==0.7.4 # via baron @@ -181,7 +176,7 @@ stevedore==1.18.0 # via cliff, keystoneauth1, openstacksdk, osc-lib, osl suds==0.4 # via psphere tempora==1.6.1 # via irc, jaraco.logging twilio==5.6.0 -twisted==16.5.0 # via daphne +twisted==16.6.0 # via daphne txaio==2.5.2 # via autobahn typing==3.5.2.2 # via m2crypto unicodecsv==0.14.1 # via cliff diff --git a/requirements/requirements_ansible.in b/requirements/requirements_ansible.in index b5b62a15e4..2e2085985b 100644 --- a/requirements/requirements_ansible.in +++ b/requirements/requirements_ansible.in @@ -1,6 +1,7 @@ -e git+https://github.com/chrismeyersfsu/pyrax@tower#egg=pyrax apache-libcloud==1.3.0 azure==2.0.0rc6 +kombu==3.0.35 boto==2.43.0 psutil==5.0.0 shade==1.13.1 diff --git a/requirements/requirements_ansible.txt b/requirements/requirements_ansible.txt index 714f2a827d..d11dac50a1 100644 --- a/requirements/requirements_ansible.txt +++ b/requirements/requirements_ansible.txt @@ -4,7 +4,8 @@ # # pip-compile --output-file requirements_ansible.txt requirements_ansible.in # -git+https://github.com/chrismeyersfsu/pyrax@tower#egg=pyrax +amqp==1.4.9 # via kombu +anyjson==0.3.3 # via kombu apache-libcloud==1.3.0 appdirs==1.4.0 # via os-client-config, python-ironicclient azure-batch==1.0.0 # via azure @@ -32,7 +33,7 @@ cffi==1.9.1 # via cryptography chardet==2.3.0 # via msrest cliff==2.3.0 # via osc-lib, python-designateclient, python-heatclient, python-mistralclient, python-neutronclient, 
python-openstackclient cmd2==0.6.9 # via cliff -cryptography==1.5.3 # via azure-storage, python-magnumclient, secretstorage +cryptography==1.6 # via azure-storage, python-magnumclient, secretstorage debtcollector==1.9.0 # via oslo.config, oslo.utils, python-designateclient, python-keystoneclient, python-neutronclient decorator==4.0.10 # via python-magnumclient, shade dogpile.cache==0.6.2 # via python-ironicclient, shade @@ -51,6 +52,7 @@ jsonpointer==1.10 # via jsonpatch jsonschema==2.5.1 # via python-designateclient, python-ironicclient, warlock keyring==10.0.2 # via msrest keystoneauth1==2.15.0 # via openstacksdk, os-client-config, osc-lib, python-cinderclient, python-designateclient, python-heatclient, python-ironicclient, python-keystoneclient, python-magnumclient, python-neutronclient, python-novaclient, python-openstackclient, python-troveclient, shade +kombu==3.0.35 mock==2.0.0 monotonic==1.2 # via oslo.utils msgpack-python==0.4.8 # via oslo.serialization @@ -59,7 +61,7 @@ msrestazure==0.4.4 # via azure-common munch==2.0.4 # via shade netaddr==0.7.18 # via oslo.config, oslo.utils, python-neutronclient netifaces==0.10.5 # via oslo.utils, shade -oauthlib==2.0.0 # via requests-oauthlib +oauthlib==2.0.1 # via requests-oauthlib openstacksdk==0.9.10 # via python-openstackclient os-client-config==1.24.0 # via openstacksdk, osc-lib, python-magnumclient, python-neutronclient, shade os-diskconfig-python-novaclient-ext==0.1.3 # via rackspace-novaclient @@ -81,9 +83,9 @@ python-cinderclient==1.9.0 # via python-openstackclient, shade python-dateutil==2.6.0 # via azure-storage python-designateclient==2.3.0 # via shade python-glanceclient==2.5.0 # via python-openstackclient, shade -python-heatclient==1.6.0 # via shade +python-heatclient==1.6.1 # via shade python-ironicclient==1.8.0 # via shade -python-keystoneclient==3.6.0 # via python-glanceclient, python-mistralclient, python-openstackclient, shade +python-keystoneclient==3.7.0 # via python-glanceclient, python-mistralclient, python-openstackclient, shade python-magnumclient==2.3.1 # via shade python-mistralclient==2.1.1 # via python-troveclient python-neutronclient==6.0.0 # via shade From 9cecb7d8703422c736e50cd362d81eb08857df75 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 29 Nov 2016 14:58:31 -0500 Subject: [PATCH 06/14] Cleaning up some parts of the logging branch * Fixing some flake8 warnings * Removing some superflous print statements --- awx/main/log_utils/utils.py | 8 ++++---- awx/main/management/commands/run_fact_cache_receiver.py | 2 -- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/awx/main/log_utils/utils.py b/awx/main/log_utils/utils.py index 81b1d397c7..a57515888f 100644 --- a/awx/main/log_utils/utils.py +++ b/awx/main/log_utils/utils.py @@ -15,9 +15,9 @@ def parse_config_file(): project_id, access_token, api_domain = _parse_config_file_impl(filename) - if project_id is not None\ - and access_token is not None\ - and api_domain is not None: + if project_id is not None \ + and access_token is not None \ + and api_domain is not None: return project_id, access_token, api_domain else: @@ -54,4 +54,4 @@ def _parse_config_file_impl(filename): def get_config_from_env(): return (os.environ.get('SPLUNK_PROJECT_ID', None), os.environ.get('SPLUNK_ACCESS_TOKEN', None), - os.environ.get('SPLUNK_API_DOMAIN', None)) \ No newline at end of file + os.environ.get('SPLUNK_API_DOMAIN', None)) diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py index 
3d962243f0..a0082416c2 100644 --- a/awx/main/management/commands/run_fact_cache_receiver.py +++ b/awx/main/management/commands/run_fact_cache_receiver.py @@ -53,8 +53,6 @@ class FactBrokerWorker(ConsumerMixin): return (module, facts) def process_fact_message(self, body, message): - print body - print type(body) hostname = body['host'] inventory_id = body['inventory_id'] facts_data = body['facts'] From 37ef4e8bc80f74a14578e69cc98946a5ac89db1c Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 29 Nov 2016 16:26:16 -0500 Subject: [PATCH 07/14] HTTP logger overrides kwargs with settings --- awx/main/conf.py | 21 ++++++++++-- awx/main/log_utils/handlers.py | 58 ++++++++++++++++++++++------------ 2 files changed, 56 insertions(+), 23 deletions(-) diff --git a/awx/main/conf.py b/awx/main/conf.py index df080c046b..8fbbac796d 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -251,8 +251,25 @@ register( register( 'LOG_AGGREGATOR_USERNAME', field_class=fields.CharField, - label=_('Logging Aggregator Receiver Username'), - help_text=_('Username for Logstash or others'), + label=_('Logging Aggregator Username to Authenticate With'), + help_text=_('Username for Logstash or others (basic auth)'), + category=_('Logging'), + category_slug='logging', +) +register( + 'LOG_AGGREGATOR_PASSWORD', + field_class=fields.CharField, + label=_('Logging Aggregator Password to Authenticate With'), + help_text=_('Password for Logstash or others (basic auth)'), + category=_('Logging'), + category_slug='logging', +) +register( + 'LOG_AGGREGATOR_LOGGERS', + field_class=fields.StringListField, + default=['awx', 'activity_stream', 'job_events', 'packages', 'services', 'ansible'], + label=_(''), + help_text=_(''), category=_('Logging'), category_slug='logging', ) diff --git a/awx/main/log_utils/handlers.py b/awx/main/log_utils/handlers.py index 50fb98ff78..614d2aeb66 100644 --- a/awx/main/log_utils/handlers.py +++ b/awx/main/log_utils/handlers.py @@ -25,6 +25,7 @@ from logstash import formatter # custom from requests.auth import HTTPBasicAuth +from django.conf import settings as django_settings ENABLED_LOGS = ['ansible'] @@ -52,28 +53,42 @@ class TCPLogstashHandler(logging.handlers.SocketHandler, object): return self.formatter.format(record) + b'\n' -# loggly +# techniquest borrowed from the loggly library # https://github.com/varshneyjayant/loggly-python-handler +# MIT License -session = FuturesSession() +# Translation of parameter names to names in Django settings +PARAM_NAMES = { + 'host': 'LOG_AGGREGATOR_HOST', + 'port': 'LOG_AGGREGATOR_PORT', + 'message_type': 'LOG_AGGREGATOR_TYPE', + 'username': 'LOG_AGGREGATOR_USERNAME', + 'password': 'LOG_AGGREGATOR_PASSWORD', +} +# TODO: figure out what to do with LOG_AGGREGATOR_LOGGERS (if anything) def bg_cb(sess, resp): """ Don't do anything with the response """ pass -# add port for a generic handler class HTTPSHandler(logging.Handler): - def __init__(self, host, fqdn=False, **kwargs): + def __init__(self, fqdn=False, **kwargs): super(HTTPSHandler, self).__init__() - self.host_saved = host self.fqdn = fqdn - for fd in ['port', 'message_type', 'username', 'password']: - if fd in kwargs: + for fd in PARAM_NAMES: + # settings values take precedence over the input params + settings_name = PARAM_NAMES[fd] + settings_val = getattr(django_settings, settings_name, None) + if settings_val: + setattr(self, fd, settings_val) + elif fd in kwargs: attr_name = fd - if fd == 'username': - attr_name = 'user' - setattr(self, attr_name, kwargs[fd]) + setattr(self, fd, kwargs[fd]) + else: + 
setattr(self, fd, None) + self.session = FuturesSession() + self.add_auth_information() def get_full_message(self, record): if record.exc_info: @@ -81,18 +96,20 @@ class HTTPSHandler(logging.Handler): else: return record.getMessage() - def add_auth_information(self, kwargs): + def add_auth_information(self): if self.message_type == 'logstash': - if not self.user: + if not self.username: # Logstash authentication not enabled return kwargs - logstash_auth = HTTPBasicAuth(self.user, self.password) - kwargs['auth'] = logstash_auth + logstash_auth = HTTPBasicAuth(self.username, self.password) + self.session.auth = logstash_auth elif self.message_type == 'splunk': + ## Auth used by Splunk logger library + # self.session.auth = ('x', self.access_token) + # self.session.headers.update({'Content-Encoding': 'gzip'}) auth_header = "Splunk %s" % self.token headers = dict(Authorization=auth_header) - kwargs['headers'] = headers - return kwargs + self.session.headers.update(headers) def emit(self, record): try: @@ -108,14 +125,13 @@ class HTTPSHandler(logging.Handler): break if st_type not in ENABLED_LOGS: return - host = self.host_saved + host = self.host if not host.startswith('http'): - host = 'http://%s' % self.host_saved + host = 'http://%s' % self.host if self.port != 80: host = '%s:%s' % (host, str(self.port)) - bare_kwargs = dict(data=payload, background_callback=bg_cb) - kwargs = self.add_auth_information(bare_kwargs) - session.post(host, **kwargs) + kwargs = dict(data=payload, background_callback=bg_cb) + self.session.post(host, **kwargs) except (KeyboardInterrupt, SystemExit): raise except: From 3643d9c06b0ace1edbd0ca18d61375ff24a64e5a Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 30 Nov 2016 16:52:47 -0500 Subject: [PATCH 08/14] allow for reloading of logging settings via CTiT --- awx/conf/signals.py | 4 ++++ awx/main/conf.py | 22 +++++++++++++++++----- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/awx/conf/signals.py b/awx/conf/signals.py index fdcac300cf..2b9d17c227 100644 --- a/awx/conf/signals.py +++ b/awx/conf/signals.py @@ -40,6 +40,10 @@ def handle_setting_change(key, for_delete=False): value=getattr(settings, setting_key, None), enter=not bool(for_delete), ) + # TODO: Move logic to task to run on all cluster nodes + if setting_key.startswith('LOG_AGGREGATOR_'): + settings.LOGGING_CONFIG = None + logging.config.dictConfig(settings.LOGGING) @receiver(post_save, sender=Setting) diff --git a/awx/main/conf.py b/awx/main/conf.py index 8fbbac796d..9ac555bcb7 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -227,6 +227,7 @@ register( register( 'LOG_AGGREGATOR_HOST', field_class=fields.CharField, + allow_null=True, label=_('Logging Aggregator Receiving Host'), help_text=_('External host maintain a log collector to send logs to'), category=_('Logging'), @@ -234,7 +235,8 @@ register( ) register( 'LOG_AGGREGATOR_PORT', - field_class=fields.CharField, + field_class=fields.IntegerField, + allow_null=True, label=_('Logging Aggregator Receiving Port'), help_text=_('Port that the log collector is listening on'), category=_('Logging'), @@ -242,7 +244,9 @@ register( ) register( 'LOG_AGGREGATOR_TYPE', - field_class=fields.CharField, + field_class=fields.ChoiceField, + choices=['logstash', 'splunk', 'loggly', 'sumologic', 'other'], + allow_null=True, label=_('Logging Aggregator Type: Logstash, Loggly, Datadog, etc'), help_text=_('The type of log aggregator service to format messages for'), category=_('Logging'), @@ -251,6 +255,7 @@ register( register( 
'LOG_AGGREGATOR_USERNAME', field_class=fields.CharField, + allow_null=True, label=_('Logging Aggregator Username to Authenticate With'), help_text=_('Username for Logstash or others (basic auth)'), category=_('Logging'), @@ -259,6 +264,7 @@ register( register( 'LOG_AGGREGATOR_PASSWORD', field_class=fields.CharField, + allow_null=True, label=_('Logging Aggregator Password to Authenticate With'), help_text=_('Password for Logstash or others (basic auth)'), category=_('Logging'), @@ -267,9 +273,15 @@ register( register( 'LOG_AGGREGATOR_LOGGERS', field_class=fields.StringListField, - default=['awx', 'activity_stream', 'job_events', 'packages', 'services', 'ansible'], - label=_(''), - help_text=_(''), + default=['awx', 'activity_stream', 'job_events', 'system_tracking'], + label=_('Loggers to send data to the log aggregator from'), + help_text=_('List of loggers that will send HTTP logs to the collector, these can ' + 'include any or all of: \n' + 'activity_stream - logs duplicate to records entered in activity stream\n' + 'job_events - callback data from Ansible job events\n' + 'system_tracking - data generated from scan jobs\n' + 'Sending generic Tower logs must be configured through local_settings.py' + 'instead of this mechanism.'), category=_('Logging'), category_slug='logging', ) From 7344a998b28aff944593dffdd609887daf3152ec Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 1 Dec 2016 19:09:55 -0500 Subject: [PATCH 09/14] move formatter to logger file, settings reload on startup --- awx/conf/apps.py | 3 + awx/conf/signals.py | 6 +- awx/main/conf.py | 11 ++ awx/main/consumers.py | 1 + awx/main/log_utils/formatters.py | 102 ++++++++++++++---- awx/main/log_utils/handlers.py | 72 +++++++------ .../commands/run_fact_cache_receiver.py | 6 +- awx/main/models/jobs.py | 7 +- awx/main/signals.py | 12 +-- awx/main/tests/unit/test_log_formatter.py | 13 +-- awx/main/utils.py | 62 ----------- awx/settings/defaults.py | 24 ----- 12 files changed, 156 insertions(+), 163 deletions(-) diff --git a/awx/conf/apps.py b/awx/conf/apps.py index a77cc84209..8f9980dde9 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -2,6 +2,8 @@ from django.apps import AppConfig # from django.core import checks from django.utils.translation import ugettext_lazy as _ +from django.utils.log import configure_logging +from django.conf import settings class ConfConfig(AppConfig): @@ -13,4 +15,5 @@ class ConfConfig(AppConfig): self.module.autodiscover() from .settings import SettingsWrapper SettingsWrapper.initialize() + configure_logging(settings.LOGGING_CONFIG, settings.LOGGING) # checks.register(SettingsWrapper._check_settings) diff --git a/awx/conf/signals.py b/awx/conf/signals.py index 2b9d17c227..f2ae1baf16 100644 --- a/awx/conf/signals.py +++ b/awx/conf/signals.py @@ -7,6 +7,7 @@ from django.core.cache import cache from django.core.signals import setting_changed from django.db.models.signals import post_save, pre_delete, post_delete from django.dispatch import receiver +from django.utils.log import configure_logging # Tower import awx.main.signals @@ -42,8 +43,9 @@ def handle_setting_change(key, for_delete=False): ) # TODO: Move logic to task to run on all cluster nodes if setting_key.startswith('LOG_AGGREGATOR_'): - settings.LOGGING_CONFIG = None - logging.config.dictConfig(settings.LOGGING) + configure_logging(settings.LOGGING_CONFIG, settings.LOGGING) + # settings.LOGGING_CONFIG = None + # logging.config.dictConfig(settings.LOGGING) @receiver(post_save, sender=Setting) diff --git a/awx/main/conf.py 
b/awx/main/conf.py index 9ac555bcb7..527f262334 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -285,3 +285,14 @@ register( category=_('Logging'), category_slug='logging', ) +register( + 'LOG_AGGREGATOR_INDIVIDUAL_FACTS', + field_class=fields.BooleanField, + default=False, + label=_('Flag denoting to send individual messages for each fact in system tracking'), + help_text=_('If not set, the data from system tracking will be sent inside ' + 'of a single dictionary, but if set, separate requests will be sent ' + 'for each package, service, etc. that is found in the scan.'), + category=_('Logging'), + category_slug='logging', +) diff --git a/awx/main/consumers.py b/awx/main/consumers.py index 2e74f05114..a8c56a264d 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -11,6 +11,7 @@ from awx.main.models.organization import AuthToken logger = logging.getLogger('awx.main.consumers') + def discard_groups(message): if 'groups' in message.channel_session: for group in message.channel_session['groups']: diff --git a/awx/main/log_utils/formatters.py b/awx/main/log_utils/formatters.py index 17fafca063..94f9b5c0f0 100644 --- a/awx/main/log_utils/formatters.py +++ b/awx/main/log_utils/formatters.py @@ -3,50 +3,108 @@ from logstash.formatter import LogstashFormatterVersion1 from django.conf import settings +from copy import copy +import json +import time -# # Loggly example -# 'json': { -# 'format': '{ -# "loggerName":"%(name)s", -# "asciTime":"%(asctime)s", -# "fileName":"%(filename)s", -# "logRecordCreationTime":"%(created)f", -# "functionName":"%(funcName)s", -# "levelNo":"%(levelno)s", -# "lineNo":"%(lineno)d", -# "time":"%(msecs)d", -# "levelName":"%(levelname)s", -# "message":"%(message)s"}', -# }, - class LogstashFormatter(LogstashFormatterVersion1): def __init__(self, **kwargs): ret = super(LogstashFormatter, self).__init__(**kwargs) self.host_id = settings.CLUSTER_HOST_ID return ret + def reformat_data_for_log(self, raw_data, kind=None): + ''' + Process dictionaries from various contexts (job events, activity stream + changes, etc.) to give meaningful information + Output a dictionary which will be passed in logstash or syslog format + to the logging receiver + ''' + if kind == 'activity_stream': + return raw_data + rename_fields = set(( + 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename', + 'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module', + 'msecs', 'msecs', 'message', 'msg', 'name', 'pathname', 'process', + 'processName', 'relativeCreated', 'thread', 'threadName', 'extra', + 'auth_token', 'tags', 'host', 'host_id', 'level', 'port', 'uuid')) + if kind == 'system_tracking': + data = copy(raw_data['facts_data']) + elif kind == 'job_events': + data = copy(raw_data['event_model_data']) + else: + data = copy(raw_data) + if isinstance(data, basestring): + data = json.loads(data) + skip_fields = ('res', 'password', 'event_data', 'stdout') + data_for_log = {} + + def index_by_name(alist): + """Takes a list of dictionaries with `name` as a key in each dict + and returns a dictionary indexed by those names""" + adict = {} + for item in alist: + subdict = copy(item) + if 'name' in subdict: + name = subdict.get('name', None) + elif 'path' in subdict: + name = subdict.get('path', None) + if name: + # Logstash v2 can not accept '.' 
in a name + name = name.replace('.', '_') + adict[name] = subdict + return adict + + if kind == 'job_events': + data.update(data.get('event_data', {})) + for fd in data: + if fd in skip_fields: + continue + key = fd + if fd in rename_fields: + key = 'event_%s' % fd + val = data[fd] + if key.endswith('created'): + time_float = time.mktime(data[fd].timetuple()) + val = self.format_timestamp(time_float) + data_for_log[key] = val + elif kind == 'system_tracking': + module_name = raw_data['module_name'] + if module_name in ['services', 'packages', 'files']: + data_for_log[module_name] = index_by_name(data) + elif module_name == 'ansible': + data_for_log['ansible'] = data + # Remove sub-keys with data type conflicts in elastic search + data_for_log['ansible'].pop('ansible_python_version', None) + data_for_log['ansible']['ansible_python'].pop('version_info', None) + else: + data_for_log['facts'] = data + data_for_log['module_name'] = module_name + return data_for_log + def get_extra_fields(self, record): fields = super(LogstashFormatter, self).get_extra_fields(record) - fields['cluster_host_id'] = self.host_id + if record.name.startswith('awx.analytics'): + log_kind = record.name.split('.')[-1] + fields = self.reformat_data_for_log(fields, kind=log_kind) return fields def format(self, record): - # Create message dict - # message = record.getMessage() - # print ' message ' + str(message) message = { + # Fields not included, but exist in related logs + # 'path': record.pathname + # '@version': '1', # from python-logstash + # 'tags': self.tags, '@timestamp': self.format_timestamp(record.created), - '@version': '1', 'message': record.getMessage(), 'host': self.host, - 'path': record.pathname, - 'tags': self.tags, 'type': self.message_type, # Extra Fields 'level': record.levelname, 'logger_name': record.name, + 'cluster_host_id': self.host_id } # Add extra fields diff --git a/awx/main/log_utils/handlers.py b/awx/main/log_utils/handlers.py index 614d2aeb66..55027dc7d9 100644 --- a/awx/main/log_utils/handlers.py +++ b/awx/main/log_utils/handlers.py @@ -24,12 +24,11 @@ from requests_futures.sessions import FuturesSession from logstash import formatter # custom +from copy import copy from requests.auth import HTTPBasicAuth from django.conf import settings as django_settings -ENABLED_LOGS = ['ansible'] - # Logstash # https://github.com/vklochan/python-logstash class TCPLogstashHandler(logging.handlers.SocketHandler, object): @@ -64,14 +63,16 @@ PARAM_NAMES = { 'message_type': 'LOG_AGGREGATOR_TYPE', 'username': 'LOG_AGGREGATOR_USERNAME', 'password': 'LOG_AGGREGATOR_PASSWORD', + 'enabled_loggers': 'LOG_AGGREGATOR_LOGGERS', + 'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS', } # TODO: figure out what to do with LOG_AGGREGATOR_LOGGERS (if anything) -def bg_cb(sess, resp): - """ Don't do anything with the response """ +def unused_callback(sess, resp): pass + class HTTPSHandler(logging.Handler): def __init__(self, fqdn=False, **kwargs): super(HTTPSHandler, self).__init__() @@ -83,7 +84,6 @@ class HTTPSHandler(logging.Handler): if settings_val: setattr(self, fd, settings_val) elif fd in kwargs: - attr_name = fd setattr(self, fd, kwargs[fd]) else: setattr(self, fd, None) @@ -100,37 +100,53 @@ class HTTPSHandler(logging.Handler): if self.message_type == 'logstash': if not self.username: # Logstash authentication not enabled - return kwargs + return logstash_auth = HTTPBasicAuth(self.username, self.password) self.session.auth = logstash_auth elif self.message_type == 'splunk': - ## Auth used by Splunk logger library 
- # self.session.auth = ('x', self.access_token) - # self.session.headers.update({'Content-Encoding': 'gzip'}) - auth_header = "Splunk %s" % self.token - headers = dict(Authorization=auth_header) + auth_header = "Splunk %s" % self.password + headers = { + "Authorization": auth_header, + "Content-Type": "application/json" + } self.session.headers.update(headers) def emit(self, record): + if (self.host == '' or self.enabled_loggers is None or + record.name.split('.')[-1] not in self.enabled_loggers): + return try: payload = self.format(record) - # TODO: move this enablement logic to rely on individual loggers once - # the enablement config variable is hooked up - payload_data = json.loads(payload) - if payload_data['logger_name'].startswith('awx.analytics.system_tracking'): - st_type = None - for fd in ['services', 'packages', 'files', 'ansible']: - if fd in payload_data: - st_type = fd - break - if st_type not in ENABLED_LOGS: - return host = self.host if not host.startswith('http'): host = 'http://%s' % self.host - if self.port != 80: + if self.port != 80 and self.port is not None: host = '%s:%s' % (host, str(self.port)) - kwargs = dict(data=payload, background_callback=bg_cb) + + # Special action for System Tracking, queue up multiple log messages + if self.indv_facts: + payload_data = json.loads(payload) + if record.name.startswith('awx.analytics.system_tracking'): + module_name = payload_data['module_name'] + if module_name in ['services', 'packages', 'files']: + facts_dict = payload_data.pop(module_name) + for key in facts_dict: + fact_payload = copy(payload_data) + fact_payload.update(facts_dict[key]) + kwargs = dict(data=json.dumps(fact_payload), background_callback=unused_callback) + self.session.post(host, **kwargs) + return + + kwargs = dict(data=payload, background_callback=unused_callback) + # # splunk doesn't take "@" in the keys + # if self.message_type == 'splunk': + # payload_dict = json.loads(payload) + # new_dict = {} + # for key in payload_dict: + # new_key = key.replace('@', '') + # new_dict[new_key] = payload_dict[key] + # new_payload = json.dumps(new_dict) + # kwargs['data'] = json.dumps(new_dict) self.session.post(host, **kwargs) except (KeyboardInterrupt, SystemExit): raise @@ -162,9 +178,7 @@ class SplunkLogger(logging.Handler): def _set_auth(self, access_token, project_id, api_domain): # The access token and project id passed as parameter override the ones # configured in the .splunk_logger file. 
- if access_token is not None\ - and project_id is not None\ - and api_domain is not None: + if access_token is not None and project_id is not None and api_domain is not None: self.project_id = project_id self.access_token = access_token self.api_domain = api_domain @@ -173,9 +187,7 @@ class SplunkLogger(logging.Handler): # Try to get the credentials form the configuration file self.project_id, self.access_token, self.api_domain = parse_config_file() - if self.project_id is None\ - or self.access_token is None\ - or self.api_domain is None: + if self.project_id is None or self.access_token is None or self.api_domain is None: # Try to get the credentials form the environment variables self.project_id, self.access_token, self.api_domain = get_config_from_env() diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py index a0082416c2..5a111c6fa2 100644 --- a/awx/main/management/commands/run_fact_cache_receiver.py +++ b/awx/main/management/commands/run_fact_cache_receiver.py @@ -16,10 +16,9 @@ from django.utils import timezone # AWX from awx.main.models.fact import Fact from awx.main.models.inventory import Host -from awx.main.utils import format_for_log logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver') -data_logger = logging.getLogger('awx.analytics.system_tracking') +analytics_logger = logging.getLogger('awx.analytics.system_tracking') class FactBrokerWorker(ConsumerMixin): @@ -83,7 +82,8 @@ class FactBrokerWorker(ConsumerMixin): # Create new Fact entry fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts) logger.info('Created new fact <%s, %s>' % (fact_obj.id, module_name)) - data_logger.info('Received message with fact data', extra=format_for_log({module_name: facts}, kind="fact")) + analytics_logger.info('Received message with fact data', extra=dict( + module_name=module_name, facts_data=facts)) return fact_obj diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index fb4cd10638..9671575f30 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -32,7 +32,6 @@ from awx.main.models.notifications import ( from awx.main.utils import ( ignore_inventory_computed_fields, parse_yaml_or_json, - format_for_log ) from awx.main.redact import PlainTextCleaner from awx.main.fields import ImplicitRoleField @@ -44,7 +43,7 @@ from awx.main.consumers import emit_channel_notification logger = logging.getLogger('awx.main.models.jobs') -event_logger = logging.getLogger('awx.analytics.job_events') +analytics_logger = logging.getLogger('awx.analytics.job_events') __all__ = ['JobTemplate', 'Job', 'JobHostSummary', 'JobEvent', 'SystemJobOptions', 'SystemJobTemplate', 'SystemJob'] @@ -1188,8 +1187,7 @@ class JobEvent(CreatedModifiedModel): if parent_id: kwargs['parent_id'] = parent_id - # event_logger.info('Body: {}'.format(str(data_for_log)), extra=data_for_log) - event_logger.info('Job event data saved.', extra=format_for_log(kwargs, kind='event')) + analytics_logger.info('Job event data saved.', extra=dict(event_model_data=kwargs)) job_event = JobEvent.objects.create(**kwargs) @@ -1201,6 +1199,7 @@ class JobEvent(CreatedModifiedModel): # Save artifact data to parent job (if provided). 
if artifact_data: artifact_dict = json.loads(artifact_data) + event_data = kwargs.get('event_data', None) if event_data and isinstance(event_data, dict): res = event_data.get('res', None) if res and isinstance(res, dict): diff --git a/awx/main/signals.py b/awx/main/signals.py index 477a9f2257..ceda8899b1 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -20,7 +20,7 @@ from crum.signals import current_user_getter from awx.main.models import * # noqa from awx.api.serializers import * # noqa from awx.main.utils import model_instance_diff, model_to_dict, camelcase_to_underscore -from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates, format_for_log +from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates from awx.main.tasks import update_inventory_computed_fields from awx.main.consumers import emit_channel_notification @@ -28,7 +28,6 @@ from awx.main.consumers import emit_channel_notification __all__ = [] logger = logging.getLogger('awx.main.signals') -analytics_logger = logging.getLogger('awx.analytics.activity_stream') # Update has_active_failures for inventory/groups when a Host/Group is deleted, # when a Host-Group or Group-Group relationship is updated, or when a Job is deleted @@ -370,15 +369,11 @@ def activity_stream_create(sender, instance, created, **kwargs): if type(instance) == Job: if 'extra_vars' in changes: changes['extra_vars'] = instance.display_extra_vars() - changes_dict = json.dumps(changes) activity_entry = ActivityStream( operation='create', object1=object1, - changes=changes_dict) + changes=json.dumps(changes)) activity_entry.save() - # analytics_logger.info('Activity Stream create entry for %s' % str(object1), - # extra=format_for_log(changes, kind='activity_stream', - # actor=activity_entry.actor, operation='update', object1=object1)) #TODO: Weird situation where cascade SETNULL doesn't work # it might actually be a good idea to remove all of these FK references since # we don't really use them anyway. 
@@ -406,9 +401,6 @@ def activity_stream_update(sender, instance, **kwargs): object1=object1, changes=json.dumps(changes)) activity_entry.save() - # analytics_logger.info('Activity Stream update entry for %s' % str(object1), - # extra=format_for_log(changes, kind='activity_stream', - # actor=activity_entry.actor, operation='update', object1=object1)) if instance._meta.model_name != 'setting': # Is not conf.Setting instance getattr(activity_entry, object1).add(instance) diff --git a/awx/main/tests/unit/test_log_formatter.py b/awx/main/tests/unit/test_log_formatter.py index afa2a799e4..1d3b46328b 100644 --- a/awx/main/tests/unit/test_log_formatter.py +++ b/awx/main/tests/unit/test_log_formatter.py @@ -6,12 +6,13 @@ event_data = { 'stdout': u'\x1b[0;36mskipping: [host1]\x1b[0m\r\n', u'uuid': u'ffe4858c-ac38-4cab-9192-b07bdbe80502', u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 376051), 'counter': 17, u'job_id': 209, u'event': u'runner_on_skipped', 'parent_id': 1937, 'end_line': 24, 'start_line': 23, - u'event_data': {u'play_pattern': u'all', u'play': u'all', u'task': u'Scan files (Windows)', - u'task_args': u'paths={{ scan_file_paths }}, recursive={{ scan_use_recursive }}, get_checksum={{ scan_use_checksum }}', - u'remote_addr': u'host1', u'pid': 1427, u'play_uuid': u'da784361-3811-4ea7-9cc8-46ec758fde66', - u'task_uuid': u'4f9525fd-bc25-4ace-9eb2-adad9fa21a94', u'event_loop': None, - u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', u'playbook': u'scan_facts.yml', - u'task_action': u'win_scan_files', u'host': u'host1', u'task_path': None}} + u'event_data': { + u'play_pattern': u'all', u'play': u'all', u'task': u'Scan files (Windows)', + u'task_args': u'paths={{ scan_file_paths }}, recursive={{ scan_use_recursive }}, get_checksum={{ scan_use_checksum }}', + u'remote_addr': u'host1', u'pid': 1427, u'play_uuid': u'da784361-3811-4ea7-9cc8-46ec758fde66', + u'task_uuid': u'4f9525fd-bc25-4ace-9eb2-adad9fa21a94', u'event_loop': None, + u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', u'playbook': u'scan_facts.yml', + u'task_action': u'win_scan_files', u'host': u'host1', u'task_path': None}} event_stats = { 'stdout': u'asdf', u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 385416), diff --git a/awx/main/utils.py b/awx/main/utils.py index 13ece206a9..00937a84c1 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -17,7 +17,6 @@ import urlparse import threading import contextlib import tempfile -from copy import copy # Decorator from decorator import decorator @@ -825,64 +824,3 @@ class OutputEventFilter(object): self._current_event_data = next_event_data else: self._current_event_data = None - -def format_for_log(raw_data, kind=None, **kwargs): - ''' - Process dictionaries from various contexts (job events, activity stream - changes, etc.) 
to give meaningful information - Output a dictionary which will be passed in logstash or syslog format - to the logging receiver - ''' - rename_fields = set(( - 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename', - 'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module', - 'msecs', 'msecs', 'message', 'msg', 'name', 'pathname', 'process', - 'processName', 'relativeCreated', 'thread', 'threadName', 'extra', - 'auth_token', 'tags', 'host', 'host_id', 'level', 'port', 'uuid')) - data = copy(raw_data) - if isinstance(data, basestring): - data = json.loads(data) - data.update(kwargs) - skip_fields = ('res', 'password', 'event_data', 'stdout') - data_for_log = {} - - def index_by_name(alist): - """Takes a list of dictionaries with `name` as a key in each dict - and returns a dictionary indexed by those names""" - adict = {} - for item in alist: - subdict = copy(item) - name = subdict.pop('name', None) - if name: - # Logstash v2 can not accept '.' in a name - name = name.replace('.', '_') - adict[name] = subdict - return adict - - if kind == 'event': - data.update(data.get('event_data', {})) - for fd in data: - if fd in skip_fields: - continue - key = fd - if fd in rename_fields: - key = 'event_%s' % fd - if type(data[fd]) is dict: - data_for_log[key] = len(data[fd]) - else: - data_for_log[key] = data[fd] - elif kind == 'fact': - if 'services' in data: - data_for_log['services'] = index_by_name(data['services']) - elif 'packages' in data: - data_for_log['packages'] = index_by_name(data['packages']) - elif 'files' in data: - data_for_log['files'] = index_by_name(data['files']) - elif 'ansible' in data: - data_for_log['ansible'] = data['ansible'] - # Remove sub-keys with data type conflicts in elastic search - data_for_log['ansible'].pop('ansible_python_version', None) - data_for_log['ansible']['ansible_python'].pop('version_info', None) - else: - data_for_log['facts'] = data - return data_for_log diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 89afcc0f26..1be3f2ca89 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -842,19 +842,9 @@ LOGGING = { 'simple': { 'format': '%(asctime)s %(levelname)-8s %(name)s %(message)s', }, - 'logstash': { - '()': 'awx.main.log_utils.formatters.LogstashFormatter' - }, 'json': { '()': 'awx.main.log_utils.formatters.LogstashFormatter' } - # From loggly examples - # 'json': { - # 'format': '{ "loggerName":"%(name)s", "asciTime":"%(asctime)s", "fileName":"%(filename)s", "logRecordCreationTime":"%(created)f", "functionName":"%(funcName)s", "levelNo":"%(levelno)s", "lineNo":"%(lineno)d", "time":"%(msecs)d", "levelName":"%(levelname)s", "message":"%(message)s"}', - # }, - # 'json': { - # 'format': '{"message": %(message)s}', - # }, }, 'handlers': { 'console': { @@ -882,20 +872,6 @@ LOGGING = { 'formatter': 'json', 'host': '', }, - 'logstash': { - 'level': 'INFO', - 'class': 'awx.main.log_utils.handlers.HTTPSHandler', - 'host': 'logstash', # IP/name of our Logstash EC2 instance - 'port': 8085, - # 'port': 5000, - # 'version': 1, - 'message_type': 'logstash', - 'fqdn': True, - # 'tags': ['tower'], - 'formatter': 'json', - 'username': 'awx_logger', - 'password': 'workflows', - }, 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], From db21178b14b1c40bd8efe2f20dcf16b1eaf04fbf Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Fri, 2 Dec 2016 10:21:36 -0500 Subject: [PATCH 10/14] remove unused log handlers and cleanup --- awx/main/log_utils/handlers.py | 196 +++++---------------------------- 1 file 
changed, 30 insertions(+), 166 deletions(-) diff --git a/awx/main/log_utils/handlers.py b/awx/main/log_utils/handlers.py index 55027dc7d9..23f23c4d5e 100644 --- a/awx/main/log_utils/handlers.py +++ b/awx/main/log_utils/handlers.py @@ -1,60 +1,24 @@ # Copyright (c) 2017 Ansible Tower by Red Hat # All Rights Reserved. -# common -import socket +# Python import logging - -# Splunk -import urllib import json -import gzip -import cStringIO - import requests - -from .utils import parse_config_file, get_config_from_env +from copy import copy # loggly import traceback from requests_futures.sessions import FuturesSession -# Logstash -from logstash import formatter - # custom -from copy import copy -from requests.auth import HTTPBasicAuth from django.conf import settings as django_settings - -# Logstash -# https://github.com/vklochan/python-logstash -class TCPLogstashHandler(logging.handlers.SocketHandler, object): - """Python logging handler for Logstash. Sends events over TCP. - :param host: The host of the logstash server. - :param port: The port of the logstash server (default 5959). - :param message_type: The type of the message (default logstash). - :param fqdn; Indicates whether to show fully qualified domain name or not (default False). - :param version: version of logstash event schema (default is 0). - :param tags: list of tags for a logger (default is None). - """ - - def __init__(self, host, port=5959, message_type='logstash', tags=None, fqdn=False, version=0): - super(TCPLogstashHandler, self).__init__(host, port) - if version == 1: - self.formatter = formatter.LogstashFormatterVersion1(message_type, tags, fqdn) - else: - self.formatter = formatter.LogstashFormatterVersion0(message_type, tags, fqdn) - - def makePickle(self, record): - return self.formatter.format(record) + b'\n' - - -# techniquest borrowed from the loggly library -# https://github.com/varshneyjayant/loggly-python-handler -# MIT License +# AWX external logging handler, generally designed to be used +# with the accompanying LogstashHandler, derives from python-logstash library +# Non-blocking request accomplished by FuturesSession, similar +# to the loggly-python-handler library (not used) # Translation of parameter names to names in Django settings PARAM_NAMES = { @@ -66,7 +30,6 @@ PARAM_NAMES = { 'enabled_loggers': 'LOG_AGGREGATOR_LOGGERS', 'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS', } -# TODO: figure out what to do with LOG_AGGREGATOR_LOGGERS (if anything) def unused_callback(sess, resp): @@ -101,7 +64,7 @@ class HTTPSHandler(logging.Handler): if not self.username: # Logstash authentication not enabled return - logstash_auth = HTTPBasicAuth(self.username, self.password) + logstash_auth = requests.auth.HTTPBasicAuth(self.username, self.password) self.session.auth = logstash_auth elif self.message_type == 'splunk': auth_header = "Splunk %s" % self.password @@ -111,17 +74,33 @@ class HTTPSHandler(logging.Handler): } self.session.headers.update(headers) + def get_http_host(self): + host = self.host + if not host.startswith('http'): + host = 'http://%s' % self.host + if self.port != 80 and self.port is not None: + host = '%s:%s' % (host, str(self.port)) + return host + + def get_post_kwargs(self, payload_input): + if self.message_type == 'splunk': + # Splunk needs data nested under key "event" + if not isinstance(payload_input, dict): + payload_input = json.loads(payload_input) + payload_input = {'event': payload_input} + if isinstance(payload_input, dict): + payload_str = json.dumps(payload_input) + else: + payload_str 
= payload_input + return dict(data=payload_str, background_callback=unused_callback) + def emit(self, record): if (self.host == '' or self.enabled_loggers is None or record.name.split('.')[-1] not in self.enabled_loggers): return try: payload = self.format(record) - host = self.host - if not host.startswith('http'): - host = 'http://%s' % self.host - if self.port != 80 and self.port is not None: - host = '%s:%s' % (host, str(self.port)) + host = self.get_http_host() # Special action for System Tracking, queue up multiple log messages if self.indv_facts: @@ -133,127 +112,12 @@ class HTTPSHandler(logging.Handler): for key in facts_dict: fact_payload = copy(payload_data) fact_payload.update(facts_dict[key]) - kwargs = dict(data=json.dumps(fact_payload), background_callback=unused_callback) - self.session.post(host, **kwargs) + self.session.post(host, **self.get_post_kwargs(fact_payload)) return - kwargs = dict(data=payload, background_callback=unused_callback) - # # splunk doesn't take "@" in the keys - # if self.message_type == 'splunk': - # payload_dict = json.loads(payload) - # new_dict = {} - # for key in payload_dict: - # new_key = key.replace('@', '') - # new_dict[new_key] = payload_dict[key] - # new_payload = json.dumps(new_dict) - # kwargs['data'] = json.dumps(new_dict) - self.session.post(host, **kwargs) + self.session.post(host, **self.get_post_kwargs(payload)) except (KeyboardInterrupt, SystemExit): raise except: self.handleError(record) - -# splunk -# https://github.com/andresriancho/splunk-logger - -class SplunkLogger(logging.Handler): - """ - A class to send messages to splunk storm using their API - """ - # Required format for splunk storm - INPUT_URL_FMT = 'https://%s/1/inputs/http' - - def __init__(self, access_token=None, project_id=None, api_domain=None): - logging.Handler.__init__(self) - - self._set_auth(access_token, project_id, api_domain) - self.url = self.INPUT_URL_FMT % self.api_domain - - self._set_url_opener() - - # Handle errors in authentication - self._auth_failed = False - - def _set_auth(self, access_token, project_id, api_domain): - # The access token and project id passed as parameter override the ones - # configured in the .splunk_logger file. - if access_token is not None and project_id is not None and api_domain is not None: - self.project_id = project_id - self.access_token = access_token - self.api_domain = api_domain - - else: - # Try to get the credentials form the configuration file - self.project_id, self.access_token, self.api_domain = parse_config_file() - - if self.project_id is None or self.access_token is None or self.api_domain is None: - # Try to get the credentials form the environment variables - self.project_id, self.access_token, self.api_domain = get_config_from_env() - - if self.access_token is None or self.project_id is None: - raise ValueError('Access token, project id and API endpoint domain' - ' need to be set.') - - def _set_url_opener(self): - # We disable the logging of the requests module to avoid some infinite - # recursion errors that might appear. - requests_log = logging.getLogger("requests") - requests_log.setLevel(logging.CRITICAL) - - self.session = requests.Session() - self.session.auth = ('x', self.access_token) - self.session.headers.update({'Content-Encoding': 'gzip'}) - - def usesTime(self): - return False - - def _compress(self, input_str): - """ - Compress the log message in order to send less bytes to the wire. 
- """ - compressed_bits = cStringIO.StringIO() - - f = gzip.GzipFile(fileobj=compressed_bits, mode='wb') - f.write(input_str) - f.close() - - return compressed_bits.getvalue() - - def emit(self, record): - - if self._auth_failed: - # Don't send anything else once a 401 was returned - return - - try: - response = self._send_to_splunk(record) - except (KeyboardInterrupt, SystemExit): - raise - except: - # All errors end here. - self.handleError(record) - else: - if response.status_code == 401: - self._auth_failed = True - - def _send_to_splunk(self, record): - # http://docs.splunk.com/Documentation/Storm/latest/User/Sourcesandsourcetypes - sourcetype = 'json_no_timestamp' - - host = socket.gethostname() - - event_dict = {'data': self.format(record), - 'level': record.levelname, - 'module': record.module, - 'line': record.lineno} - event = json.dumps(event_dict) - event = self._compress(event) - - params = {'index': self.project_id, - 'sourcetype': sourcetype, - 'host': host} - - url = '%s?%s' % (self.url, urllib.urlencode(params)) - return self.session.post(url, data=event) - From 7848198b9f02dc3754e77ea5b5124682f499d21d Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Fri, 2 Dec 2016 14:36:04 -0500 Subject: [PATCH 11/14] New awx.main.utils directory, distributed task to invalidate settings --- Makefile | 2 +- awx/conf/signals.py | 16 +++--- awx/main/log_utils/__init__.py | 2 - awx/main/log_utils/utils.py | 57 --------------------- awx/main/tasks.py | 13 +++++ awx/main/utils/__init__.py | 25 +++++++++ awx/main/{utils.py => utils/common.py} | 0 awx/main/{log_utils => utils}/formatters.py | 0 awx/main/{log_utils => utils}/handlers.py | 0 awx/settings/defaults.py | 6 ++- 10 files changed, 50 insertions(+), 71 deletions(-) delete mode 100644 awx/main/log_utils/__init__.py delete mode 100644 awx/main/log_utils/utils.py create mode 100644 awx/main/utils/__init__.py rename awx/main/{utils.py => utils/common.py} (100%) rename awx/main/{log_utils => utils}/formatters.py (100%) rename awx/main/{log_utils => utils}/handlers.py (100%) diff --git a/Makefile b/Makefile index 669b5eeeca..63b5ac259e 100644 --- a/Makefile +++ b/Makefile @@ -433,7 +433,7 @@ celeryd: @if [ "$(VENV_BASE)" ]; then \ . 
$(VENV_BASE)/tower/bin/activate; \ fi; \ - $(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,$(COMPOSE_HOST) + $(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,broadcast_all,$(COMPOSE_HOST) #$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE) # Run to start the zeromq callback receiver diff --git a/awx/conf/signals.py b/awx/conf/signals.py index f2ae1baf16..a2a30b09c1 100644 --- a/awx/conf/signals.py +++ b/awx/conf/signals.py @@ -3,17 +3,16 @@ import logging # Django from django.conf import settings -from django.core.cache import cache from django.core.signals import setting_changed from django.db.models.signals import post_save, pre_delete, post_delete from django.dispatch import receiver -from django.utils.log import configure_logging # Tower import awx.main.signals from awx.conf import settings_registry from awx.conf.models import Setting from awx.conf.serializers import SettingSerializer +from awx.main.tasks import clear_cache_keys logger = logging.getLogger('awx.conf.signals') @@ -26,12 +25,16 @@ def handle_setting_change(key, for_delete=False): # When a setting changes or is deleted, remove its value from cache along # with any other settings that depend on it. setting_keys = [key] + setting_key_dict = {} + setting_key_dict[key] = key for dependent_key in settings_registry.get_dependent_settings(key): # Note: Doesn't handle multiple levels of dependencies! setting_keys.append(dependent_key) + setting_key_dict[dependent_key] = dependent_key cache_keys = set([Setting.get_cache_key(k) for k in setting_keys]) - logger.debug('cache delete_many(%r)', cache_keys) - cache.delete_many(cache_keys) + logger.debug('sending signals to delete cache keys(%r)', cache_keys) + # cache.delete_many(cache_keys) + clear_cache_keys.delay(setting_key_dict) # Send setting_changed signal with new value for each setting. for setting_key in setting_keys: @@ -41,11 +44,6 @@ def handle_setting_change(key, for_delete=False): value=getattr(settings, setting_key, None), enter=not bool(for_delete), ) - # TODO: Move logic to task to run on all cluster nodes - if setting_key.startswith('LOG_AGGREGATOR_'): - configure_logging(settings.LOGGING_CONFIG, settings.LOGGING) - # settings.LOGGING_CONFIG = None - # logging.config.dictConfig(settings.LOGGING) @receiver(post_save, sender=Setting) diff --git a/awx/main/log_utils/__init__.py b/awx/main/log_utils/__init__.py deleted file mode 100644 index 6c3f484790..0000000000 --- a/awx/main/log_utils/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) 2017 Ansible by Red Hat -# All Rights Reserved. diff --git a/awx/main/log_utils/utils.py b/awx/main/log_utils/utils.py deleted file mode 100644 index a57515888f..0000000000 --- a/awx/main/log_utils/utils.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -import yaml - - -def parse_config_file(): - """ - Find the .splunk_logger config file in the current directory, or in the - user's home and parse it. The one in the current directory has precedence. 
- - :return: A tuple with: - - project_id - - access_token - """ - for filename in ('.splunk_logger', os.path.expanduser('~/.splunk_logger')): - - project_id, access_token, api_domain = _parse_config_file_impl(filename) - - if project_id is not None \ - and access_token is not None \ - and api_domain is not None: - return project_id, access_token, api_domain - - else: - return None, None, None - - -def _parse_config_file_impl(filename): - """ - Format for the file is: - - credentials: - project_id: ... - access_token: ... - api_domain: ... - - :param filename: The filename to parse - :return: A tuple with: - - project_id - - access_token - - api_domain - """ - try: - doc = yaml.load(file(filename).read()) - - project_id = doc["credentials"]["project_id"] - access_token = doc["credentials"]["access_token"] - api_domain = doc["credentials"]["api_domain"] - - return project_id, access_token, api_domain - except: - return None, None, None - - -def get_config_from_env(): - return (os.environ.get('SPLUNK_PROJECT_ID', None), - os.environ.get('SPLUNK_ACCESS_TOKEN', None), - os.environ.get('SPLUNK_API_DOMAIN', None)) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 0c8aa8acce..4f6e647b0c 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -42,6 +42,8 @@ from django.utils.encoding import smart_str from django.core.mail import send_mail from django.contrib.auth.models import User from django.utils.translation import ugettext_lazy as _ +from django.core.cache import cache +from django.utils.log import configure_logging # AWX from awx.main.constants import CLOUD_PROVIDERS @@ -83,6 +85,17 @@ def celery_startup(conf=None, **kwargs): logger.error("Failed to rebuild schedule {}: {}".format(sch, e)) +@task(queue='broadcast_all') +def clear_cache_keys(cache_keys): + set_of_keys = set([key for key in cache_keys]) + logger.debug('cache delete_many(%r)', set_of_keys) + cache.delete_many(set_of_keys) + for setting_key in set_of_keys: + if setting_key.startswith('LOG_AGGREGATOR_'): + configure_logging(settings.LOGGING_CONFIG, settings.LOGGING) + break + + @task(queue='default') def send_notifications(notification_list, job_id=None): if not isinstance(notification_list, list): diff --git a/awx/main/utils/__init__.py b/awx/main/utils/__init__.py new file mode 100644 index 0000000000..45b0a0131a --- /dev/null +++ b/awx/main/utils/__init__.py @@ -0,0 +1,25 @@ +# Copyright (c) 2017 Ansible Tower by Red Hat +# All Rights Reserved. 
+ +# AWX +from awx.main.utils.common import * # noqa + +# Fields that didn't get included in __all__ +# TODO: after initial commit of file move to devel, these can be added +# to common.py __all__ and removed here +from awx.main.utils.common import ( # noqa + RequireDebugTrueOrTest, + encrypt_field, + parse_yaml_or_json, + decrypt_field, + build_url, + timestamp_apiformat, + model_instance_diff, + model_to_dict, + check_proot_installed, + build_proot_temp_dir, + wrap_args_with_proot, + get_system_task_capacity, + decrypt_field_value +) + diff --git a/awx/main/utils.py b/awx/main/utils/common.py similarity index 100% rename from awx/main/utils.py rename to awx/main/utils/common.py diff --git a/awx/main/log_utils/formatters.py b/awx/main/utils/formatters.py similarity index 100% rename from awx/main/log_utils/formatters.py rename to awx/main/utils/formatters.py diff --git a/awx/main/log_utils/handlers.py b/awx/main/utils/handlers.py similarity index 100% rename from awx/main/log_utils/handlers.py rename to awx/main/utils/handlers.py diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 1be3f2ca89..1174de726e 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -9,6 +9,7 @@ import djcelery from datetime import timedelta from kombu import Queue, Exchange +from kombu.common import Broadcast # global settings from django.conf import global_settings @@ -373,6 +374,7 @@ CELERY_QUEUES = ( Queue('default', Exchange('default'), routing_key='default'), Queue('jobs', Exchange('jobs'), routing_key='jobs'), Queue('scheduler', Exchange('scheduler', type='topic'), routing_key='scheduler.job.#', durable=False), + Broadcast('broadcast_all') # Projects use a fanout queue, this isn't super well supported ) CELERY_ROUTES = {'awx.main.tasks.run_job': {'queue': 'jobs', @@ -843,7 +845,7 @@ LOGGING = { 'format': '%(asctime)s %(levelname)-8s %(name)s %(message)s', }, 'json': { - '()': 'awx.main.log_utils.formatters.LogstashFormatter' + '()': 'awx.main.utils.formatters.LogstashFormatter' } }, 'handlers': { @@ -867,7 +869,7 @@ LOGGING = { 'formatter': 'simple', }, 'http_receiver': { - 'class': 'awx.main.log_utils.handlers.HTTPSHandler', + 'class': 'awx.main.utils.handlers.HTTPSHandler', 'level': 'INFO', 'formatter': 'json', 'host': '', From ebff4f4f24b359ccd92fa0f928b0d9f2c1e543a3 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Fri, 2 Dec 2016 15:16:00 -0500 Subject: [PATCH 12/14] get rid of out of date logger unit tests --- awx/main/tests/unit/test_log_formatter.py | 32 ----------------------- 1 file changed, 32 deletions(-) delete mode 100644 awx/main/tests/unit/test_log_formatter.py diff --git a/awx/main/tests/unit/test_log_formatter.py b/awx/main/tests/unit/test_log_formatter.py deleted file mode 100644 index 1d3b46328b..0000000000 --- a/awx/main/tests/unit/test_log_formatter.py +++ /dev/null @@ -1,32 +0,0 @@ -from awx.main.utils import format_for_log -import datetime - -# Example data -event_data = { - 'stdout': u'\x1b[0;36mskipping: [host1]\x1b[0m\r\n', u'uuid': u'ffe4858c-ac38-4cab-9192-b07bdbe80502', - u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 376051), 'counter': 17, u'job_id': 209, - u'event': u'runner_on_skipped', 'parent_id': 1937, 'end_line': 24, 'start_line': 23, - u'event_data': { - u'play_pattern': u'all', u'play': u'all', u'task': u'Scan files (Windows)', - u'task_args': u'paths={{ scan_file_paths }}, recursive={{ scan_use_recursive }}, get_checksum={{ scan_use_checksum }}', - u'remote_addr': u'host1', u'pid': 1427, u'play_uuid': 
u'da784361-3811-4ea7-9cc8-46ec758fde66', - u'task_uuid': u'4f9525fd-bc25-4ace-9eb2-adad9fa21a94', u'event_loop': None, - u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', u'playbook': u'scan_facts.yml', - u'task_action': u'win_scan_files', u'host': u'host1', u'task_path': None}} - -event_stats = { - 'stdout': u'asdf', u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 385416), - 'counter': 18, u'job_id': 209, u'event': u'playbook_on_stats', 'parent_id': 1923, - 'end_line': 28, 'start_line': 24, u'event_data': { - u'skipped': {u'host1': 4}, u'ok': {u'host2': 3}, u'changed': {}, - u'pid': 1427, u'dark': {}, u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', - u'playbook': u'scan_facts.yml', u'failures': {}, u'processed': {u'duck': 1} - } -} - - - -def test_format_event(): - log_data = format_for_log(event_data, kind='event') - assert log_data['event_host'] == 'host1' - From 064a40ba92b2fae8f42f4204fbe9aaa956670d4a Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Fri, 2 Dec 2016 16:45:27 -0500 Subject: [PATCH 13/14] Manage null default state for logging integration and DB definition --- awx/conf/apps.py | 5 ++++- awx/main/conf.py | 9 +++++++++ awx/main/management/commands/run_socketio_service.py | 2 +- awx/main/tasks.py | 7 ++++++- awx/main/utils/handlers.py | 8 ++++++++ awx/settings/defaults.py | 4 +++- 6 files changed, 31 insertions(+), 4 deletions(-) diff --git a/awx/conf/apps.py b/awx/conf/apps.py index 8f9980dde9..62ad0085df 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -15,5 +15,8 @@ class ConfConfig(AppConfig): self.module.autodiscover() from .settings import SettingsWrapper SettingsWrapper.initialize() - configure_logging(settings.LOGGING_CONFIG, settings.LOGGING) + if settings.LOG_AGGREGATOR_ENABLED: + LOGGING = settings.LOGGING + LOGGING['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler' + configure_logging(settings.LOGGING_CONFIG, LOGGING) # checks.register(SettingsWrapper._check_settings) diff --git a/awx/main/conf.py b/awx/main/conf.py index 527f262334..1a4d7cb0ea 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -296,3 +296,12 @@ register( category=_('Logging'), category_slug='logging', ) +register( + 'LOG_AGGREGATOR_ENABLED', + field_class=fields.BooleanField, + default=False, + label=_('Flag denoting whether to use the external logger system'), + help_text=_('If not set, only normal settings data will be used to configure loggers.'), + category=_('Logging'), + category_slug='logging', +) diff --git a/awx/main/management/commands/run_socketio_service.py b/awx/main/management/commands/run_socketio_service.py index 6a6404f2b4..9b7e5a61d2 100644 --- a/awx/main/management/commands/run_socketio_service.py +++ b/awx/main/management/commands/run_socketio_service.py @@ -23,7 +23,7 @@ from socketio import socketio_manage from socketio.server import SocketIOServer from socketio.namespace import BaseNamespace -logger = logging.getLogger('awx.main.consumers') +logger = logging.getLogger('awx.main.commands.run_socketio_service') class SocketSession(object): diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 4f6e647b0c..71d65df772 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -92,7 +92,12 @@ def clear_cache_keys(cache_keys): cache.delete_many(set_of_keys) for setting_key in set_of_keys: if setting_key.startswith('LOG_AGGREGATOR_'): - configure_logging(settings.LOGGING_CONFIG, settings.LOGGING) + LOGGING = settings.LOGGING + if settings.LOG_AGGREGATOR_ENABLED: + LOGGING['handlers']['http_receiver']['class'] = 
'awx.main.utils.handlers.HTTPSHandler' + else: + LOGGING['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSNullHandler' + configure_logging(settings.LOGGING_CONFIG, LOGGING) break diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index 23f23c4d5e..28b7c26af7 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -14,6 +14,7 @@ from requests_futures.sessions import FuturesSession # custom from django.conf import settings as django_settings +from django.utils.log import NullHandler # AWX external logging handler, generally designed to be used # with the accompanying LogstashHandler, derives from python-logstash library @@ -36,6 +37,13 @@ def unused_callback(sess, resp): pass +class HTTPSNullHandler(NullHandler): + "Placeholder null handler to allow loading without database access" + + def __init__(self, host, **kwargs): + return super(HTTPSNullHandler, self).__init__() + + class HTTPSHandler(logging.Handler): def __init__(self, fqdn=False, **kwargs): super(HTTPSHandler, self).__init__() diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 1174de726e..ac193c1507 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -825,6 +825,8 @@ TOWER_URL_BASE = "https://towerhost" TOWER_SETTINGS_MANIFEST = {} +LOG_AGGREGATOR_ENABLED = False + # Logging configuration. LOGGING = { 'version': 1, @@ -869,7 +871,7 @@ LOGGING = { 'formatter': 'simple', }, 'http_receiver': { - 'class': 'awx.main.utils.handlers.HTTPSHandler', + 'class': 'awx.main.utils.handlers.HTTPSNullHandler', 'level': 'INFO', 'formatter': 'json', 'host': '', From b17ca58fdd18127f58926e95e159cb0bc13593e7 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Fri, 2 Dec 2016 16:46:02 -0500 Subject: [PATCH 14/14] Trigger local cache flush before broadcasting on settings change --- awx/conf/signals.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awx/conf/signals.py b/awx/conf/signals.py index a2a30b09c1..8ef0005b1f 100644 --- a/awx/conf/signals.py +++ b/awx/conf/signals.py @@ -5,6 +5,7 @@ import logging from django.conf import settings from django.core.signals import setting_changed from django.db.models.signals import post_save, pre_delete, post_delete +from django.core.cache import cache from django.dispatch import receiver # Tower @@ -33,7 +34,7 @@ def handle_setting_change(key, for_delete=False): setting_key_dict[dependent_key] = dependent_key cache_keys = set([Setting.get_cache_key(k) for k in setting_keys]) logger.debug('sending signals to delete cache keys(%r)', cache_keys) - # cache.delete_many(cache_keys) + cache.delete_many(cache_keys) clear_cache_keys.delay(setting_key_dict) # Send setting_changed signal with new value for each setting.
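
The settings-precedence rule that PATCH 07 introduces for the HTTP handler can be reduced to a few lines. The sketch below is illustrative only and is not the shipped handler; FakeSettings and SketchHandler are hypothetical stand-ins for django.conf.settings and awx.main.utils.handlers.HTTPSHandler.

    import logging

    PARAM_NAMES = {
        'host': 'LOG_AGGREGATOR_HOST',
        'port': 'LOG_AGGREGATOR_PORT',
        'message_type': 'LOG_AGGREGATOR_TYPE',
        'username': 'LOG_AGGREGATOR_USERNAME',
        'password': 'LOG_AGGREGATOR_PASSWORD',
    }

    class FakeSettings(object):
        # hypothetical stand-in for django.conf.settings
        LOG_AGGREGATOR_HOST = 'logstash.example.com'
        LOG_AGGREGATOR_PORT = 8085

    class SketchHandler(logging.Handler):
        def __init__(self, settings, **kwargs):
            super(SketchHandler, self).__init__()
            for field, settings_name in PARAM_NAMES.items():
                # a configured setting wins over the handler kwarg;
                # anything left unset falls through to None
                value = getattr(settings, settings_name, None)
                if not value:
                    value = kwargs.get(field)
                setattr(self, field, value)

    handler = SketchHandler(FakeSettings(), host='ignored.example.com', username='awx_logger')
    assert handler.host == 'logstash.example.com'   # setting beats kwarg
    assert handler.username == 'awx_logger'         # kwarg used when no setting exists
    assert handler.message_type is None             # neither was provided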
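
The fact reformatting that PATCH 09 moves into LogstashFormatter hinges on indexing list-style scan data (services, packages, files) by name, replacing '.' with '_' because, per the comment in the patch, Logstash v2 cannot accept '.' in a field name. A small illustrative sketch with made-up sample data:

    from copy import copy

    def index_by_name(alist):
        """Index a list of dicts by their 'name' (or 'path') key."""
        adict = {}
        for item in alist:
            subdict = copy(item)
            name = subdict.get('name') or subdict.get('path')
            if name:
                adict[name.replace('.', '_')] = subdict
        return adict

    services = [{'name': 'sshd.service', 'state': 'running'},
                {'name': 'crond.service', 'state': 'stopped'}]
    print(index_by_name(services))
    # {'sshd_service': {'name': 'sshd.service', 'state': 'running'},
    #  'crond_service': {'name': 'crond.service', 'state': 'stopped'}}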
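
Taken together, PATCH 11 through 14 establish this flow: a settings save flushes the local cache, a task on the new broadcast_all queue repeats the flush on every cluster node, and if a LOG_AGGREGATOR_* key changed, the logging dict config is re-applied with the http_receiver handler switched between HTTPSHandler and HTTPSNullHandler depending on LOG_AGGREGATOR_ENABLED. Below is a condensed, dependency-free sketch of that flow under those assumptions; fake_cache and TRIMMED_LOGGING are stand-ins, not the shipped settings.

    import logging.config

    fake_cache = {'LOG_AGGREGATOR_HOST': 'old-host', 'TOWER_URL_BASE': 'https://towerhost'}

    TRIMMED_LOGGING = {
        'version': 1,
        'handlers': {'console': {'class': 'logging.StreamHandler'}},
        'root': {'handlers': ['console'], 'level': 'INFO'},
    }

    def clear_cache_keys(cache_keys):
        # every node runs this via the broadcast_all queue
        for key in set(cache_keys):
            fake_cache.pop(key, None)
        if any(k.startswith('LOG_AGGREGATOR_') for k in cache_keys):
            # the real task would first swap the http_receiver class
            # based on LOG_AGGREGATOR_ENABLED before re-applying the config
            logging.config.dictConfig(TRIMMED_LOGGING)

    clear_cache_keys(['LOG_AGGREGATOR_HOST'])
    assert 'LOG_AGGREGATOR_HOST' not in fake_cache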