Logging Integration, ELK docker-compose as update file

This commit is contained in:
AlanCoding
2016-10-17 15:00:38 -04:00
parent dc032489c2
commit f3427d1359
28 changed files with 839 additions and 25 deletions

View File

@@ -259,14 +259,16 @@ class ApiV1ConfigView(APIView):
try:
data_actual = json.dumps(request.data)
except Exception:
# FIX: Log
logger.info(smart_text(u"Invalid JSON submitted for Tower license."),
extra=dict(actor=request.user.username))
return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
try:
from awx.main.task_engine import TaskEnhancer
license_data = json.loads(data_actual)
license_data_validated = TaskEnhancer(**license_data).validate_enhancements()
except Exception:
# FIX: Log
logger.warning(smart_text(u"Invalid Tower license submitted."),
extra=dict(actor=request.user.username))
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
# If the license is valid, write it to the database.
@@ -275,6 +277,8 @@ class ApiV1ConfigView(APIView):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated)
logger.warning(smart_text(u"Invalid Tower license submitted."),
extra=dict(actor=request.user.username))
return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request):
@@ -541,12 +545,14 @@ class AuthTokenView(APIView):
reason='')[0]
token.refresh()
if 'username' in request.data:
logger.info(smart_text(u"User {} logged in".format(request.data['username'])))
logger.info(smart_text(u"User {} logged in".format(request.data['username'])),
extra=dict(actor=request.data['username']))
except IndexError:
token = AuthToken.objects.create(user=serializer.validated_data['user'],
request_hash=request_hash)
if 'username' in request.data:
logger.info(smart_text(u"User {} logged in".format(request.data['username'])))
logger.info(smart_text(u"User {} logged in".format(request.data['username'])),
extra=dict(actor=request.data['username']))
# Get user un-expired tokens that are not invalidated that are
# over the configured limit.
# Mark them as invalid and inform the user
@@ -564,7 +570,8 @@ class AuthTokenView(APIView):
}
return Response({'token': token.key, 'expires': token.expires}, headers=headers)
if 'username' in request.data:
logger.warning(smart_text(u"Login failed for user {}".format(request.data['username'])))
logger.warning(smart_text(u"Login failed for user {}".format(request.data['username'])),
extra=dict(actor=request.data['username']))
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
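These audit calls rely on the stdlib logging contract that keys passed via extra become attributes on the LogRecord, which the formatter can later emit as fields. A minimal illustration (names are placeholders, not part of this commit):

import logging

logging.basicConfig(format='%(levelname)s %(actor)s %(message)s')
logger = logging.getLogger('awx.api.views')
# 'actor' becomes record.actor and is rendered by the format string above.
logger.warning('Invalid Tower license submitted.', extra=dict(actor='admin'))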

View File

@@ -223,3 +223,36 @@ register(
category=_('Jobs'),
category_slug='jobs',
)
register(
'LOG_AGGREGATOR_HOST',
field_class=fields.CharField,
label=_('Logging Aggregator Receiving Host'),
help_text=_('External host maintaining a log collector to send logs to'),
category=_('Logging'),
category_slug='logging',
)
register(
'LOG_AGGREGATOR_PORT',
field_class=fields.CharField,
label=_('Logging Aggregator Receiving Port'),
help_text=_('Port that the log collector is listening on'),
category=_('Logging'),
category_slug='logging',
)
register(
'LOG_AGGREGATOR_TYPE',
field_class=fields.CharField,
label=_('Logging Aggregator Type: Logstash, Loggly, Datadog, etc.'),
help_text=_('The type of log aggregator service to format messages for'),
category=_('Logging'),
category_slug='logging',
)
register(
'LOG_AGGREGATOR_USERNAME',
field_class=fields.CharField,
label=_('Logging Aggregator Receiver Username'),
help_text=_('Username for authenticating with Logstash or other aggregators'),
category=_('Logging'),
category_slug='logging',
)
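A minimal sketch (not part of this diff) of reading the registered values back at runtime; registered settings resolve through django.conf.settings like any other Tower configuration entry:

from django.conf import settings

# Fall back to None when the setting has not been configured yet.
host = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
aggregator_type = getattr(settings, 'LOG_AGGREGATOR_TYPE', None)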

View File

@@ -1,5 +1,6 @@
import json
import urlparse
import logging
from channels import Group
from channels.sessions import channel_session
@@ -8,6 +9,8 @@ from django.contrib.auth.models import User
from awx.main.models.organization import AuthToken
logger = logging.getLogger('awx.main.consumers')
def discard_groups(message):
if 'groups' in message.channel_session:
for group in message.channel_session['groups']:
@@ -52,11 +55,13 @@ def ws_receive(message):
auth_token = validate_token(token)
if auth_token is None:
logger.error("Authentication Failure validating user")
message.reply_channel.send({"text": json.dumps({"error": "invalid auth token"})})
return None
user = user_from_token(auth_token)
if user is None:
logger.error("No valid user corresponding to submitted auth_token")
message.reply_channel.send({"text": json.dumps({"error": "no valid user"})})
return None

View File

@@ -0,0 +1,2 @@
# Copyright (c) 2017 Ansible by Red Hat
# All Rights Reserved.

View File

@@ -0,0 +1,59 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
from logstash.formatter import LogstashFormatterVersion1
from django.conf import settings
# # Loggly example
# 'json': {
# 'format': '{
# "loggerName":"%(name)s",
# "asciTime":"%(asctime)s",
# "fileName":"%(filename)s",
# "logRecordCreationTime":"%(created)f",
# "functionName":"%(funcName)s",
# "levelNo":"%(levelno)s",
# "lineNo":"%(lineno)d",
# "time":"%(msecs)d",
# "levelName":"%(levelname)s",
# "message":"%(message)s"}',
# },
class LogstashFormatter(LogstashFormatterVersion1):
def __init__(self, **kwargs):
ret = super(LogstashFormatter, self).__init__(**kwargs)
self.host_id = settings.CLUSTER_HOST_ID
return ret
def get_extra_fields(self, record):
fields = super(LogstashFormatter, self).get_extra_fields(record)
fields['cluster_host_id'] = self.host_id
return fields
def format(self, record):
# Create message dict
message = {
'@timestamp': self.format_timestamp(record.created),
'@version': '1',
'message': record.getMessage(),
'host': self.host,
'path': record.pathname,
'tags': self.tags,
'type': self.message_type,
# Extra Fields
'level': record.levelname,
'logger_name': record.name,
}
# Add extra fields
message.update(self.get_extra_fields(record))
# If exception, add debug info
if record.exc_info:
message.update(self.get_debug_fields(record))
return self.serialize(message)
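A hypothetical usage sketch (not in this commit) attaching the formatter to a stdlib handler; it assumes a configured Django environment so that settings.CLUSTER_HOST_ID resolves:

import logging

from awx.main.log_utils.formatters import LogstashFormatter

handler = logging.StreamHandler()
# Each record is serialized as one JSON document, including the
# cluster_host_id field added by get_extra_fields().
handler.setFormatter(LogstashFormatter(message_type='logstash', fqdn=False))

logger = logging.getLogger('awx.analytics.job_events')
logger.addHandler(handler)
logger.info('Job event data saved.', extra=dict(actor='admin'))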

View File

@@ -0,0 +1,210 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
# common
import socket
import logging
import logging.handlers
# Splunk
import urllib
import json
import gzip
import cStringIO
import requests
from .utils import parse_config_file, get_config_from_env
# loggly
import traceback
from requests_futures.sessions import FuturesSession
# Logstash
from logstash import formatter
ENABLED_LOGS = ['ansible']
# Logstash
# https://github.com/vklochan/python-logstash
class TCPLogstashHandler(logging.handlers.SocketHandler, object):
"""Python logging handler for Logstash. Sends events over TCP.
:param host: The host of the logstash server.
:param port: The port of the logstash server (default 5959).
:param message_type: The type of the message (default logstash).
:param fqdn: Indicates whether to show fully qualified domain name or not (default False).
:param version: version of logstash event schema (default is 0).
:param tags: list of tags for a logger (default is None).
"""
def __init__(self, host, port=5959, message_type='logstash', tags=None, fqdn=False, version=0):
super(TCPLogstashHandler, self).__init__(host, port)
if version == 1:
self.formatter = formatter.LogstashFormatterVersion1(message_type, tags, fqdn)
else:
self.formatter = formatter.LogstashFormatterVersion0(message_type, tags, fqdn)
def makePickle(self, record):
return self.formatter.format(record) + b'\n'
# loggly
# https://github.com/varshneyjayant/loggly-python-handler
session = FuturesSession()
def bg_cb(sess, resp):
""" Don't do anything with the response """
pass
# add port for a generic handler
class HTTPSHandler(logging.Handler):
def __init__(self, host, port=80, message_type='logstash', fqdn=False):
super(HTTPSHandler, self).__init__()
self.host_saved = host
self.port = port
self.message_type = message_type
self.fqdn = fqdn
def get_full_message(self, record):
if record.exc_info:
return '\n'.join(traceback.format_exception(*record.exc_info))
else:
return record.getMessage()
def emit(self, record):
try:
payload = self.format(record)
# TODO: move this enablement logic to rely on individual loggers once
# the enablement config variable is hooked up
payload_data = json.loads(payload)
if payload_data['logger_name'].startswith('awx.analytics.system_tracking'):
st_type = None
for fd in ['services', 'packages', 'files', 'ansible']:
if fd in payload_data:
st_type = fd
break
if st_type not in ENABLED_LOGS:
return
host = self.host_saved
if not host.startswith('http'):
host = 'http://%s' % self.host_saved
if self.port != 80:
host = '%s:%s' % (host, str(self.port))
session.post(host, data=payload, background_callback=bg_cb)
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
# splunk
# https://github.com/andresriancho/splunk-logger
class SplunkLogger(logging.Handler):
"""
A class to send messages to splunk storm using their API
"""
# Required format for splunk storm
INPUT_URL_FMT = 'https://%s/1/inputs/http'
def __init__(self, access_token=None, project_id=None, api_domain=None):
logging.Handler.__init__(self)
self._set_auth(access_token, project_id, api_domain)
self.url = self.INPUT_URL_FMT % self.api_domain
self._set_url_opener()
# Handle errors in authentication
self._auth_failed = False
def _set_auth(self, access_token, project_id, api_domain):
# The access token and project id passed as parameter override the ones
# configured in the .splunk_logger file.
if access_token is not None\
and project_id is not None\
and api_domain is not None:
self.project_id = project_id
self.access_token = access_token
self.api_domain = api_domain
else:
# Try to get the credentials from the configuration file
self.project_id, self.access_token, self.api_domain = parse_config_file()
if self.project_id is None\
or self.access_token is None\
or self.api_domain is None:
# Try to get the credentials from the environment variables
self.project_id, self.access_token, self.api_domain = get_config_from_env()
if self.access_token is None or self.project_id is None:
raise ValueError('Access token, project id and API endpoint domain'
' need to be set.')
def _set_url_opener(self):
# We disable the logging of the requests module to avoid some infinite
# recursion errors that might appear.
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.CRITICAL)
self.session = requests.Session()
self.session.auth = ('x', self.access_token)
self.session.headers.update({'Content-Encoding': 'gzip'})
def usesTime(self):
return False
def _compress(self, input_str):
"""
Compress the log message in order to send fewer bytes over the wire.
"""
compressed_bits = cStringIO.StringIO()
f = gzip.GzipFile(fileobj=compressed_bits, mode='wb')
f.write(input_str)
f.close()
return compressed_bits.getvalue()
def emit(self, record):
if self._auth_failed:
# Don't send anything else once a 401 was returned
return
try:
response = self._send_to_splunk(record)
except (KeyboardInterrupt, SystemExit):
raise
except:
# All errors end here.
self.handleError(record)
else:
if response.status_code == 401:
self._auth_failed = True
def _send_to_splunk(self, record):
# http://docs.splunk.com/Documentation/Storm/latest/User/Sourcesandsourcetypes
sourcetype = 'json_no_timestamp'
host = socket.gethostname()
event_dict = {'data': self.format(record),
'level': record.levelname,
'module': record.module,
'line': record.lineno}
event = json.dumps(event_dict)
event = self._compress(event)
params = {'index': self.project_id,
'sourcetype': sourcetype,
'host': host}
url = '%s?%s' % (self.url, urllib.urlencode(params))
return self.session.post(url, data=event)
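A hypothetical sketch of the TCP variant (not exercised by this commit); host and port are assumptions for a local Logstash TCP input:

import logging

from awx.main.log_utils.handlers import TCPLogstashHandler

logger = logging.getLogger('awx.analytics.activity_stream')
logger.setLevel(logging.INFO)
# SocketHandler connects lazily on the first emit and retries on failure,
# so this is safe to set up before Logstash is reachable.
logger.addHandler(TCPLogstashHandler('localhost', port=5959, version=1))
logger.info('Activity Stream update entry', extra=dict(actor='admin'))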

View File

@@ -0,0 +1,57 @@
import os
import yaml
def parse_config_file():
"""
Find the .splunk_logger config file in the current directory, or in the
user's home directory, and parse it. The one in the current directory
takes precedence.
:return: A tuple with:
- project_id
- access_token
- api_domain
"""
for filename in ('.splunk_logger', os.path.expanduser('~/.splunk_logger')):
project_id, access_token, api_domain = _parse_config_file_impl(filename)
if project_id is not None\
and access_token is not None\
and api_domain is not None:
return project_id, access_token, api_domain
else:
return None, None, None
def _parse_config_file_impl(filename):
"""
Format for the file is:
credentials:
project_id: ...
access_token: ...
api_domain: ...
:param filename: The filename to parse
:return: A tuple with:
- project_id
- access_token
- api_domain
"""
try:
doc = yaml.safe_load(open(filename).read())
project_id = doc["credentials"]["project_id"]
access_token = doc["credentials"]["access_token"]
api_domain = doc["credentials"]["api_domain"]
return project_id, access_token, api_domain
except:
return None, None, None
def get_config_from_env():
return (os.environ.get('SPLUNK_PROJECT_ID', None),
os.environ.get('SPLUNK_ACCESS_TOKEN', None),
os.environ.get('SPLUNK_API_DOMAIN', None))
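A sketch of the environment-variable path (values are placeholders); with no constructor arguments and no .splunk_logger file, SplunkLogger falls through to get_config_from_env():

import os

os.environ['SPLUNK_PROJECT_ID'] = 'example-project'
os.environ['SPLUNK_ACCESS_TOKEN'] = 'example-token'
os.environ['SPLUNK_API_DOMAIN'] = 'api.splunkstorm.com'

from awx.main.log_utils.handlers import SplunkLogger

# Credentials resolve in order: constructor args, config file, environment.
handler = SplunkLogger()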

View File

@@ -16,8 +16,10 @@ from django.utils import timezone
# AWX
from awx.main.models.fact import Fact
from awx.main.models.inventory import Host
from awx.main.utils import format_for_log
logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
data_logger = logging.getLogger('awx.analytics.system_tracking')
class FactBrokerWorker(ConsumerMixin):
@@ -83,6 +85,7 @@ class FactBrokerWorker(ConsumerMixin):
# Create new Fact entry
fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
data_logger.info('Received message with fact data', extra=format_for_log({module_name: facts}, kind="fact"))
return fact_obj

View File

@@ -23,7 +23,7 @@ from socketio import socketio_manage
from socketio.server import SocketIOServer
from socketio.namespace import BaseNamespace
logger = logging.getLogger('awx.main.commands.run_socketio_service')
logger = logging.getLogger('awx.main.consumers')
class SocketSession(object):

View File

@@ -16,6 +16,7 @@ from awx.api.authentication import TokenAuthentication
logger = logging.getLogger('awx.main.middleware')
analytics_logger = logging.getLogger('awx.analytics.activity_stream')
class ActivityStreamMiddleware(threading.local):
@@ -46,6 +47,10 @@ class ActivityStreamMiddleware(threading.local):
instance.actor = drf_user
try:
instance.save(update_fields=['actor'])
analytics_logger.info('Activity Stream update entry for %s' % str(instance.object1),
extra=dict(changes=instance.changes, relationship=instance.object_relationship_type,
actor=drf_user.username, operation=instance.operation,
object1=instance.object1, object2=instance.object2))
except IntegrityError:
logger.debug("Integrity Error saving Activity Stream instance for id : " + str(instance.id))
# else:

View File

@@ -32,6 +32,7 @@ from awx.main.models.notifications import (
from awx.main.utils import (
ignore_inventory_computed_fields,
parse_yaml_or_json,
format_for_log
)
from awx.main.redact import PlainTextCleaner
from awx.main.fields import ImplicitRoleField
@@ -43,6 +44,7 @@ from awx.main.consumers import emit_channel_notification
logger = logging.getLogger('awx.main.models.jobs')
event_logger = logging.getLogger('awx.analytics.job_events')
__all__ = ['JobTemplate', 'Job', 'JobHostSummary', 'JobEvent', 'SystemJobOptions', 'SystemJobTemplate', 'SystemJob']
@@ -1186,6 +1188,9 @@ class JobEvent(CreatedModifiedModel):
if parent_id:
kwargs['parent_id'] = parent_id
# event_logger.info('Body: {}'.format(str(data_for_log)), extra=data_for_log)
event_logger.info('Job event data saved.', extra=format_for_log(kwargs, kind='event'))
job_event = JobEvent.objects.create(**kwargs)
# Cache this job event ID vs. UUID for future parent lookups.
@@ -1196,7 +1201,6 @@ class JobEvent(CreatedModifiedModel):
# Save artifact data to parent job (if provided).
if artifact_data:
artifact_dict = json.loads(artifact_data)
event_data = kwargs.get('event_data', None)
if event_data and isinstance(event_data, dict):
res = event_data.get('res', None)
if res and isinstance(res, dict):

View File

@@ -20,7 +20,7 @@ from crum.signals import current_user_getter
from awx.main.models import * # noqa
from awx.api.serializers import * # noqa
from awx.main.utils import model_instance_diff, model_to_dict, camelcase_to_underscore
from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates
from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates, format_for_log
from awx.main.tasks import update_inventory_computed_fields
from awx.main.consumers import emit_channel_notification
@@ -28,6 +28,7 @@ from awx.main.consumers import emit_channel_notification
__all__ = []
logger = logging.getLogger('awx.main.signals')
analytics_logger = logging.getLogger('awx.analytics.activity_stream')
# Update has_active_failures for inventory/groups when a Host/Group is deleted,
# when a Host-Group or Group-Group relationship is updated, or when a Job is deleted
@@ -369,11 +370,15 @@ def activity_stream_create(sender, instance, created, **kwargs):
if type(instance) == Job:
if 'extra_vars' in changes:
changes['extra_vars'] = instance.display_extra_vars()
changes_dict = json.dumps(changes)
activity_entry = ActivityStream(
operation='create',
object1=object1,
changes=json.dumps(changes))
changes=changes_dict)
activity_entry.save()
# analytics_logger.info('Activity Stream create entry for %s' % str(object1),
# extra=format_for_log(changes, kind='activity_stream',
# actor=activity_entry.actor, operation='update', object1=object1))
#TODO: Weird situation where cascade SETNULL doesn't work
# it might actually be a good idea to remove all of these FK references since
# we don't really use them anyway.
@@ -401,6 +406,9 @@ def activity_stream_update(sender, instance, **kwargs):
object1=object1,
changes=json.dumps(changes))
activity_entry.save()
# analytics_logger.info('Activity Stream update entry for %s' % str(object1),
# extra=format_for_log(changes, kind='activity_stream',
# actor=activity_entry.actor, operation='update', object1=object1))
if instance._meta.model_name != 'setting': # Is not conf.Setting instance
getattr(activity_entry, object1).add(instance)

View File

@@ -0,0 +1,31 @@
from awx.main.utils import format_for_log
import datetime
# Example data
event_data = {
'stdout': u'\x1b[0;36mskipping: [host1]\x1b[0m\r\n', u'uuid': u'ffe4858c-ac38-4cab-9192-b07bdbe80502',
u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 376051), 'counter': 17, u'job_id': 209,
u'event': u'runner_on_skipped', 'parent_id': 1937, 'end_line': 24, 'start_line': 23,
u'event_data': {u'play_pattern': u'all', u'play': u'all', u'task': u'Scan files (Windows)',
u'task_args': u'paths={{ scan_file_paths }}, recursive={{ scan_use_recursive }}, get_checksum={{ scan_use_checksum }}',
u'remote_addr': u'host1', u'pid': 1427, u'play_uuid': u'da784361-3811-4ea7-9cc8-46ec758fde66',
u'task_uuid': u'4f9525fd-bc25-4ace-9eb2-adad9fa21a94', u'event_loop': None,
u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e', u'playbook': u'scan_facts.yml',
u'task_action': u'win_scan_files', u'host': u'host1', u'task_path': None}}
event_stats = {
'stdout': u'asdf', u'created': datetime.datetime(2016, 11, 10, 14, 59, 16, 385416),
'counter': 18, u'job_id': 209, u'event': u'playbook_on_stats', 'parent_id': 1923,
'end_line': 28, 'start_line': 24, u'event_data': {
u'skipped': {u'host1': 4}, u'ok': {u'host2': 3}, u'changed': {},
u'pid': 1427, u'dark': {}, u'playbook_uuid': u'653fd95e-f718-428e-9df0-3f279df9f07e',
u'playbook': u'scan_facts.yml', u'failures': {}, u'processed': {u'duck': 1}
}
}
def test_format_event():
log_data = format_for_log(event_data, kind='event')
assert log_data['event_host'] == 'host1'

View File

@@ -17,6 +17,7 @@ import urlparse
import threading
import contextlib
import tempfile
from copy import copy
# Decorator
from decorator import decorator
@@ -824,3 +825,64 @@ class OutputEventFilter(object):
self._current_event_data = next_event_data
else:
self._current_event_data = None
def format_for_log(raw_data, kind=None, **kwargs):
'''
Process dictionaries from various contexts (job events, activity stream
changes, etc.) into meaningful log data.
Returns a dictionary that will be passed, in logstash or syslog format,
to the logging receiver.
'''
rename_fields = set((
'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module',
'msecs', 'message', 'msg', 'name', 'pathname', 'process',
'processName', 'relativeCreated', 'thread', 'threadName', 'extra',
'auth_token', 'tags', 'host', 'host_id', 'level', 'port', 'uuid'))
data = copy(raw_data)
if isinstance(data, basestring):
data = json.loads(data)
data.update(kwargs)
skip_fields = ('res', 'password', 'event_data', 'stdout')
data_for_log = {}
def index_by_name(alist):
"""Takes a list of dictionaries with `name` as a key in each dict
and returns a dictionary indexed by those names"""
adict = {}
for item in alist:
subdict = copy(item)
name = subdict.pop('name', None)
if name:
# Logstash v2 cannot accept '.' in a name
name = name.replace('.', '_')
adict[name] = subdict
return adict
if kind == 'event':
data.update(data.get('event_data', {}))
for fd in data:
if fd in skip_fields:
continue
key = fd
if fd in rename_fields:
key = 'event_%s' % fd
if type(data[fd]) is dict:
data_for_log[key] = len(data[fd])
else:
data_for_log[key] = data[fd]
elif kind == 'fact':
if 'services' in data:
data_for_log['services'] = index_by_name(data['services'])
elif 'packages' in data:
data_for_log['packages'] = index_by_name(data['packages'])
elif 'files' in data:
data_for_log['files'] = index_by_name(data['files'])
elif 'ansible' in data:
data_for_log['ansible'] = data['ansible']
# Remove sub-keys with data type conflicts in elastic search
data_for_log['ansible'].pop('ansible_python_version', None)
data_for_log['ansible']['ansible_python'].pop('version_info', None)
else:
data_for_log['facts'] = data
return data_for_log
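For illustration, a hypothetical run of the 'fact' branch; note index_by_name() replacing '.' with '_' so Logstash accepts the key:

from awx.main.utils import format_for_log

data = {'services': [{'name': 'sshd.service', 'state': 'running'}]}
print(format_for_log(data, kind='fact'))
# {'services': {'sshd_service': {'state': 'running'}}}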

View File

@@ -842,6 +842,19 @@ LOGGING = {
'simple': {
'format': '%(asctime)s %(levelname)-8s %(name)s %(message)s',
},
'logstash': {
'()': 'awx.main.log_utils.formatters.LogstashFormatter'
},
'json': {
'()': 'awx.main.log_utils.formatters.LogstashFormatter'
}
# From loggly examples
# 'json': {
# 'format': '{ "loggerName":"%(name)s", "asciTime":"%(asctime)s", "fileName":"%(filename)s", "logRecordCreationTime":"%(created)f", "functionName":"%(funcName)s", "levelNo":"%(levelno)s", "lineNo":"%(lineno)d", "time":"%(msecs)d", "levelName":"%(levelname)s", "message":"%(message)s"}',
# },
# 'json': {
# 'format': '{"message": %(message)s}',
# },
},
'handlers': {
'console': {
@@ -863,6 +876,24 @@ LOGGING = {
'class': 'django.utils.log.NullHandler',
'formatter': 'simple',
},
'http_receiver': {
'class': 'awx.main.log_utils.handlers.HTTPSHandler',
'level': 'INFO',
'formatter': 'json',
'host': '',
},
'logstash': {
'level': 'INFO',
'class': 'awx.main.log_utils.handlers.HTTPSHandler',
'host': 'logstash', # IP/name of our Logstash EC2 instance
'port': 8085,
# 'port': 5000,
# 'version': 1,
'message_type': 'logstash',
'fqdn': True,
# 'tags': ['tower'],
'formatter': 'json'
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
@@ -939,7 +970,6 @@ LOGGING = {
'django.request': {
'handlers': ['mail_admins', 'console', 'file', 'tower_warnings'],
'level': 'WARNING',
'propagate': False,
},
'rest_framework.request': {
'handlers': ['mail_admins', 'console', 'file', 'tower_warnings'],
@@ -954,29 +984,30 @@ LOGGING = {
'level': 'DEBUG',
},
'awx.conf': {
'handlers': ['console', 'file', 'tower_warnings'],
'handlers': ['null'],
'level': 'WARNING',
'propagate': False,
},
'awx.conf.settings': {
'handlers': ['null'],
'level': 'WARNING',
},
'awx.main': {
'handlers': ['null']
},
'awx.main.commands.run_callback_receiver': {
'handlers': ['console', 'file', 'callback_receiver'],
'propagate': False
},
'awx.main.commands.run_socketio_service': {
'handlers': ['console', 'file', 'socketio_service'],
'propagate': False
'handlers': ['callback_receiver'],
},
'awx.main.tasks': {
'handlers': ['console', 'file', 'task_system'],
'propagate': False
'handlers': ['task_system']
},
'awx.main.scheduler': {
'handlers': ['console', 'file', 'task_system'],
'propagate': False
'handlers': ['task_system'],
},
'awx.main.consumers': {
'handlers': ['null']
},
'awx.main.commands.run_fact_cache_receiver': {
'handlers': ['console', 'file', 'fact_receiver'],
'propagate': False
'handlers': ['fact_receiver'],
},
'awx.main.access': {
'handlers': ['null'],
@@ -990,6 +1021,23 @@ LOGGING = {
'handlers': ['null'],
'propagate': False,
},
'awx.analytics': {
'handlers': ['null'],
'level': 'INFO',
'propagate': False
},
'awx.analytics.job_events': {
'handlers': ['null'],
'level': 'INFO'
},
'awx.analytics.activity_stream': {
'handlers': ['null'],
'level': 'INFO'
},
'awx.analytics.system_tracking': {
'handlers': ['null'],
'level': 'INFO'
},
'django_auth_ldap': {
'handlers': ['console', 'file', 'tower_warnings'],
'level': 'DEBUG',

View File

@@ -52,7 +52,6 @@ TOWER_VENV_PATH = "/var/lib/awx/venv/tower"
LOGGING['handlers']['tower_warnings']['filename'] = '/var/log/tower/tower.log'
LOGGING['handlers']['callback_receiver']['filename'] = '/var/log/tower/callback_receiver.log'
LOGGING['handlers']['socketio_service']['filename'] = '/var/log/tower/socketio_service.log'
LOGGING['handlers']['task_system']['filename'] = '/var/log/tower/task_system.log'
LOGGING['handlers']['fact_receiver']['filename'] = '/var/log/tower/fact_receiver.log'
LOGGING['handlers']['system_tracking_migrations']['filename'] = '/var/log/tower/tower_system_tracking_migrations.log'
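A hypothetical local-settings override (not part of this commit) pointing the new 'logstash' handler at an external aggregator; the hostname is a placeholder:

# Route the analytics loggers to an external aggregator instead of 'null'.
LOGGING['handlers']['logstash']['host'] = 'logstash.example.com'
LOGGING['handlers']['logstash']['port'] = 8085
LOGGING['loggers']['awx.analytics']['handlers'] = ['logstash']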