More consistently provide fields in job_events logger

* Change the scheme from passing an event dict to passing the JobEvent object
* Add processing to grok the object's fields
* Allow override of the provided formatter in case of future issues
Authored by AlanCoding on 2017-06-13 09:36:54 -04:00; committed by Matthew Jones
parent 7d12427497
commit 459c33d272
5 changed files with 90 additions and 21 deletions

View File

@@ -1289,10 +1289,10 @@ class JobEvent(CreatedModifiedModel):
         if event_data:
             artifact_dict = event_data.pop('artifact_data', None)
-        analytics_logger.info('Job event data saved.', extra=dict(event_model_data=kwargs))
         job_event = JobEvent.objects.create(**kwargs)
+        analytics_logger.info('Job event data saved.', extra=dict(python_objects=dict(job_event=job_event)))
         # Save artifact data to parent job (if provided).
         if artifact_dict:
         if event_data and isinstance(event_data, dict):

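For context on the new `extra=dict(python_objects=...)` contract: the stdlib logging module copies each key of `extra` onto the emitted `LogRecord`, which is how the formatter change below can pull the saved model instance back off the record. A minimal standalone sketch of that mechanism (the formatter and logger names here are illustrative, not AWX's wiring):

```python
import logging

class PythonObjectsFormatter(logging.Formatter):
    # Illustrative only: reads the 'python_objects' attribute that
    # logger.info(..., extra=...) copied onto the LogRecord.
    def format(self, record):
        objs = getattr(record, 'python_objects', {})
        return '{} {}'.format(record.getMessage(), sorted(objs.keys()))

logger = logging.getLogger('demo')
handler = logging.StreamHandler()
handler.setFormatter(PythonObjectsFormatter())
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info('Job event data saved.',
            extra=dict(python_objects=dict(job_event=object())))
# emits: Job event data saved. ['job_event']
```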
View File

@@ -0,0 +1,24 @@
+from awx.main.models import Job, JobEvent
+
+from awx.main.utils.formatters import LogstashFormatter
+
+
+def test_log_from_job_event_object():
+    job = Job(id=4)
+    event = JobEvent(job_id=job.id)
+    formatter = LogstashFormatter()
+
+    data_for_log = formatter.reformat_data_for_log(
+        dict(python_objects=dict(job_event=event)), kind='job_events')
+
+    # Check entire body of data for any exceptions from getattr on event object
+    for fd in data_for_log:
+        if not isinstance(data_for_log[fd], basestring):
+            continue
+        assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd])
+
+    # Verify existence of certain high-importance fields
+    for fd in ['changed', 'uuid', 'start_line', 'end_line', 'id', 'counter', 'host_name', 'stdout']:
+        assert fd in data_for_log
+
+    assert data_for_log['job'] == 4
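Assuming the new test lands under AWX's unit-test tree (the exact path below is a guess), it can be run on its own with pytest:

```
py.test awx/main/tests/unit/utils/test_formatters.py -k test_log_from_job_event_object
```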

View File

@@ -37,13 +37,7 @@ class LogstashFormatter(LogstashFormatterVersion1):
         '''
         if kind == 'activity_stream':
             return raw_data
-        rename_fields = set((
-            'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
-            'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module',
-            'msecs', 'msecs', 'message', 'msg', 'name', 'pathname', 'process',
-            'processName', 'relativeCreated', 'thread', 'threadName', 'extra',
-            'auth_token', 'tags', 'host', 'host_id', 'level', 'port', 'uuid'))
-        if kind == 'system_tracking':
+        elif kind == 'system_tracking':
             data = copy(raw_data['ansible_facts'])
         elif kind == 'job_events':
             data = copy(raw_data['event_model_data'])
@@ -51,7 +45,6 @@ class LogstashFormatter(LogstashFormatterVersion1):
             data = copy(raw_data)
         if isinstance(data, basestring):
             data = json.loads(data)
-        skip_fields = ('res', 'password', 'event_data', 'stdout')
         data_for_log = {}
         def index_by_name(alist):
@@ -86,18 +79,31 @@ class LogstashFormatter(LogstashFormatterVersion1):
            return val
         if kind == 'job_events':
-            data.update(data.get('event_data', {}))
-            for fd in data:
-                if fd in skip_fields:
+            job_event = raw_data['python_objects']['job_event']
+            for field_object in job_event._meta.fields:
+                if not field_object.__class__ or not field_object.__class__.__name__:
+                    field_class_name = ''
+                else:
+                    field_class_name = field_object.__class__.__name__
+                if field_class_name in ['ManyToOneRel', 'ManyToManyField']:
                     continue
+                fd = field_object.name
                 key = fd
                 if fd in rename_fields:
                     key = 'event_%s' % fd
-                val = data[fd]
-                if key.endswith('created'):
-                    time_float = time.mktime(data[fd].timetuple())
-                    val = self.format_timestamp(time_float)
-                data_for_log[key] = val
+                if field_class_name == 'ForeignKey':
+                    fd = '{}_id'.format(field_object.name)
+                try:
+                    data_for_log[key] = getattr(job_event, fd)
+                    if fd in ['created', 'modified'] and data_for_log[key] is not None:
+                        time_float = time.mktime(data_for_log[key].timetuple())
+                        data_for_log[key] = self.format_timestamp(time_float)
+                except Exception as e:
+                    data_for_log[key] = 'Exception `{}` producing field'.format(e)
+            data_for_log['event_display'] = job_event.get_event_display2()
         elif kind == 'system_tracking':
             data.pop('ansible_python_version', None)
             if 'ansible_python' in data:

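The rewritten loop above leans on standard Django model introspection: `Model._meta.fields` yields the concrete field objects, a field's class name distinguishes relations from plain columns, and a ForeignKey's raw value is readable as `<name>_id` without a database query. A standalone sketch of that pattern, with hypothetical `Author`/`Book` models standing in for `JobEvent` (assumes a configured Django 1.x project with the models in an installed app, matching 2017-era AWX):

```python
from django.db import models

class Author(models.Model):
    name = models.CharField(max_length=100)

class Book(models.Model):
    title = models.CharField(max_length=100)
    # Django 1.x allowed ForeignKey without on_delete, as in 2017-era AWX
    author = models.ForeignKey(Author)

book = Book(title='Example', author_id=7)
serialized = {}
for field_object in Book._meta.fields:
    fd = field_object.name
    if field_object.__class__.__name__ == 'ForeignKey':
        # '<name>_id' reads the raw key stored on the instance; no DB query
        fd = '{}_id'.format(field_object.name)
    serialized[fd] = getattr(book, fd)
# serialized == {'id': None, 'title': 'Example', 'author_id': 7}
```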
View File

@@ -26,6 +26,7 @@ from awx.main.utils.formatters import LogstashFormatter
 __all__ = ['HTTPSNullHandler', 'BaseHTTPSHandler', 'TCPHandler', 'UDPHandler',
            'configure_external_logger']
 logger = logging.getLogger('awx.main.utils.handlers')
 # AWX external logging handler, generally designed to be used
@@ -346,7 +347,15 @@ def configure_external_logger(settings_module, is_startup=True):
     if is_enabled:
         handler_class = HANDLER_MAPPING[settings_module.LOG_AGGREGATOR_PROTOCOL]
         instance = handler_class.from_django_settings(settings_module)
-        instance.setFormatter(LogstashFormatter(settings_module=settings_module))
+        # Obtain the Formatter class from settings to maintain customizations
+        configurator = logging.config.DictConfigurator(settings_module.LOGGING)
+        formatter_config = settings_module.LOGGING['formatters']['json'].copy()
+        formatter_config['settings_module'] = settings_module
+        formatter = configurator.configure_custom(formatter_config)
+        instance.setFormatter(formatter)
         awx_logger_instance = instance
     if is_enabled and 'awx' not in settings_module.LOG_AGGREGATOR_LOGGERS:
         awx_logger_instance = None

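The handler change reuses the stdlib machinery behind `dictConfig`'s custom-factory syntax: `DictConfigurator.configure_custom` pops the `'()'` key from a config dict, resolves it to a callable if needed, and invokes it with the remaining keys as keyword arguments, so the `formatters['json']` entry in settings can name any formatter class. A hedged sketch of that behavior in isolation (the formatter class here is a stand-in, not AWX's):

```python
import logging
import logging.config

class JsonishFormatter(logging.Formatter):
    # Stand-in for a project formatter that accepts extra kwargs.
    def __init__(self, settings_module=None, **kwargs):
        self.settings_module = settings_module
        logging.Formatter.__init__(self, **kwargs)

LOGGING = {
    'version': 1,
    # '()' marks a custom factory; a dotted-path string would also resolve
    'formatters': {'json': {'()': JsonishFormatter}},
}

configurator = logging.config.DictConfigurator(LOGGING)
# .copy() matters: configure_custom pops the '()' key out of the dict it gets
formatter_config = LOGGING['formatters']['json'].copy()
formatter_config['settings_module'] = object()  # stand-in settings module
formatter = configurator.configure_custom(formatter_config)
assert isinstance(formatter, JsonishFormatter)
```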
View File

@@ -1,2 +1,32 @@
 docker build --no-cache=true --rm=true -t ansible/tower_devel:latest .
 docker run --name tower_test -it --memory="4g" --cpuset="0,1" -v /Users/meyers/ansible/:/tower_devel -p 8013:8013 -p 8080:8080 -p 27017:27017 -p 2222:22 ansible/tower_devel
+
+## How to use the logstash container
+
+POST the following content to `/api/v1/settings/logging/` (the username and
+password below match the authentication set up inside the logstash
+configuration file).
+
+```
+{
+    "LOG_AGGREGATOR_HOST": "logstash",
+    "LOG_AGGREGATOR_PORT": 8085,
+    "LOG_AGGREGATOR_TYPE": "logstash",
+    "LOG_AGGREGATOR_USERNAME": "awx_logger",
+    "LOG_AGGREGATOR_PASSWORD": "workflows",
+    "LOG_AGGREGATOR_LOGGERS": [
+        "awx",
+        "activity_stream",
+        "job_events",
+        "system_tracking"
+    ],
+    "LOG_AGGREGATOR_INDIVIDUAL_FACTS": false,
+    "LOG_AGGREGATOR_TOWER_UUID": "991ac7e9-6d68-48c8-bbde-7ca1096653c6",
+    "LOG_AGGREGATOR_ENABLED": true
+}
+```
+
+An example of how to view the most recent logs from the container:
+
+```
+docker exec -i -t $(docker ps -aqf "name=tools_logstash_1") tail -n 50 /logstash.log
+```
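For reference, the settings POST described in the README above can be driven with curl; the port matches the `-p 8013:8013` mapping in the docker run line, while the `admin:password` credentials and the `logging_settings.json` file holding the JSON body are placeholders for a local setup:

```
curl -u admin:password -H "Content-Type: application/json" \
    -X POST -d @logging_settings.json \
    http://localhost:8013/api/v1/settings/logging/
```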