From 8ce3a14da5ee2377a08247a255cf9daf5b5a13bf Mon Sep 17 00:00:00 2001
From: Jeff Bradberry
Date: Tue, 23 Mar 2021 13:46:21 -0400
Subject: [PATCH] A couple more fixes:

- stop trying to ship csv slices when one breaks
- only update LAST_ENTRIES if all of the files in a time/pk slice succeed
- don't allow an explicit --until parameter to set the GATHER/ENTRIES
  values backwards
---
 awx/main/analytics/core.py | 33 ++++++++++++++++++++-------------
 1 file changed, 20 insertions(+), 13 deletions(-)

diff --git a/awx/main/analytics/core.py b/awx/main/analytics/core.py
index b80db5094c..2671f19601 100644
--- a/awx/main/analytics/core.py
+++ b/awx/main/analytics/core.py
@@ -194,12 +194,14 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
         if tgzfile is not None:
             tarfiles.append(tgzfile)
             if collection_type != 'dry-run':
-                if not ship(tgzfile):
+                if ship(tgzfile):
+                    with disable_activity_stream():
+                        for filename in data:
+                            key = filename.replace('.json', '')
+                            last_entries[key] = max(last_entries[key], until) if last_entries.get(key) else until
+                        settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
+                else:
                     succeeded = False
-                with disable_activity_stream():
-                    for filename in data:
-                        last_entries[filename.replace('.json', '')] = until
-                    settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
 
     for func in csv_collectors:
         key = func.__awx_analytics_key__
@@ -214,9 +216,11 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
 
                 if not files:
                     if collection_type != 'dry-run':
                         with disable_activity_stream():
-                            last_entries[key] = end
+                            last_entries[key] = max(last_entries[key], end) if last_entries.get(key) else end
                             settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
                     continue
+
+                slice_succeeded = True
                 for fpath in files:
                     payload = {filename: (fpath, func.__awx_analytics_version__)}
@@ -228,14 +232,16 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
                     tgzfile = package(dest.parent, payload, until)
                     if tgzfile is not None:
                         tarfiles.append(tgzfile)
+                        if not ship(tgzfile):
+                            slice_succeeded, succeeded = False, False
+                            break
 
-                if collection_type != 'dry-run':
-                    if not ship(tgzfile):
-                        succeeded = False
-                    with disable_activity_stream():
-                        last_entries[key] = end
-                        settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
+                if slice_succeeded and collection_type != 'dry-run':
+                    with disable_activity_stream():
+                        last_entries[key] = max(last_entries[key], end) if last_entries.get(key) else end
+                        settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
         except Exception:
+            succeeded = False
             logger.exception("Could not generate metric {}".format(filename))
 
     if collection_type != 'dry-run':
@@ -244,7 +250,8 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
             if os.path.exists(fpath):
                 os.remove(fpath)
         with disable_activity_stream():
-            settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
+            if not settings.AUTOMATION_ANALYTICS_LAST_GATHER or until > settings.AUTOMATION_ANALYTICS_LAST_GATHER:
+                settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
     shutil.rmtree(dest, ignore_errors=True)  # clean up individual artifact files
 
     if not tarfiles:
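
A note on the technique: the repeated expression
`max(last_entries[key], until) if last_entries.get(key) else until` is a
forward-only watermark update, which is what keeps an explicit --until in
the past from moving the ENTRIES bookkeeping backwards. A minimal
standalone sketch of the idiom; the `advance_watermark` helper, the plain
dict, and the 'example_key' name are hypothetical stand-ins for the
settings-backed LAST_ENTRIES value, not AWX's API:

    from datetime import datetime, timedelta, timezone

    def advance_watermark(last_entries, key, candidate):
        """Move the per-collector timestamp forward, never backwards."""
        current = last_entries.get(key)
        # Same shape as the patch's conditional expression.
        last_entries[key] = max(current, candidate) if current else candidate

    last_entries = {}
    now = datetime.now(timezone.utc)
    advance_watermark(last_entries, 'example_key', now)
    # An explicit --until in the past must not rewind the watermark:
    advance_watermark(last_entries, 'example_key', now - timedelta(days=1))
    assert last_entries['example_key'] == now

The last hunk applies the same guard to AUTOMATION_ANALYTICS_LAST_GATHER,
written as an explicit comparison because that setting may start out unset.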
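
Similarly, the middle hunks make shipping all-or-nothing per time/pk
slice: the first failed upload breaks out of the slice, and the slice's
watermark is only advanced when every file shipped. A simplified sketch of
that control flow; `ship_slice` and the toy `results` table are
illustrative only, and the real code sets `slice_succeeded`/`succeeded`
flags and breaks rather than returning:

    def ship_slice(files, ship):
        """Ship every file in one slice; stop at the first failure."""
        for fpath in files:
            if not ship(fpath):
                return False  # don't keep shipping a broken slice
        return True

    # Toy demo: the third file fails, so the slice does not count as
    # shipped, and its watermark is left alone so the next gather run
    # retries the same time/pk window.
    results = {'a.csv': True, 'b.csv': True, 'c.csv': False}
    assert ship_slice(['a.csv', 'b.csv', 'c.csv'], results.get) is False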