Only clean up tarballs if all of them were shipped successfully

This commit is contained in:
Jeff Bradberry
2021-03-23 10:36:59 -04:00
parent 39886da4b6
commit 1dacd7e8cf

View File

@@ -172,6 +172,7 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
gather_dir = dest.joinpath('stage') gather_dir = dest.joinpath('stage')
gather_dir.mkdir(mode=0o700) gather_dir.mkdir(mode=0o700)
tarfiles = [] tarfiles = []
succeeded = True
# These json collectors are pretty compact, so collect all of them before shipping to analytics. # These json collectors are pretty compact, so collect all of them before shipping to analytics.
data = {} data = {}
@@ -193,7 +194,8 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
if tgzfile is not None: if tgzfile is not None:
tarfiles.append(tgzfile) tarfiles.append(tgzfile)
if collection_type != 'dry-run': if collection_type != 'dry-run':
ship(tgzfile) if not ship(tgzfile):
succeeded = False
with disable_activity_stream(): with disable_activity_stream():
for filename in data: for filename in data:
last_entries[filename.replace('.json', '')] = until last_entries[filename.replace('.json', '')] = until
@@ -228,15 +230,21 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
tarfiles.append(tgzfile) tarfiles.append(tgzfile)
if collection_type != 'dry-run': if collection_type != 'dry-run':
ship(tgzfile) if not ship(tgzfile):
succeeded = False
with disable_activity_stream(): with disable_activity_stream():
last_entries[key] = end last_entries[key] = end
settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder) settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
except Exception: except Exception:
logger.exception("Could not generate metric {}".format(filename)) logger.exception("Could not generate metric {}".format(filename))
with disable_activity_stream(): if collection_type != 'dry-run':
settings.AUTOMATION_ANALYTICS_LAST_GATHER = until if succeeded:
for fpath in tarfiles:
if os.path.exists(fpath):
os.remove(fpath)
with disable_activity_stream():
settings.AUTOMATION_ANALYTICS_LAST_GATHER = until
shutil.rmtree(dest, ignore_errors=True) # clean up individual artifact files shutil.rmtree(dest, ignore_errors=True) # clean up individual artifact files
if not tarfiles: if not tarfiles:
@@ -253,42 +261,38 @@ def ship(path):
""" """
if not path: if not path:
logger.error('Automation Analytics TAR not found') logger.error('Automation Analytics TAR not found')
return return False
if not os.path.exists(path): if not os.path.exists(path):
logger.error('Automation Analytics TAR {} not found'.format(path)) logger.error('Automation Analytics TAR {} not found'.format(path))
return return False
if "Error:" in str(path): if "Error:" in str(path):
return return False
try:
logger.debug('shipping analytics file: {}'.format(path)) logger.debug('shipping analytics file: {}'.format(path))
url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None) url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
if not url: if not url:
logger.error('AUTOMATION_ANALYTICS_URL is not set') logger.error('AUTOMATION_ANALYTICS_URL is not set')
return return False
rh_user = getattr(settings, 'REDHAT_USERNAME', None) rh_user = getattr(settings, 'REDHAT_USERNAME', None)
rh_password = getattr(settings, 'REDHAT_PASSWORD', None) rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
if not rh_user: if not rh_user:
return logger.error('REDHAT_USERNAME is not set') logger.error('REDHAT_USERNAME is not set')
if not rh_password: return False
return logger.error('REDHAT_PASSWORD is not set') if not rh_password:
with open(path, 'rb') as f: logger.error('REDHAT_PASSWORD is not set')
files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)} return False
s = requests.Session() with open(path, 'rb') as f:
s.headers = get_awx_http_client_headers() files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
s.headers.pop('Content-Type') s = requests.Session()
with set_environ(**settings.AWX_TASK_ENV): s.headers = get_awx_http_client_headers()
response = s.post( s.headers.pop('Content-Type')
url, with set_environ(**settings.AWX_TASK_ENV):
files=files, response = s.post(
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", url, files=files, verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31)
auth=(rh_user, rh_password), )
headers=s.headers, # Accept 2XX status_codes
timeout=(31, 31), if response.status_code >= 300:
) logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))
# Accept 2XX status_codes return False
if response.status_code >= 300:
return logger.exception('Upload failed with status {}, {}'.format(response.status_code, response.text)) return True
finally:
# cleanup tar.gz
if os.path.exists(path):
os.remove(path)