slightly refactor isolated event consumption
@@ -187,39 +187,7 @@ class IsolatedManager(object):
                     self.private_data_dir,
                     extravars=extravars)
                 status, rc = runner_obj.status, runner_obj.rc
 
-                # discover new events and ingest them
-                events_path = self.path_to('artifacts', self.ident, 'job_events')
-
-                # it's possible that `events_path` doesn't exist *yet*, because runner
-                # hasn't actually written any events yet (if you ran e.g., a sleep 30)
-                # only attempt to consume events if any were rsynced back
-                if os.path.exists(events_path):
-                    for event in set(os.listdir(events_path)) - self.handled_events:
-                        path = os.path.join(events_path, event)
-                        if os.path.exists(path):
-                            try:
-                                event_data = json.load(
-                                    open(os.path.join(events_path, event), 'r')
-                                )
-                            except json.decoder.JSONDecodeError:
-                                # This means the event we got back isn't valid JSON
-                                # that can happen if runner is still partially
-                                # writing an event file while it's rsyncing
-                                # these event writes are _supposed_ to be atomic
-                                # but it doesn't look like they actually are in
-                                # practice
-                                # in this scenario, just ignore this event and try it
-                                # again on the next sync
-                                pass
-                            event_data.setdefault(self.event_data_key, self.instance.id)
-                            dispatcher.dispatch(event_data)
-                            self.handled_events.add(event)
-
-                            # handle artifacts
-                            if event_data.get('event_data', {}).get('artifact_data', {}):
-                                self.instance.artifacts = event_data['event_data']['artifact_data']
-                                self.instance.save(update_fields=['artifacts'])
-
+                self.consume_events(dispatcher)
                 last_check = time.time()
 
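The hunk above collapses the inline event-ingestion loop into a single self.consume_events(dispatcher) call inside the polling loop. Below is a minimal, self-contained sketch of the incremental-consumption pattern that loop relies on: list the rsynced job_events directory, process only filenames not seen before, and skip files whose JSON is still being written. The names here (consume_new_events, handled, dispatch) are illustrative, not part of the AWX API.

import json
import os

def consume_new_events(events_path, handled, dispatch):
    """Illustrative sketch: ingest any event files not processed yet."""
    if not os.path.exists(events_path):
        # nothing has been rsynced back from the isolated node yet
        return
    for name in set(os.listdir(events_path)) - handled:
        path = os.path.join(events_path, name)
        try:
            with open(path, 'r') as f:
                event = json.load(f)
        except json.JSONDecodeError:
            # the file may still be mid-write during rsync; retry next poll
            continue
        dispatch(event)
        handled.add(name)

In this sketch a file that fails to parse is deliberately left out of handled so it gets retried on a later poll; the diff's version instead uses a bare pass and relies on the next sync.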
@@ -231,6 +199,10 @@ class IsolatedManager(object):
         with open(rc_path, 'r') as f:
             rc = int(f.readline())
 
+        # consume events one last time just to be sure we didn't miss anything
+        # in the final sync
+        self.consume_events(dispatcher)
+
         # emit an EOF event
         event_data = {
             'event': 'EOF',
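The added call above runs after the job's return code has been read, so any events copied back by the last rsync are dispatched before the EOF marker. A small sketch of that shutdown ordering, with hypothetical names (finish, manager) standing in for the surrounding method:

def finish(manager, dispatcher):
    # drain whatever the final sync brought back...
    manager.consume_events(dispatcher)
    # ...then emit a terminal event so downstream consumers can stop reading
    # (the real EOF dict in the diff above carries additional fields)
    dispatcher.dispatch({'event': 'EOF'})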
@@ -241,6 +213,41 @@ class IsolatedManager(object):
 
         return status, rc
 
+    def consume_events(self, dispatcher):
+        # discover new events and ingest them
+        events_path = self.path_to('artifacts', self.ident, 'job_events')
+
+        # it's possible that `events_path` doesn't exist *yet*, because runner
+        # hasn't actually written any events yet (if you ran e.g., a sleep 30)
+        # only attempt to consume events if any were rsynced back
+        if os.path.exists(events_path):
+            for event in set(os.listdir(events_path)) - self.handled_events:
+                path = os.path.join(events_path, event)
+                if os.path.exists(path):
+                    try:
+                        event_data = json.load(
+                            open(os.path.join(events_path, event), 'r')
+                        )
+                    except json.decoder.JSONDecodeError:
+                        # This means the event we got back isn't valid JSON
+                        # that can happen if runner is still partially
+                        # writing an event file while it's rsyncing
+                        # these event writes are _supposed_ to be atomic
+                        # but it doesn't look like they actually are in
+                        # practice
+                        # in this scenario, just ignore this event and try it
+                        # again on the next sync
+                        pass
+                    event_data.setdefault(self.event_data_key, self.instance.id)
+                    dispatcher.dispatch(event_data)
+                    self.handled_events.add(event)
+
+                    # handle artifacts
+                    if event_data.get('event_data', {}).get('artifact_data', {}):
+                        self.instance.artifacts = event_data['event_data']['artifact_data']
+                        self.instance.save(update_fields=['artifacts'])
+
+
     def cleanup(self):
         # If the job failed for any reason, make a last-ditch effort at cleanup
         extravars = {
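The tail of the new consume_events method promotes any artifact_data carried on an event into the job record before saving it. A standalone sketch of that lookup, using a plain dict in place of the AWX job model; extract_artifacts is an illustrative helper, not an AWX function:

def extract_artifacts(event_data):
    """Return the artifact payload carried by a runner event, if any."""
    return event_data.get('event_data', {}).get('artifact_data', {})

# example event of the shape consume_events() reads from job_events/*.json
event = {
    'event': 'runner_on_ok',
    'event_data': {'artifact_data': {'registered_fact': 42}},
}
artifacts = extract_artifacts(event)
if artifacts:
    # in the diff above this is persisted via
    # self.instance.save(update_fields=['artifacts'])
    print(artifacts)  # {'registered_fact': 42}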