Merge pull request #3555 from ryanpetrello/even-more-iso-fixes

fix a variety of bugs in isolated support

Reviewed-by: https://github.com/softwarefactory-project-zuul[bot]
This commit is contained in:
softwarefactory-project-zuul[bot] 2019-03-29 15:13:23 +00:00 committed by GitHub
commit a4ec149344
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 45 additions and 24 deletions

View File

@ -187,28 +187,7 @@ class IsolatedManager(object):
self.private_data_dir,
extravars=extravars)
status, rc = runner_obj.status, runner_obj.rc
# discover new events and ingest them
events_path = self.path_to('artifacts', self.ident, 'job_events')
# it's possible that `events_path` doesn't exist *yet*, because runner
# hasn't actually written any events yet (if you ran e.g., a sleep 30)
# only attempt to consume events if any were rsynced back
if os.path.exists(events_path):
for event in set(os.listdir(events_path)) - self.handled_events:
path = os.path.join(events_path, event)
if os.path.exists(path):
event_data = json.load(
open(os.path.join(events_path, event), 'r')
)
event_data.setdefault(self.event_data_key, self.instance.id)
dispatcher.dispatch(event_data)
self.handled_events.add(event)
# handle artifacts
if event_data.get('event_data', {}).get('artifact_data', {}):
self.instance.artifacts = event_data['event_data']['artifact_data']
self.instance.save(update_fields=['artifacts'])
self.consume_events(dispatcher)
last_check = time.time()
@ -220,6 +199,10 @@ class IsolatedManager(object):
with open(rc_path, 'r') as f:
rc = int(f.readline())
# consume events one last time just to be sure we didn't miss anything
# in the final sync
self.consume_events(dispatcher)
# emit an EOF event
event_data = {
'event': 'EOF',
@ -230,6 +213,41 @@ class IsolatedManager(object):
return status, rc
def consume_events(self, dispatcher):
    """Discover event files rsynced back from the isolated host and
    dispatch any that have not been handled yet.

    :param dispatcher: an object exposing ``dispatch(event_data)``; each
        parsed event payload is forwarded to it exactly once.

    Side effects: adds consumed filenames to ``self.handled_events`` and,
    when an event carries artifact data, persists it on ``self.instance``.
    """
    # discover new events and ingest them
    events_path = self.path_to('artifacts', self.ident, 'job_events')
    # it's possible that `events_path` doesn't exist *yet*, because runner
    # hasn't actually written any events yet (if you ran e.g., a sleep 30)
    # only attempt to consume events if any were rsynced back
    if not os.path.exists(events_path):
        return
    for event in set(os.listdir(events_path)) - self.handled_events:
        path = os.path.join(events_path, event)
        if not os.path.exists(path):
            continue
        try:
            # context manager so the handle is closed even on parse errors
            # (the original leaked the file object returned by open())
            with open(path, 'r') as f:
                event_data = json.load(f)
        except json.decoder.JSONDecodeError:
            # This means the event we got back isn't valid JSON
            # that can happen if runner is still partially
            # writing an event file while it's rsyncing
            # these event writes are _supposed_ to be atomic
            # but it doesn't look like they actually are in
            # practice
            # in this scenario, just ignore this event and try it
            # again on the next sync
            # BUGFIX: must be `continue`, not `pass` -- falling through
            # would raise NameError (first iteration) or re-dispatch the
            # previous iteration's stale event_data, and would wrongly
            # mark the unparsed file as handled.
            continue
        event_data.setdefault(self.event_data_key, self.instance.id)
        dispatcher.dispatch(event_data)
        self.handled_events.add(event)
        # handle artifacts
        if event_data.get('event_data', {}).get('artifact_data', {}):
            self.instance.artifacts = event_data['event_data']['artifact_data']
            self.instance.save(update_fields=['artifacts'])
def cleanup(self):
# If the job failed for any reason, make a last-ditch effort at cleanup
extravars = {

View File

@ -1187,14 +1187,17 @@ class BaseTask(object):
if self.instance.is_isolated() is True:
module_args = None
if 'module_args' in params:
# if it's adhoc, copy the module args
module_args = ansible_runner.utils.args2cmdline(
params.get('module_args'),
)
else:
# otherwise, it's a playbook, so copy the project dir
copy_tree(cwd, os.path.join(private_data_dir, 'project'))
shutil.move(
params.pop('inventory'),
os.path.join(private_data_dir, 'inventory')
)
copy_tree(cwd, os.path.join(private_data_dir, 'project'))
ansible_runner.utils.dump_artifacts(params)
manager_instance = isolated_manager.IsolatedManager(
cancelled_callback=lambda: self.update_model(self.instance.pk).cancel_flag

View File

@ -18,7 +18,7 @@
src: "{{src}}"
dest: "{{dest}}"
- stat: path="{{src}}/env/ssh_key"
- local_action: stat path="{{src}}/env/ssh_key"
register: key
- name: create a named pipe for secret environment data