process host facts for iso runs

* Move isolated cleanup into our final run hook
* ISO and non-iso code paths now share the post-fact-processing code
This commit is contained in:
chris meyers
2019-03-29 13:50:29 -04:00
parent 21e5179a84
commit 71fcb1a82c
3 changed files with 21 additions and 25 deletions

View File

@@ -366,5 +366,4 @@ class IsolatedManager(object):
event_data = {'event': 'EOF', 'final_counter': 0} event_data = {'event': 'EOF', 'final_counter': 0}
event_data.setdefault(self.event_data_key, self.instance.id) event_data.setdefault(self.event_data_key, self.instance.id)
CallbackQueueDispatcher().dispatch(event_data) CallbackQueueDispatcher().dispatch(event_data)
self.cleanup()
return status, rc return status, rc

View File

@@ -995,7 +995,7 @@ class BaseTask(object):
Hook for any steps to run before job/task is marked as complete. Hook for any steps to run before job/task is marked as complete.
''' '''
def final_run_hook(self, instance, status, private_data_dir, fact_modification_times): def final_run_hook(self, instance, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
''' '''
Hook for any steps to run after job/task is marked as complete. Hook for any steps to run after job/task is marked as complete.
''' '''
@@ -1088,6 +1088,7 @@ class BaseTask(object):
''' '''
self.safe_env = {} self.safe_env = {}
private_data_dir = None private_data_dir = None
isolated_manager_instance = None
try: try:
isolated = self.instance.is_isolated() isolated = self.instance.is_isolated()
@@ -1199,17 +1200,17 @@ class BaseTask(object):
os.path.join(private_data_dir, 'inventory') os.path.join(private_data_dir, 'inventory')
) )
ansible_runner.utils.dump_artifacts(params) ansible_runner.utils.dump_artifacts(params)
manager_instance = isolated_manager.IsolatedManager( isolated_manager_instance = isolated_manager.IsolatedManager(
cancelled_callback=lambda: self.update_model(self.instance.pk).cancel_flag cancelled_callback=lambda: self.update_model(self.instance.pk).cancel_flag
) )
status, rc = manager_instance.run(self.instance, status, rc = isolated_manager_instance.run(self.instance,
private_data_dir, private_data_dir,
params.get('playbook'), params.get('playbook'),
params.get('module'), params.get('module'),
module_args, module_args,
event_data_key=self.event_data_key, event_data_key=self.event_data_key,
ident=str(self.instance.pk)) ident=str(self.instance.pk))
self.event_ct = len(manager_instance.handled_events) self.event_ct = len(isolated_manager_instance.handled_events)
else: else:
res = ansible_runner.interface.run(**params) res = ansible_runner.interface.run(**params)
status = res.status status = res.status
@@ -1239,7 +1240,7 @@ class BaseTask(object):
**extra_update_fields) **extra_update_fields)
try: try:
self.final_run_hook(self.instance, status, private_data_dir, fact_modification_times) self.final_run_hook(self.instance, status, private_data_dir, fact_modification_times, isolated_manager_instance=isolated_manager_instance)
except Exception: except Exception:
logger.exception('{} Final run hook errored.'.format(self.instance.log_format)) logger.exception('{} Final run hook errored.'.format(self.instance.log_format))
@@ -1568,7 +1569,7 @@ class RunJob(BaseTask):
('project_update', local_project_sync.name, local_project_sync.id))) ('project_update', local_project_sync.name, local_project_sync.id)))
raise raise
def final_run_hook(self, job, status, private_data_dir, fact_modification_times): def final_run_hook(self, job, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
super(RunJob, self).final_run_hook(job, status, private_data_dir, fact_modification_times) super(RunJob, self).final_run_hook(job, status, private_data_dir, fact_modification_times)
if not private_data_dir: if not private_data_dir:
# If there's no private data dir, that means we didn't get into the # If there's no private data dir, that means we didn't get into the
@@ -1580,7 +1581,8 @@ class RunJob(BaseTask):
os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'), os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'),
fact_modification_times, fact_modification_times,
) )
if isolated_manager_instance:
isolated_manager_instance.cleanup()
try: try:
inventory = job.inventory inventory = job.inventory
except Inventory.DoesNotExist: except Inventory.DoesNotExist:
@@ -2340,6 +2342,11 @@ class RunAdHocCommand(BaseTask):
''' '''
return getattr(settings, 'AWX_PROOT_ENABLED', False) return getattr(settings, 'AWX_PROOT_ENABLED', False)
def final_run_hook(self, adhoc_job, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
super(RunAdHocCommand, self).final_run_hook(adhoc_job, status, private_data_dir, fact_modification_times)
if isolated_manager_instance:
isolated_manager_instance.cleanup()
@task() @task()
class RunSystemJob(BaseTask): class RunSystemJob(BaseTask):

View File

@@ -18,20 +18,10 @@
src: "{{src}}/artifacts/" src: "{{src}}/artifacts/"
dest: "{{src}}/artifacts/" dest: "{{src}}/artifacts/"
mode: pull mode: pull
delete: yes
recursive: yes recursive: yes
- name: Fail if previous check determined that process is not alive. - name: Fail if previous check determined that process is not alive.
fail: fail:
msg: "isolated task is still running" msg: "isolated task is still running"
when: "is_alive.rc == 0" when: "is_alive.rc == 0"
- stat: path="{{src}}/facts/"
register: fact_cache
- name: Copy gathered facts from the isolated host.
synchronize:
src: "{{src}}/facts/"
dest: "{{src}}/facts/"
delete: yes # delete fact cache records that go missing via clear_facts
mode: pull
when: fact_cache.stat.exists