From 3ea642f212f4a88438e0efba020185a60454c885 Mon Sep 17 00:00:00 2001
From: Ryan Petrello
Date: Tue, 12 May 2020 09:38:59 -0400
Subject: [PATCH] properly handle host summary bulk updates if hosts go missing

---
 awx/main/models/events.py                    | 18 ++++----
 .../tests/functional/models/test_events.py   | 44 ++++++++++++++++++-
 2 files changed, 51 insertions(+), 11 deletions(-)

diff --git a/awx/main/models/events.py b/awx/main/models/events.py
index 80d530a2c3..57de629a8f 100644
--- a/awx/main/models/events.py
+++ b/awx/main/models/events.py
@@ -489,25 +489,23 @@ class JobEvent(BasePlaybookEvent):
                 return
             job = self.job
-            from awx.main.models.jobs import JobHostSummary  # circular import
+            from awx.main.models import Host, JobHostSummary  # circular import
+            existing = Host.objects.filter(id__in=self.host_map.values()).values_list('id', flat=True)
+
             summaries = dict()
             for host in hostnames:
+                host_id = self.host_map.get(host, None)
+                if host_id not in existing:
+                    host_id = None
                 host_stats = {}
                 for stat in ('changed', 'dark', 'failures', 'ignored', 'ok', 'processed', 'rescued', 'skipped'):
                     try:
                         host_stats[stat] = self.event_data.get(stat, {}).get(host, 0)
                     except AttributeError:  # in case event_data[stat] isn't a dict.
                         pass
-                host_id = self.host_map.get(host, None)
-                summaries.setdefault(
-                    (host_id, host),
-                    JobHostSummary(created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host)
+                summaries[(host_id, host)] = JobHostSummary(
+                    created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host, **host_stats
                 )
-                host_summary = summaries[(host_id, host)]
-
-                for stat, value in host_stats.items():
-                    if getattr(host_summary, stat) != value:
-                        setattr(host_summary, stat, value)
 
             JobHostSummary.objects.bulk_create(summaries.values())
diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py
index 0d2530b968..6dd0cac06a 100644
--- a/awx/main/tests/functional/models/test_events.py
+++ b/awx/main/tests/functional/models/test_events.py
@@ -67,7 +67,7 @@ def test_parent_failed(emit, event):
 
 @pytest.mark.django_db
 def test_host_summary_generation():
-    hostnames = [f'Host {i}' for i in range(5000)]
+    hostnames = [f'Host {i}' for i in range(500)]
     inv = Inventory()
     inv.save()
     Host.objects.bulk_create([
@@ -107,3 +107,45 @@
     assert s.processed == 0
     assert s.rescued == 0
     assert s.skipped == 0
+
+
+@pytest.mark.django_db
+def test_host_summary_generation_with_deleted_hosts():
+    hostnames = [f'Host {i}' for i in range(10)]
+    inv = Inventory()
+    inv.save()
+    Host.objects.bulk_create([
+        Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
+        for h in hostnames
+    ])
+    j = Job(inventory=inv)
+    j.save()
+    host_map = dict((host.name, host.id) for host in inv.hosts.all())
+
+    # delete half of the hosts during the playbook run
+    for h in inv.hosts.all()[:5]:
+        h.delete()
+
+    JobEvent.create_from_data(
+        job_id=j.pk,
+        parent_uuid='abc123',
+        event='playbook_on_stats',
+        event_data={
+            'ok': dict((hostname, len(hostname)) for hostname in hostnames),
+            'changed': {},
+            'dark': {},
+            'failures': {},
+            'ignored': {},
+            'processed': {},
+            'rescued': {},
+            'skipped': {},
+        },
+        host_map=host_map
+    ).save()
+
+
+    ids = sorted([s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()])
+    names = sorted([s.host_name for s in j.job_host_summaries.all()])
+    assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10]
+    assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5',
+                     'Host 6', 'Host 7', 'Host 8', 'Host 9']
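
Reviewer note (not part of the patch): a minimal, framework-free sketch of the behavior the change above introduces, assuming plain dicts in place of JobHostSummary rows and a hypothetical helper name build_summaries. Host ids that no longer resolve to an existing Host are nulled out before the summary records are built, the per-host stats are applied at construction time, and every hostname from the play still gets exactly one record, so a single bulk insert can cover them all.

# Hypothetical stand-alone sketch; build_summaries and its arguments are not
# AWX APIs, and plain dicts stand in for JobHostSummary model instances.
from datetime import datetime, timezone

STATS = ('changed', 'dark', 'failures', 'ignored', 'ok',
         'processed', 'rescued', 'skipped')


def build_summaries(hostnames, host_map, existing_host_ids, event_data):
    """Return one summary record per (host_id, hostname) pair."""
    summaries = {}
    for host in hostnames:
        host_id = host_map.get(host)
        if host_id not in existing_host_ids:
            # the host was deleted mid-run: keep its name, drop the foreign key
            host_id = None
        host_stats = {}
        for stat in STATS:
            per_host = event_data.get(stat, {})
            if isinstance(per_host, dict):  # mirrors the AttributeError guard above
                host_stats[stat] = per_host.get(host, 0)
        now = datetime.now(timezone.utc)
        summaries[(host_id, host)] = dict(
            created=now, modified=now, host_id=host_id, host_name=host, **host_stats)
    return summaries


if __name__ == '__main__':
    # 'web1' still exists (id 1); 'db1' (id 2) was deleted after the play started.
    rows = build_summaries(
        hostnames=['web1', 'db1'],
        host_map={'web1': 1, 'db1': 2},
        existing_host_ids={1},
        event_data={'ok': {'web1': 3, 'db1': 1}, 'failures': {}},
    )
    assert rows[(1, 'web1')]['ok'] == 3
    assert rows[(None, 'db1')]['host_id'] is None
    assert len(rows) == 2  # one record per hostname, ready for a single bulk insert

Keying the result on the (host_id, host_name) pair rather than calling setdefault and mutating the stats afterwards is what lets deleted hosts (host_id of None) coexist without clobbering each other while keeping a single bulk_create at the end.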