update job cleanup tests for sqlite-based execution

Jim Ladd 2021-05-13 14:28:24 -07:00
parent 84af610a1f
commit e371de38ed
2 changed files with 72 additions and 4 deletions


@@ -16,6 +16,65 @@ def app_post_migration(sender, app_config, **kwargs):
     if 'result_stdout_text' not in cols:
         cur.execute('ALTER TABLE main_unifiedjob ADD COLUMN result_stdout_text TEXT')
 
+    # we also need to make sure that the `_unpartitioned_<event>` tables are present.
+    # these tables represent old job event tables that were renamed / preserved during a
+    # migration which introduces partitioned event tables
+    # https://github.com/ansible/awx/issues/9039
+    for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
+        table_entries = cur.execute(f'SELECT count(*) from sqlite_master WHERE tbl_name="_unpartitioned_{tblname}";').fetchone()[0]
+        if table_entries > 0:
+            continue
+        if tblname == 'main_adhoccommandevent':
+            unique_columns = """host_name character varying(1024) NOT NULL,
+                event character varying(100) NOT NULL,
+                failed boolean NOT NULL,
+                changed boolean NOT NULL,
+                host_id integer,
+                ad_hoc_command_id integer NOT NULL
+                """
+        elif tblname == 'main_inventoryupdateevent':
+            unique_columns = "inventory_update_id integer NOT NULL"
+        elif tblname == 'main_jobevent':
+            unique_columns = """event character varying(100) NOT NULL,
+                failed boolean NOT NULL,
+                changed boolean NOT NULL,
+                host_name character varying(1024) NOT NULL,
+                play character varying(1024) NOT NULL,
+                role character varying(1024) NOT NULL,
+                task character varying(1024) NOT NULL,
+                host_id integer,
+                job_id integer NOT NULL,
+                playbook character varying(1024) NOT NULL
+                """
+        elif tblname == 'main_projectupdateevent':
+            unique_columns = """event character varying(100) NOT NULL,
+                failed boolean NOT NULL,
+                changed boolean NOT NULL,
+                playbook character varying(1024) NOT NULL,
+                play character varying(1024) NOT NULL,
+                role character varying(1024) NOT NULL,
+                task character varying(1024) NOT NULL,
+                project_update_id integer NOT NULL
+                """
+        elif tblname == 'main_systemjobevent':
+            unique_columns = "system_job_id integer NOT NULL"
+        cur.execute(
+            f"""CREATE TABLE _unpartitioned_{tblname} (
+                id bigint NOT NULL,
+                created timestamp with time zone NOT NULL,
+                modified timestamp with time zone NOT NULL,
+                event_data text NOT NULL,
+                counter integer NOT NULL,
+                end_line integer NOT NULL,
+                start_line integer NOT NULL,
+                stdout text NOT NULL,
+                uuid character varying(1024) NOT NULL,
+                verbosity integer NOT NULL,
+                {unique_columns});
+            """
+        )
 
 if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
     post_migrate.connect(app_post_migration, sender=apps.get_app_config('main'))
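For reference, the existence check used by the hook above reduces to a small standalone pattern: query sqlite_master for the table name and create the table only when no row comes back. The sketch below shows the same pattern with Python's sqlite3 module against an in-memory database; ensure_table and the column definitions are illustrative stand-ins, not part of the commit.

import sqlite3

def ensure_table(cur, tblname, columns_sql):
    # same check as the hook: sqlite_master lists every table in the database
    count = cur.execute('SELECT count(*) FROM sqlite_master WHERE tbl_name=?;', (tblname,)).fetchone()[0]
    if count == 0:
        cur.execute(f'CREATE TABLE {tblname} ({columns_sql});')

cur = sqlite3.connect(':memory:').cursor()  # stand-in for the test database
ensure_table(cur, '_unpartitioned_main_systemjobevent', 'id bigint NOT NULL, system_job_id integer NOT NULL')
ensure_table(cur, '_unpartitioned_main_systemjobevent', 'id bigint NOT NULL, system_job_id integer NOT NULL')  # no-op on the second call

Using a parameter placeholder for the table name in the SELECT sidesteps quoting; the hook's double-quoted literal also works because SQLite treats an unmatched double-quoted identifier as a string literal.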


@@ -2,10 +2,12 @@ import pytest
 from datetime import datetime, timedelta
 from pytz import timezone
 from collections import OrderedDict
+from unittest import mock
 from django.db.models.deletion import Collector, SET_NULL, CASCADE
 from django.core.management import call_command
+from awx.main.management.commands import cleanup_jobs
 from awx.main.utils.deletion import AWXCollector
 from awx.main.models import JobTemplate, User, Job, JobEvent, Notification, WorkflowJobNode, JobHostSummary
@@ -32,19 +34,20 @@ def setup_environment(inventory, project, machine_credential, host, notification
     notification.save()
 
     for i in range(3):
+        # create jobs with current time
         job1 = jt.create_job()
         job1.created = datetime.now(tz=timezone('UTC'))
         job1.save()
-        # create jobs with current time
-        JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
+        # sqlite does not support partitioning so we cannot test partition-based jobevent cleanup
+        # JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
         new_jobs.append(job1)
 
-        job2 = jt.create_job()
         # create jobs 10 days ago
+        job2 = jt.create_job()
         job2.created = datetime.now(tz=timezone('UTC')) - timedelta(days=days)
         job2.save()
         job2.dependent_jobs.add(job1)
-        JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
+        # JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
         old_jobs.append(job2)
 
         jt.last_job = job2
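The fixture's two timestamps are chosen to land on either side of the cleanup cutoff: jobs created "now" should survive a 10-day cleanup, jobs created 10 days earlier should not. A quick sanity check of that boundary, assuming the command removes jobs created earlier than now minus the requested number of days (the exact comparison the command applies is not shown in this excerpt):

from datetime import datetime, timedelta
from pytz import timezone

days = 10
created_new = datetime.now(tz=timezone('UTC'))                         # the fixture's "new" jobs
created_old = datetime.now(tz=timezone('UTC')) - timedelta(days=days)  # the fixture's "old" jobs

# later, when the cleanup runs, the cutoff is computed relative to that moment
cutoff = datetime.now(tz=timezone('UTC')) - timedelta(days=days)
assert created_new >= cutoff   # recent jobs survive a 10-day cleanup
assert created_old < cutoff    # 10-day-old jobs are eligible for deletion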
@@ -62,7 +65,13 @@ def setup_environment(inventory, project, machine_credential, host, notification
     return (old_jobs, new_jobs, days_str)
 
 
+# sqlite does not support table partitioning so we mock out the methods responsible for pruning
+# job event partitions during the job cleanup task
+# https://github.com/ansible/awx/issues/9039
 @pytest.mark.django_db
+@mock.patch.object(cleanup_jobs.DeleteMeta, 'identify_excluded_partitions', mock.MagicMock())
+@mock.patch.object(cleanup_jobs.DeleteMeta, 'find_partitions_to_drop', mock.MagicMock())
+@mock.patch.object(cleanup_jobs.DeleteMeta, 'drop_partitions', mock.MagicMock())
 def test_cleanup_jobs(setup_environment):
     (old_jobs, new_jobs, days_str) = setup_environment
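The hunk ends at the top of test_cleanup_jobs. For orientation, a test built on this fixture typically drives the management command and checks which jobs survive, roughly as sketched below. This is an illustration rather than the commit's actual test body, and it assumes the cleanup_jobs command accepts a --days option (which is what the fixture's days_str is for); with pytest.mark.django_db and the three mock.patch.object decorators from the diff applied, DeleteMeta's partition-pruning steps become no-ops, so the row-deletion path can be exercised on sqlite.

from django.core.management import call_command
from awx.main.models import Job

def test_cleanup_jobs_sketch(setup_environment):
    # the decorators shown in the diff (django_db plus the DeleteMeta patches) are omitted here for brevity
    (old_jobs, new_jobs, days_str) = setup_environment
    call_command('cleanup_jobs', '--days', days_str)
    # jobs older than the cutoff are deleted, recent ones are left untouched
    assert not Job.objects.filter(pk__in=[j.pk for j in old_jobs]).exists()
    assert Job.objects.filter(pk__in=[j.pk for j in new_jobs]).count() == len(new_jobs)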