Merge pull request #6176 from Ladas/send_also_workflows_as_part_of_unified_jobs

Also send workflows as part of unified jobs, and send all changes to jobs

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
commit c414fd68a0 by softwarefactory-project-zuul[bot], 2020-04-13 18:41:36 +00:00, committed by GitHub
2 changed files with 79 additions and 3 deletions


@@ -281,9 +281,9 @@ def copy_tables(since, full_path):
               main_unifiedjob.instance_group_id
               FROM main_unifiedjob
               JOIN django_content_type ON main_unifiedjob.polymorphic_ctype_id = django_content_type.id
-              JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id
-              WHERE main_unifiedjob.created > {}
-              AND main_unifiedjob.launch_type != 'sync'
+              LEFT JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id
+              WHERE (main_unifiedjob.created > {0} OR main_unifiedjob.finished > {0})
+              AND main_unifiedjob.launch_type != 'sync'
               ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
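The two changes above do the work described in the PR title: switching the organization join to a LEFT JOIN keeps unified jobs that have no organization_id (notably workflow jobs) in the result instead of silently dropping them, and filtering on created OR finished also picks up jobs that started before the collection window but changed state inside it. A minimal sketch of the join behavior, using the stdlib sqlite3 module rather than AWX's Postgres setup (the table contents here are illustrative, not AWX code):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.executescript("""
    CREATE TABLE main_organization (id INTEGER PRIMARY KEY, name TEXT);
    CREATE TABLE main_unifiedjob (id INTEGER PRIMARY KEY, name TEXT, organization_id INTEGER);
    INSERT INTO main_organization VALUES (1, 'Default');
    INSERT INTO main_unifiedjob VALUES (10, 'job1', 1);          -- a regular job with an org
    INSERT INTO main_unifiedjob VALUES (11, 'workflow1', NULL);  -- a workflow job, no org
""")

inner = conn.execute(
    'SELECT main_unifiedjob.name FROM main_unifiedjob '
    'JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id'
).fetchall()
left = conn.execute(
    'SELECT main_unifiedjob.name FROM main_unifiedjob '
    'LEFT JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id'
).fetchall()

print(inner)  # [('job1',)] -- the inner join drops the workflow row
print(left)   # [('job1',), ('workflow1',)] -- the LEFT JOIN keeps it

The same reasoning applies to the new WHERE clause: keying the window only on created would never re-send a job whose status changed after collection, whereas created OR finished re-sends a job once it finishes, which is what "send all changes to jobs" refers to.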


@@ -0,0 +1,76 @@
import pytest
import tempfile
import os
import shutil
import csv

from django.utils.timezone import now
from django.db.backends.sqlite3.base import SQLiteCursorWrapper

from awx.main.analytics import collectors
from awx.main.models import (
    ProjectUpdate,
    InventorySource,
)


@pytest.fixture
def sqlite_copy_expert(request):
    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
    # behavior to test that it writes a file that contains stdout from events
    path = tempfile.mkdtemp(prefix='copied_tables')

    def write_stdout(self, sql, fd):
        # It would be nicer to properly dissect the SQL query and verify it
        # that way, but for now we take the naive approach.
        assert sql.startswith('COPY (')
        assert sql.endswith(') TO STDOUT WITH CSV HEADER')
        sql = sql.replace('COPY (', '')
        sql = sql.replace(') TO STDOUT WITH CSV HEADER', '')

        # Drop JSON-style queries, which SQLite does not support
        # TODO: could replace JSON-style queries with SQLite equivalents
        sql_new = []
        for line in sql.split('\n'):
            if line.find('main_jobevent.event_data::') == -1:
                sql_new.append(line)
        sql = '\n'.join(sql_new)

        self.execute(sql)
        results = self.fetchall()
        headers = [i[0] for i in self.description]

        csv_handle = csv.writer(fd, delimiter=',', quoting=csv.QUOTE_ALL, escapechar='\\', lineterminator='\n')
        csv_handle.writerow(headers)
        csv_handle.writerows(results)

    setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
    request.addfinalizer(lambda: shutil.rmtree(path))
    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
    return path


@pytest.mark.django_db
def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
    '''
    Ensure that various unified job types are in the output of the query.
    '''
    time_start = now()
    inv_src = InventorySource.objects.create(name="inventory_update1", inventory=inventory, source='gce')
    project_update_name = ProjectUpdate.objects.create(project=project, name="project_update1").name
    inventory_update_name = inv_src.create_unified_job().name
    job_name = job_template.create_unified_job().name

    with tempfile.TemporaryDirectory() as tmpdir:
        collectors.copy_tables(time_start, tmpdir)
        with open(os.path.join(tmpdir, 'unified_jobs_table.csv')) as f:
            lines = ''.join([l for l in f])

        assert project_update_name in lines
        assert inventory_update_name in lines
        assert job_name in lines
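For contrast with the fixture above, this is roughly the call it stands in for: on Postgres, the collector's cursor exposes psycopg2's copy_expert, which streams a COPY ... TO STDOUT result into a file object. A minimal sketch, assuming a reachable database; the DSN, query, and output path here are illustrative assumptions, not taken from AWX:

import psycopg2

conn = psycopg2.connect('dbname=awx user=awx')  # illustrative DSN
with open('/tmp/unified_jobs_table.csv', 'w') as f:
    with conn.cursor() as cursor:
        cursor.copy_expert(
            'COPY (SELECT id, name FROM main_unifiedjob ORDER BY id ASC) '
            'TO STDOUT WITH CSV HEADER',
            f,
        )
conn.close()

Because SQLite has no COPY statement, the fixture's write_stdout re-implements just enough of this behavior (execute the inner SELECT, then write the rows as CSV with a header line) for the assertions in the test to hold.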