AAP-32143 Make the JT name uniqueness enforced at the database level (#15956)

* Make the JT name uniqueness enforced at the database level

* Forgot demo project fixture

* New approach, done by adding a new field

* Update for linters and failures

* Fix logical error in migration test

* Revert some test changes based on review comment

* Do not rename first template, add test

* Avoid name-too-long rename errors

* Insert migration into place

* Move existing files with git

* Bump migrations of existing

* Update migration test

* Awkward bump

* Fix migration file link

* update test reference again
Alan Rominger 2025-05-14 23:30:23 -04:00 committed by GitHub
parent 20a512bdd9
commit 01eb162378
18 changed files with 297 additions and 39 deletions
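
The approach, per the commit messages above: instead of relying only on serializer-level validation, a hidden org_unique boolean is added to UnifiedJobTemplate and a conditional UniqueConstraint enforces (polymorphic_ctype, name, organization) uniqueness in the database itself, with InventorySource opted out by setting the flag to False. A minimal standalone sketch of the pattern follows; the model, app, and field names other than org_unique are illustrative, not the exact AWX definitions.

# Sketch of a conditional unique constraint guarded by a per-row flag,
# mirroring the pattern this commit applies to UnifiedJobTemplate.
from django.db import models

class Organization(models.Model):
    name = models.CharField(max_length=512, unique=True)

class Template(models.Model):
    name = models.CharField(max_length=512)
    organization = models.ForeignKey(Organization, null=True, on_delete=models.SET_NULL)
    # Rows that must stay exempt from the name rule (InventorySource in AWX)
    # set this flag to False.
    org_unique = models.BooleanField(blank=True, default=True, editable=False)

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=['name', 'organization'],
                condition=models.Q(org_unique=True),
                name='template_hard_name_constraint',
            )
        ]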

View File

@@ -7,7 +7,7 @@ import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0203_delete_token_cleanup_job'),
('main', '0198_alter_inventorysource_source_and_more'),
]
operations = [

View File

@@ -0,0 +1,50 @@
# Generated by Django 4.2.20 on 2025-04-22 15:54
import logging
from django.db import migrations, models
from awx.main.migrations._db_constraints import _rename_duplicates
logger = logging.getLogger(__name__)
def rename_jts(apps, schema_editor):
cls = apps.get_model('main', 'JobTemplate')
_rename_duplicates(cls)
def rename_projects(apps, schema_editor):
cls = apps.get_model('main', 'Project')
_rename_duplicates(cls)
def change_inventory_source_org_unique(apps, schema_editor):
cls = apps.get_model('main', 'InventorySource')
r = cls.objects.update(org_unique=False)
logger.info(f'Set database constraint rule for {r} inventory source objects')
class Migration(migrations.Migration):
dependencies = [
('main', '0199_inventorygroupvariableswithhistory_and_more'),
]
operations = [
migrations.RunPython(rename_jts, migrations.RunPython.noop),
migrations.RunPython(rename_projects, migrations.RunPython.noop),
migrations.AddField(
model_name='unifiedjobtemplate',
name='org_unique',
field=models.BooleanField(blank=True, default=True, editable=False, help_text='Used internally to selectively enforce database constraint on name'),
),
migrations.RunPython(change_inventory_source_org_unique, migrations.RunPython.noop),
migrations.AddConstraint(
model_name='unifiedjobtemplate',
constraint=models.UniqueConstraint(
condition=models.Q(('org_unique', True)), fields=('polymorphic_ctype', 'name', 'organization'), name='ujt_hard_name_constraint'
),
),
]
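
A rough sketch of what this migration buys once applied, assuming a Django shell against a migrated AWX database with at least one organization; the template name here is just an example. The data steps rename pre-existing duplicates, and the new constraint rejects any further duplicates at the database level instead of only in validate_unique().

from django.db import IntegrityError, transaction
from awx.main.models import JobTemplate, Organization

org = Organization.objects.first()  # any existing organization
JobTemplate.objects.create(name='Deploy app', organization=org)
try:
    with transaction.atomic():
        # A second row with the same (polymorphic_ctype, name, organization)
        # and org_unique=True (the default) now violates ujt_hard_name_constraint.
        JobTemplate.objects.create(name='Deploy app', organization=org)
except IntegrityError:
    print('Duplicate name rejected by the database, not just the serializer')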

View File

@@ -5,7 +5,7 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0198_alter_inventorysource_source_and_more'),
('main', '0200_template_name_constraint'),
]
operations = [

View File

@@ -5,7 +5,7 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0199_delete_profile'),
('main', '0201_delete_profile'),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0200_remove_sso_app_content'),
('main', '0202_remove_sso_app_content'),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0201_alter_inventorysource_source_and_more'),
('main', '0203_alter_inventorysource_source_and_more'),
]
operations = [

View File

@@ -8,7 +8,7 @@ from awx.main.migrations._create_system_jobs import delete_clear_tokens_sjt
class Migration(migrations.Migration):
dependencies = [
('main', '0202_alter_oauth2application_unique_together_and_more'),
('main', '0204_alter_oauth2application_unique_together_and_more'),
]
operations = [

View File

@@ -0,0 +1,25 @@
import logging
from django.db.models import Count
logger = logging.getLogger(__name__)
def _rename_duplicates(cls):
field = cls._meta.get_field('name')
max_len = field.max_length
for organization_id in cls.objects.order_by().values_list('organization_id', flat=True).distinct():
duplicate_data = cls.objects.values('name').filter(organization_id=organization_id).annotate(count=Count('name')).order_by().filter(count__gt=1)
for data in duplicate_data:
name = data['name']
for idx, ujt in enumerate(cls.objects.filter(name=name, organization_id=organization_id).order_by('created')):
if idx > 0:
suffix = f'_dup{idx}'
max_chars = max_len - len(suffix)
if len(ujt.name) >= max_chars:
ujt.name = ujt.name[:max_chars] + suffix
else:
ujt.name = ujt.name + suffix
logger.info(f'Renaming duplicate {cls._meta.model_name} to `{ujt.name}` because of duplicate name entry')
ujt.save(update_fields=['name'])

View File

@@ -1120,8 +1120,10 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
def save(self, *args, **kwargs):
# if this is a new object, inherit organization from its inventory
if not self.pk and self.inventory and self.inventory.organization_id and not self.organization_id:
self.organization_id = self.inventory.organization_id
if not self.pk:
self.org_unique = False # needed to exclude from unique (name, organization) constraint
if self.inventory and self.inventory.organization_id and not self.organization_id:
self.organization_id = self.inventory.organization_id
# If update_fields has been specified, add our field names to it,
# if it hasn't been specified, then we're just doing a normal save.

View File

@@ -358,26 +358,6 @@ class JobTemplate(
update_fields.append('organization_id')
return super(JobTemplate, self).save(*args, **kwargs)
def validate_unique(self, exclude=None):
"""Custom over-ride for JT specifically
because organization is inferred from project after full_clean is finished
thus the organization field is not yet set when validation happens
"""
errors = []
for ut in JobTemplate.SOFT_UNIQUE_TOGETHER:
kwargs = {'name': self.name}
if self.project:
kwargs['organization'] = self.project.organization_id
else:
kwargs['organization'] = None
qs = JobTemplate.objects.filter(**kwargs)
if self.pk:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.append('%s with this (%s) combination already exists.' % (JobTemplate.__name__, ', '.join(set(ut) - {'polymorphic_ctype'})))
if errors:
raise ValidationError(errors)
def create_unified_job(self, **kwargs):
prevent_slicing = kwargs.pop('_prevent_slicing', False)
slice_ct = self.get_effective_slice_ct(kwargs)
@@ -404,6 +384,26 @@ class JobTemplate(
WorkflowJobNode.objects.create(**create_kwargs)
return job
def validate_unique(self, exclude=None):
"""Custom over-ride for JT specifically
because organization is inferred from project after full_clean is finished
thus the organization field is not yet set when validation happens
"""
errors = []
for ut in JobTemplate.SOFT_UNIQUE_TOGETHER:
kwargs = {'name': self.name}
if self.project:
kwargs['organization'] = self.project.organization_id
else:
kwargs['organization'] = None
qs = JobTemplate.objects.filter(**kwargs)
if self.pk:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.append('%s with this (%s) combination already exists.' % (JobTemplate.__name__, ', '.join(set(ut) - {'polymorphic_ctype'})))
if errors:
raise ValidationError(errors)
def get_absolute_url(self, request=None):
return reverse('api:job_template_detail', kwargs={'pk': self.pk}, request=request)

View File

@@ -18,6 +18,7 @@ from collections import OrderedDict
# Django
from django.conf import settings
from django.db import models, connection, transaction
from django.db.models.constraints import UniqueConstraint
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import gettext_lazy as _
from django.utils.timezone import now
@@ -111,7 +112,10 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
ordering = ('name',)
# unique_together here is intentionally commented out. Please make sure sub-classes of this model
# contain at least this uniqueness restriction: SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]
# unique_together = [('polymorphic_ctype', 'name', 'organization')]
# Unique name constraint - note that inventory source model is excluded from this constraint entirely
constraints = [
UniqueConstraint(fields=['polymorphic_ctype', 'name', 'organization'], condition=models.Q(org_unique=True), name='ujt_hard_name_constraint')
]
old_pk = models.PositiveIntegerField(
null=True,
@@ -180,6 +184,9 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
)
labels = models.ManyToManyField("Label", blank=True, related_name='%(class)s_labels')
instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='UnifiedJobTemplateInstanceGroupMembership')
org_unique = models.BooleanField(
blank=True, default=True, editable=False, help_text=_('Used internally to selectively enforce database constraint on name')
)
def get_absolute_url(self, request=None):
real_instance = self.get_real_instance()

View File

@@ -0,0 +1,56 @@
import pytest
from awx.main.migrations._db_constraints import _rename_duplicates
from awx.main.models import JobTemplate
@pytest.mark.django_db
def test_rename_job_template_duplicates(organization, project):
ids = []
for i in range(5):
jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project)
ids.append(jt.id) # saved in order of creation
# Hack to first allow duplicate names of JT to test migration
JobTemplate.objects.filter(id__in=ids).update(org_unique=False)
# Set all JTs to the same name
JobTemplate.objects.filter(id__in=ids).update(name='same_name_for_test')
_rename_duplicates(JobTemplate)
first_jt = JobTemplate.objects.get(id=ids[0])
assert first_jt.name == 'same_name_for_test'
for i, pk in enumerate(ids):
if i == 0:
continue
jt = JobTemplate.objects.get(id=pk)
# Name should be set based on creation order
assert jt.name == f'same_name_for_test_dup{i}'
@pytest.mark.django_db
def test_rename_job_template_name_too_long(organization, project):
ids = []
for i in range(3):
jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project)
ids.append(jt.id) # saved in order of creation
JobTemplate.objects.filter(id__in=ids).update(org_unique=False)
chars = 512
# Set all JTs to the same reaaaaaaly long name
JobTemplate.objects.filter(id__in=ids).update(name='A' * chars)
_rename_duplicates(JobTemplate)
first_jt = JobTemplate.objects.get(id=ids[0])
assert first_jt.name == 'A' * chars
for i, pk in enumerate(ids):
if i == 0:
continue
jt = JobTemplate.objects.get(id=pk)
assert jt.name.endswith(f'dup{i}')
assert len(jt.name) <= 512

View File

@@ -393,7 +393,7 @@ def test_dependency_isolation(organization):
this should keep dependencies isolated"""
with mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.update'):
updating_projects = [
Project.objects.create(name='iso-proj', organization=organization, scm_url='https://foo.invalid', scm_type='git', scm_update_on_launch=True)
Project.objects.create(name=f'iso-proj{i}', organization=organization, scm_url='https://foo.invalid', scm_type='git', scm_update_on_launch=True)
for i in range(2)
]

View File

@@ -43,7 +43,7 @@ def test_job_template_copy(
c.save()
assert get(reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), alice, expect=200).data['can_copy'] is True
jt_copy_pk_alice = post(
reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), {'name': 'new jt name'}, alice, expect=201
reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), {'name': 'new jt name alice'}, alice, expect=201
).data['id']
jt_copy_admin = type(job_template_with_survey_passwords).objects.get(pk=jt_copy_pk)
@@ -53,7 +53,7 @@ def test_job_template_copy(
assert jt_copy_alice.created_by == alice
for jt_copy in (jt_copy_admin, jt_copy_alice):
assert jt_copy.name == 'new jt name'
assert jt_copy.name.startswith('new jt name')
assert jt_copy.project == project
assert jt_copy.inventory == inventory
assert jt_copy.playbook == job_template_with_survey_passwords.playbook

View File

@@ -106,3 +106,37 @@ class TestMigrationSmoke:
)
DABPermission = new_state.apps.get_model('dab_rbac', 'DABPermission')
assert not DABPermission.objects.filter(codename='view_executionenvironment').exists()
# Test create a Project with a duplicate name
Organization = new_state.apps.get_model('main', 'Organization')
Project = new_state.apps.get_model('main', 'Project')
org = Organization.objects.create(name='duplicate-obj-organization', created=now(), modified=now())
proj_ids = []
for i in range(3):
proj = Project.objects.create(name='duplicate-project-name', organization=org, created=now(), modified=now())
proj_ids.append(proj.id)
# The uniqueness rules will not apply to InventorySource
Inventory = new_state.apps.get_model('main', 'Inventory')
InventorySource = new_state.apps.get_model('main', 'InventorySource')
inv = Inventory.objects.create(name='migration-test-inv', organization=org, created=now(), modified=now())
InventorySource.objects.create(name='migration-test-src', source='file', inventory=inv, organization=org, created=now(), modified=now())
new_state = migrator.apply_tested_migration(
('main', '0200_template_name_constraint'),
)
for i, proj_id in enumerate(proj_ids):
proj = Project.objects.get(id=proj_id)
if i == 0:
assert proj.name == 'duplicate-project-name'
else:
assert proj.name != 'duplicate-project-name'
assert proj.name.startswith('duplicate-project-name')
# The inventory source had this field set to avoid the constraint
InventorySource = new_state.apps.get_model('main', 'InventorySource')
inv_src = InventorySource.objects.get(name='migration-test-src')
assert inv_src.org_unique is False
Project = new_state.apps.get_model('main', 'Project')
for proj in Project.objects.all():
assert proj.org_unique is True

View File

@@ -436,21 +436,22 @@ def test_project_list_ordering_by_name(get, order_by, expected_names, organizati
@pytest.mark.parametrize('order_by', ('name', '-name'))
@pytest.mark.django_db
def test_project_list_ordering_with_duplicate_names(get, order_by, organization_factory):
def test_project_list_ordering_with_duplicate_names(get, order_by, admin):
# why? because all the '1' mean that all the names are the same, you can't sort based on that,
# meaning you have to fall back on the default sort order, which in this case, is ID
'ensure sorted order of project list is maintained correctly when the project names the same'
objects = organization_factory(
'org1',
projects=['1', '1', '1', '1', '1'],
superusers=['admin'],
)
from awx.main.models import Organization
projects = []
for i in range(5):
projects.append(Project.objects.create(name='1', organization=Organization.objects.create(name=f'org{i}')))
project_ids = {}
for x in range(3):
results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
results = get(reverse('api:project_list'), user=admin, QUERY_STRING='order_by=%s' % order_by).data['results']
project_ids[x] = [proj['id'] for proj in results]
assert project_ids[0] == project_ids[1] == project_ids[2]
assert project_ids[0] == sorted(project_ids[0])
assert set(project_ids[0]) == set([proj.id for proj in projects])
@pytest.mark.django_db

View File

@@ -0,0 +1,77 @@
import multiprocessing
import json
import pytest
import requests
from requests.auth import HTTPBasicAuth
from django.db import connection
from awx.main.models import User, JobTemplate
def create_in_subprocess(project_id, ready_event, continue_event, admin_auth):
connection.connect()
print('setting ready event')
ready_event.set()
print('waiting for continue event')
continue_event.wait()
if JobTemplate.objects.filter(name='test_jt_duplicate_name').exists():
for jt in JobTemplate.objects.filter(name='test_jt_duplicate_name'):
jt.delete()
assert JobTemplate.objects.filter(name='test_jt_duplicate_name').count() == 0
jt_data = {'name': 'test_jt_duplicate_name', 'project': project_id, 'playbook': 'hello_world.yml', 'ask_inventory_on_launch': True}
response = requests.post('http://localhost:8013/api/v2/job_templates/', json=jt_data, auth=admin_auth)
# should either have a conflict or create
assert response.status_code in (400, 201)
print(f'Subprocess got {response.status_code}')
if response.status_code == 400:
print(json.dumps(response.json(), indent=2))
return response.status_code
@pytest.fixture
def admin_for_test():
user, created = User.objects.get_or_create(username='admin_for_test', defaults={'is_superuser': True})
if created:
user.set_password('for_test_123!')
user.save()
print(f'Created user {user.username}')
return user
@pytest.fixture
def admin_auth(admin_for_test):
return HTTPBasicAuth(admin_for_test.username, 'for_test_123!')
def test_jt_duplicate_name(admin_auth, demo_proj):
N_processes = 5
ready_events = [multiprocessing.Event() for _ in range(N_processes)]
continue_event = multiprocessing.Event()
processes = []
for i in range(N_processes):
p = multiprocessing.Process(target=create_in_subprocess, args=(demo_proj.id, ready_events[i], continue_event, admin_auth))
processes.append(p)
p.start()
# Make sure all subprocesses are connected and ready before proceeding
for e in ready_events:
print('waiting on subprocess ready event')
e.wait()
# Release all subprocesses to attempt the job template creation at once
print('setting the continue event for the workers')
continue_event.set()
# if a deadlock happens it will probably be surfaced here
print('waiting on the workers to finish the creation')
for p in processes:
p.join()
assert JobTemplate.objects.filter(name='test_jt_duplicate_name').count() == 1

View File

@@ -114,6 +114,12 @@ def demo_inv(default_org):
return inventory
@pytest.fixture(scope='session')
def demo_proj(default_org):
proj, _ = Project.objects.get_or_create(name='Demo Project', defaults={'organization': default_org})
return proj
@pytest.fixture
def podman_image_generator():
"""