Merge pull request #4862 from AlanCoding/poly_morphin_time

Unified JT and Unified Job queryset refactor
This commit is contained in:
Alan Rominger 2017-01-25 13:05:48 -05:00 committed by GitHub
commit 4feb7fcb8d
5 changed files with 114 additions and 73 deletions

View File

@ -1826,25 +1826,22 @@ class JobEventAccess(BaseAccess):
class UnifiedJobTemplateAccess(BaseAccess):
'''
I can see a unified job template whenever I can see the same project,
inventory source or job template. Unified job templates do not include
projects without SCM configured or inventory sources without a cloud
source.
inventory source, WFJT, or job template. Unified job templates do not include
inventory sources without a cloud source.
'''
model = UnifiedJobTemplate
def get_queryset(self):
qs = self.model.objects.all()
project_qs = self.user.get_queryset(Project).filter(scm_type__in=[s[0] for s in Project.SCM_TYPE_CHOICES])
inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES)
job_template_qs = self.user.get_queryset(JobTemplate)
system_job_template_qs = self.user.get_queryset(SystemJobTemplate)
workflow_job_template_qs = self.user.get_queryset(WorkflowJobTemplate)
qs = qs.filter(Q(Project___in=project_qs) |
Q(InventorySource___in=inventory_source_qs) |
Q(JobTemplate___in=job_template_qs) |
Q(systemjobtemplate__in=system_job_template_qs) |
Q(workflowjobtemplate__in=workflow_job_template_qs))
if self.user.is_superuser or self.user.is_system_auditor:
qs = self.model.objects.all()
else:
qs = self.model.objects.filter(
Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role')) |
Q(inventorysource__inventory__id__in=Inventory._accessible_pk_qs(
Inventory, self.user, 'read_role')))
qs = qs.exclude(inventorysource__source="")
qs = qs.select_related(
'created_by',
'modified_by',
@ -1883,25 +1880,23 @@ class UnifiedJobAccess(BaseAccess):
model = UnifiedJob
def get_queryset(self):
qs = self.model.objects.all()
project_update_qs = self.user.get_queryset(ProjectUpdate)
inventory_update_qs = self.user.get_queryset(InventoryUpdate).filter(source__in=CLOUD_INVENTORY_SOURCES)
job_qs = self.user.get_queryset(Job)
ad_hoc_command_qs = self.user.get_queryset(AdHocCommand)
system_job_qs = self.user.get_queryset(SystemJob)
workflow_job_qs = self.user.get_queryset(WorkflowJob)
qs = qs.filter(Q(ProjectUpdate___in=project_update_qs) |
Q(InventoryUpdate___in=inventory_update_qs) |
Q(Job___in=job_qs) |
Q(AdHocCommand___in=ad_hoc_command_qs) |
Q(SystemJob___in=system_job_qs) |
Q(WorkflowJob___in=workflow_job_qs))
qs = qs.select_related(
if self.user.is_superuser or self.user.is_system_auditor:
qs = self.model.objects.all()
else:
inv_pk_qs = Inventory._accessible_pk_qs(Inventory, self.user, 'read_role')
org_auditor_qs = Organization.objects.filter(
Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
qs = self.model.objects.filter(
Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')) |
Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs) |
Q(adhoccommand__inventory__id__in=inv_pk_qs) |
Q(job__inventory__organization__in=org_auditor_qs) |
Q(job__project__organization__in=org_auditor_qs)
)
qs = qs.prefetch_related(
'created_by',
'modified_by',
'unified_job_node__workflow_job',
)
qs = qs.prefetch_related(
'unified_job_template',
)

View File

@ -38,7 +38,7 @@ class ResourceMixin(models.Model):
return ResourceMixin._accessible_objects(cls, accessor, role_field)
@staticmethod
def _accessible_objects(cls, accessor, role_field):
def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
if type(accessor) == User:
ancestor_roles = accessor.roles.all()
elif type(accessor) == Role:
@ -47,14 +47,22 @@ class ResourceMixin(models.Model):
accessor_type = ContentType.objects.get_for_model(accessor)
ancestor_roles = Role.objects.filter(content_type__pk=accessor_type.id,
object_id=accessor.id)
qs = cls.objects.filter(pk__in =
RoleAncestorEntry.objects.filter(
ancestor__in=ancestor_roles,
content_type_id = ContentType.objects.get_for_model(cls).id,
role_field = role_field
).values_list('object_id').distinct()
)
return qs
if content_types is None:
ct_kwarg = dict(content_type_id = ContentType.objects.get_for_model(cls).id)
else:
ct_kwarg = dict(content_type_id__in = content_types)
return RoleAncestorEntry.objects.filter(
ancestor__in = ancestor_roles,
role_field = role_field,
**ct_kwarg
).values_list('object_id').distinct()
@staticmethod
def _accessible_objects(cls, accessor, role_field):
    # Thin wrapper: resolve the set of accessible primary keys first,
    # then narrow the model's full queryset down to just those rows.
    accessible_pks = ResourceMixin._accessible_pk_qs(cls, accessor, role_field)
    return cls.objects.filter(pk__in=accessible_pks)
def get_permissions(self, accessor):

View File

@ -20,6 +20,7 @@ from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
from django.utils.encoding import smart_text
from django.apps import apps
from django.contrib.contenttypes.models import ContentType
# Django-Polymorphic
from polymorphic import PolymorphicModel
@ -30,6 +31,7 @@ from djcelery.models import TaskMeta
# AWX
from awx.main.models.base import * # noqa
from awx.main.models.schedules import Schedule
from awx.main.models.mixins import ResourceMixin
from awx.main.utils import (
decrypt_field, _inventory_updates,
copy_model_by_class, copy_m2m_relationships
@ -166,6 +168,20 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
else:
return super(UnifiedJobTemplate, self).unique_error_message(model_class, unique_check)
@classmethod
def accessible_pk_qs(cls, accessor, role_field):
    '''
    A re-implementation of accessible pk queryset for the "normal" unified JTs.
    Does not return inventory sources or system JTs, these should
    be handled inside of get_queryset where it is utilized.
    '''
    # Collect the lowercase model names of the "normal" subclasses,
    # leaving out the two types handled separately by get_queryset.
    excluded = ('inventorysource', 'systemjobtemplate')
    subclass_names = []
    for subclass in cls.__subclasses__():
        name = subclass.__name__.lower()
        if name not in excluded:
            subclass_names.append(name)
    # Translate those model names into content type ids so the role
    # lookup can be restricted to just these polymorphic types.
    ct_ids = list(
        ContentType.objects.filter(model__in=subclass_names).values_list('id', flat=True))
    return ResourceMixin._accessible_pk_qs(
        cls, accessor, role_field, content_types=ct_ids)
def _perform_unique_checks(self, unique_checks):
# Handle the list of unique fields returned above. Replace with an
# appropriate error message for the remaining field(s) in the unique

View File

@ -1,15 +1,15 @@
resource medium
organizations 500
users 5000
teams 500
projects 1000
job-templates 2000
credentials 2000
inventories 2000
inventory-groups 500
inventory-hosts 2500
wfjts 100
nodes 1000
labels 1000
jobs 1000
job-events 1000
resource medium Jan2017 jobs1k jobs10k jobs50k jobs100k
organizations 500 1 1 1 1 1
users 5000 3 3 3 3 3
teams 500 2 2 2 2 2
projects 1000 30 30 30 30 30
job_templates 2000 127 127 127 127 127
credentials 2000 50 50 50 50 50
inventories 2000 6 6 6 6 6
inventory_groups 500 15 15 15 15 15
inventory_hosts 2500 15 15 15 15 15
wfjts 100 0 0 0 0 0
nodes 1000 0 0 0 0 0
labels 1000 0 0 0 0 0
jobs 1000 157208 1000 10000 50000 100000
job_events 1000 3370942 20000 200000 1000000 2000000
1 resource medium Jan2017 jobs1k jobs10k jobs50k jobs100k
2 organizations 500 1 1 1 1 1
3 users 5000 3 3 3 3 3
4 teams 500 2 2 2 2 2
5 projects 1000 30 30 30 30 30
6 job-templates job_templates 2000 127 127 127 127 127
7 credentials 2000 50 50 50 50 50
8 inventories 2000 6 6 6 6 6
9 inventory-groups inventory_groups 500 15 15 15 15 15
10 inventory-hosts inventory_hosts 2500 15 15 15 15 15
11 wfjts 100 0 0 0 0 0
12 nodes 1000 0 0 0 0 0
13 labels 1000 0 0 0 0 0
14 jobs 1000 157208 1000 10000 50000 100000
15 job-events job_events 1000 3370942 20000 200000 1000000 2000000

View File

@ -7,6 +7,8 @@ import sys
# Python
from collections import defaultdict
from optparse import make_option, OptionParser
from datetime import datetime
import logging
# Django
@ -84,6 +86,7 @@ options = vars(options)
if options['preset']:
print ' Using preset data numbers set ' + str(options['preset'])
# Read the numbers of resources from presets file, if provided
presets_filename = os.path.abspath(os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'presets.tsv'))
@ -182,6 +185,9 @@ def mock_save(self, *args, **kwargs):
PrimordialModel.save = mock_save
startTime = datetime.now()
try:
with transaction.atomic():
@ -488,7 +494,8 @@ try:
if project_idx == 0 and i == 0:
job_template.admin_role.members.add(jt_admin)
project_idx += 1
print('')
if n > 0:
print('')
print('# Creating %d Workflow Job Templates' % n_wfjts)
org_idx = 0
@ -509,7 +516,8 @@ try:
wfjt._is_new = _
wfjts.append(wfjt)
org_idx += 1
print('')
if n:
print('')
print('# Creating %d Workflow Job Template nodes' % n_nodes)
wfjt_idx = 0
@ -559,7 +567,8 @@ try:
parent_node.success_nodes.add(node)
parent_idx = (parent_idx + 7) % len(wfjt_nodes)
wfjt_idx += 1
print('')
if n:
print('')
print('# Creating %d Labels' % n_labels)
org_idx = 0
@ -578,7 +587,8 @@ try:
)
labels.append(label)
org_idx += 1
print('')
if n:
print('')
label_gen = yield_choice(labels)
print('# Adding labels to job templates')
@ -603,22 +613,28 @@ try:
wfjt.labels.add(next(label_gen))
wfjt_idx += 1
# Disable logging here, because it will mess up output format
logger = logging.getLogger('awx.main')
logger.propagate = False
print('# Creating %d jobs' % n_jobs)
group_idx = 0
job_template_idx = 0
job_i = 0
for n in spread(n_jobs, n_job_templates):
job_template = job_templates[job_template_idx]
for i in range(n):
sys.stdout.write('\r Assigning %d to %s: %d ' % (n, job_template.name, i+ 1))
sys.stdout.flush()
job_stat = 'successful'
if len(jobs) % 4 == 0:
job_stat = 'failed'
elif len(jobs) % 11 == 0:
job_stat = 'canceled'
else:
job_stat = 'successful'
job, _ = Job.objects.get_or_create(
job_template=job_template,
status=job_stat, name=job_template.name,
status=job_stat, name="%s-%d" % (job_template.name, job_i),
project=job_template.project, inventory=job_template.inventory,
credential=job_template.credential,
cloud_credential=job_template.cloud_credential,
@ -626,25 +642,28 @@ try:
)
job._is_new = _
jobs.append(job)
job_i += 1
if not job._is_new:
group_idx += 1
continue
if i == n:
if i + 1 == n:
job_template.last_job = job
if job_template.pk % 5 == 0:
job_template.current_job = job
job_template.save()
with transaction.atomic():
if job_template.inventory:
inv_groups = [g for g in job_template.inventory.groups.all()]
if len(inv_groups):
JobHostSummary.objects.bulk_create([
JobHostSummary(
job=job, host=h, host_name=h.name, processed=1,
created=now(), modified=now()
)
for h in inv_groups[group_idx % len(inv_groups)].hosts.all()[:100]
])
if job._is_new:
with transaction.atomic():
if job_template.inventory:
inv_groups = [g for g in job_template.inventory.groups.all()]
if len(inv_groups):
JobHostSummary.objects.bulk_create([
JobHostSummary(
job=job, host=h, host_name=h.name, processed=1,
created=now(), modified=now()
)
for h in inv_groups[group_idx % len(inv_groups)].hosts.all()[:100]
])
group_idx += 1
job_template_idx += 1
if n:
@ -654,11 +673,11 @@ try:
job_idx = 0
for n in spread(n_job_events, n_jobs):
job = jobs[job_idx]
# Check if job already has events, for idempotence
if not job._is_new:
continue
sys.stdout.write('\r Creating %d job events for job %d' % (n, job.id))
sys.stdout.flush()
# Check if job already has events, for idempotence
JobEvent.objects.bulk_create([
JobEvent(
created=now(),
@ -677,3 +696,6 @@ try:
except Rollback:
print('Rolled back changes')
pass
print('')
print('script execution time: {}'.format(datetime.now() - startTime))