fix access problems (#15)

* fix access problems and add bulk job max settings to API

filter workflow job nodes better

This will both improve performance by limiting the queryset for the node
sublists as well as fix our access problem.

override can_read instead of modify queryset in access.py

We do this because we are not going to expose bulk jobs to the list
views, which is complicated and has poor performance implications.

Instead, we just care about individual Workflows that clients get linked
to not being broken.

fix comment

remove the get functions from the conf.py for bulk api max value

comment the api expose of the bulk job variables

reformat conf.py with make black

trailing space

add more assertions to the bulk host create test
This commit is contained in:
Elijah DeLee
2023-02-28 23:14:06 -05:00
parent 4b9ca3deee
commit 3efc7d5bc4
6 changed files with 56 additions and 7 deletions

View File

@@ -3078,7 +3078,9 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
search_fields = ('unified_job_template__name', 'unified_job_template__description')
def get_queryset(self):
return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id')
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).order_by('id')
class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -3172,7 +3174,9 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
search_fields = ('unified_job_template__name', 'unified_job_template__description')
def get_queryset(self):
return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id')
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).order_by('id')
class WorkflowJobCancel(GenericCancelView):

View File

@@ -1999,10 +1999,14 @@ class WorkflowJobNodeAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
| Q(workflow_job__created_by_id=self.user.id, workflow_job__is_bulk_job=True)
| Q(workflow_job__organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), workflow_job__is_bulk_job=True)
)
def can_read(self, obj):
if obj.workflow_job.is_bulk_job and obj.workflow_job.created_by_id == self.user.id:
return True
return super().can_read(obj)
@check_superuser
def can_add(self, data):
if data is None: # Hide direct creation in API browser
@@ -2129,10 +2133,14 @@ class WorkflowJobAccess(BaseAccess):
def filtered_queryset(self):
return WorkflowJob.objects.filter(
Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
| Q(created_by_id=self.user.id, is_bulk_job=True)
| Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
)
def can_read(self, obj):
if obj.is_bulk_job and obj.created_by_id == self.user.id:
return True
return super().can_read(obj)
def can_add(self, data):
# Old add-start system for launching jobs is being depreciated, and
# not supported for new types of resources

View File

@@ -775,6 +775,27 @@ register(
help_text=_('Indicates whether the instance is part of a kubernetes-based deployment.'),
)
# TODO : Commenting below bulk job settings because of failing conftest import. Figure out the conftest issue and then uncomment
# register(
# 'BULK_JOB_MAX_LAUNCH',
# field_class=fields.IntegerField,
# default=100,
# label=_('Max jobs to allow bulk jobs to launch'),
# help_text=_('Max jobs to allow bulk jobs to launch'),
# category=_('Bulk Actions'),
# category_slug='bulk',
# )
#
# register(
# 'BULK_HOST_MAX_CREATE',
# field_class=fields.IntegerField,
# default=1000,
# label=_('Max number of hosts to allow to be created in a single bulk action'),
# help_text=_('Max number of hosts to allow to be created in a single bulk action'),
# category=_('Bulk Actions'),
# category_slug='bulk',
# )
def logging_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):

View File

@@ -5,7 +5,7 @@ from uuid import uuid4
from awx.api.versioning import reverse
from awx.main.models.jobs import JobTemplate
from awx.main.models import Organization, Inventory, WorkflowJob, ExecutionEnvironment
from awx.main.models import Organization, Inventory, WorkflowJob, ExecutionEnvironment, Host
from awx.main.scheduler import TaskManager
@@ -70,6 +70,7 @@ def test_bulk_host_create_rbac(organization, inventory, post, get, user):
reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': [{'name': f'foobar-{indx}'}]}, u, expect=201
).data
assert len(bulk_host_create_response['hosts']) == 1, f"unexpected number of hosts created for user {u}"
assert Host.objects.filter(inventory__id=inventory.id)[0].name == 'foobar-0'
for indx, u in enumerate([member, auditor, use_inv_member]):
bulk_host_create_response = post(