From 66fb5ab07adba1c9304ca7a747aceaff21903c92 Mon Sep 17 00:00:00 2001
From: Luke Sneeringer
Date: Wed, 18 Jun 2014 12:51:53 -0500
Subject: [PATCH] Paginate the job_tasks page.

This commit adds pagination to the /api/v1/jobs/{id}/job_tasks/ page,
which previously lacked it.

AC-1340
---
 awx/api/utils/__init__.py   |  0
 awx/api/utils/decorators.py | 66 +++++++++++++++++++++++++
 awx/api/views.py            | 97 +++++++++++++++++++++++++++++++------
 3 files changed, 147 insertions(+), 16 deletions(-)
 create mode 100644 awx/api/utils/__init__.py
 create mode 100644 awx/api/utils/decorators.py

diff --git a/awx/api/utils/__init__.py b/awx/api/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/api/utils/decorators.py b/awx/api/utils/decorators.py
new file mode 100644
index 0000000000..79162d4b3e
--- /dev/null
+++ b/awx/api/utils/decorators.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2014 Ansible, Inc.
+# All Rights Reserved.
+
+import collections
+import copy
+import functools
+
+from rest_framework.response import Response
+from rest_framework.settings import api_settings
+
+
+def paginated(method):
+    """Given a method with a Django REST Framework API method signature
+    (e.g. `def get(self, request, ...):`), abstract out boilerplate pagination
+    duties.
+
+    This causes the method to receive two additional keyword arguments:
+    `limit` and `offset`. The method is expected to return a two-tuple,
+    with a result list as the first item and the total number of results
+    (across all pages) as the second item.
+    """
+    @functools.wraps(method)
+    def func(self, request, *args, **kwargs):
+        # Manually spin up pagination.
+        # How many results do we show?
+        limit = api_settings.PAGINATE_BY
+        if request.QUERY_PARAMS.get(api_settings.PAGINATE_BY_PARAM, False):
+            limit = request.QUERY_PARAMS[api_settings.PAGINATE_BY_PARAM]
+        if api_settings.MAX_PAGINATE_BY:
+            limit = min(api_settings.MAX_PAGINATE_BY, limit)
+        limit = int(limit)
+
+        # What page are we on?
+        page = int(request.QUERY_PARAMS.get('page', 1))
+        offset = (page - 1) * limit
+
+        # Add the limit, offset, and page variables to the keyword arguments
+        # being sent to the underlying method.
+        kwargs['limit'] = limit
+        kwargs['offset'] = offset
+
+        # Okay, call the underlying method.
+        results, count = method(self, request, *args, **kwargs)
+
+        # Determine the next and previous pages, if any.
+        prev, next_ = None, None
+        if page > 1:
+            get_copy = copy.copy(request.GET)
+            get_copy['page'] = page - 1
+            prev = '%s?%s' % (request.path, get_copy.urlencode())
+        if count > offset + limit:
+            get_copy = copy.copy(request.GET)
+            get_copy['page'] = page + 1
+            next_ = '%s?%s' % (request.path, get_copy.urlencode())
+
+        # Compile the results into a dictionary with pagination
+        # information.
+        answer = collections.OrderedDict((
+            ('count', count),
+            ('next', next_),
+            ('previous', prev),
+            ('results', results),
+        ))
+
+        # Okay, we're done; return response data.
+        return Response(answer)
+    return func
diff --git a/awx/api/views.py b/awx/api/views.py
index 70f2d157ec..2bb2f3165b 100644
--- a/awx/api/views.py
+++ b/awx/api/views.py
@@ -21,6 +21,7 @@ from django.utils.timezone import now
 # Django REST Framework
 from rest_framework.authtoken.views import ObtainAuthToken
 from rest_framework.exceptions import PermissionDenied, ParseError
+from rest_framework.pagination import BasePaginationSerializer
 from rest_framework.parsers import YAMLParser
 from rest_framework.permissions import AllowAny, IsAuthenticated
 from rest_framework.renderers import YAMLRenderer
@@ -42,6 +43,7 @@ from awx.api.authentication import JobTaskAuthentication
 from awx.api.permissions import *
 from awx.api.renderers import *
 from awx.api.serializers import *
+from awx.api.utils.decorators import paginated
 from awx.api.generics import *
 from awx.api.generics import get_view_name
 
@@ -1517,25 +1519,85 @@ class JobJobPlaysList(BaseJobEventsList):
             all_plays.append(play_details)
         return Response(all_plays)
 
-class JobJobTasksList(BaseJobEventsList):
+class JobJobTasksList(BaseJobEventsList):
+    """A view for displaying aggregate data about tasks within a job
+    and their completion status.
+    """
     parent_model = Job
     authentication_classes = [JobTaskAuthentication] + \
                              api_settings.DEFAULT_AUTHENTICATION_CLASSES
     permission_classes = (JobTaskPermission,)
     new_in_150 = True
 
-    def get(self, request, *args, **kwargs):
-        tasks = []
+    @paginated
+    def get(self, request, limit, offset, *args, **kwargs):
+        """Return aggregate data about each of the job tasks that is:
+          - an immediate child of the job event
+          - corresponding to the spinning up of a new task or playbook
+        """
+        results = []
+
+        # Get the job and the parent task.
+        # If there's no event ID specified, this will return a 404.
+        # FIXME: Make this a good error message.
         job = get_object_or_404(self.parent_model, pk=self.kwargs['pk'])
-        parent_task = get_object_or_404(job.job_events, pk=int(request.QUERY_PARAMS.get('event_id', -1)))
-        for task_start_event in parent_task.children.filter(Q(event='playbook_on_task_start') | Q(event='playbook_on_setup')):
-            task_data = dict(id=task_start_event.id, name="Gathering Facts" if task_start_event.event == 'playbook_on_setup' else task_start_event.task,
-                             created=task_start_event.created, modified=task_start_event.modified,
-                             failed=False, changed=False, host_count=0, reported_hosts=0, successful_count=0, failed_count=0,
-                             changed_count=0, skipped_count=0)
-            for child_event in task_start_event.children.all():
-                if child_event.event == 'runner_on_failed':
+        parent_task = get_object_or_404(job.job_events,
+            pk=int(request.QUERY_PARAMS.get('event_id', -1)),
+        )
+
+        # Some events correspond to a playbook or task starting up,
+        # and these are what we're interested in here.
+        STARTING_EVENTS = ('playbook_on_task_start', 'playbook_on_setup')
+
+        # We need to pull information about each start event.
+        #
+        # This is super tricky, because this table has a one-to-many
+        # relationship with itself (parent-child), and we're getting
+        # information for an arbitrary number of children. This means we
+        # need stats on grandchildren, sorted by child.
+        queryset = (JobEvent.objects.filter(parent__parent=parent_task,
+                                            parent__event__in=STARTING_EVENTS)
+                                    .values('parent__id', 'event', 'changed')
+                                    .annotate(num=Count('event'))
+                                    .order_by('parent__id'))
+        count = queryset.count()
+
+        # The data above will come back in a list, but we are going to
+        # want to access it based on the parent id, so map it into a
+        # dictionary.
+        data = {}
+        for line in queryset[offset:offset + limit]:
+            parent_id = line.pop('parent__id')
+            data.setdefault(parent_id, [])
+            data[parent_id].append(line)
+
+        # Iterate over the start events and compile information about each one.
+        qs = parent_task.children.filter(event__in=STARTING_EVENTS,
+                                         id__in=data.keys())
+        for task_start_event in qs:
+            # Create initial task data.
+            task_data = {
+                'changed': False,
+                'changed_count': 0,
+                'created': task_start_event.created,
+                'failed': False,
+                'failed_count': 0,
+                'host_count': 0,
+                'id': task_start_event.id,
+                'modified': task_start_event.modified,
+                'name': 'Gathering Facts' if
+                    task_start_event.event == 'playbook_on_setup' else
+                    task_start_event.task,
+                'reported_hosts': 0,
+                'skipped_count': 0,
+                'successful_count': 0,
+            }
+
+            # Iterate over the data compiled for this child event, and
+            # make appropriate changes to the task data.
+            for child_data in data.get(task_start_event.id, []):
+                if child_data['event'] == 'runner_on_failed':
                     task_data['failed'] = True
                     task_data['host_count'] += 1
                     task_data['reported_hosts'] += 1
@@ -1556,11 +1618,14 @@ class JobJobTasksList(BaseJobEventsList):
                     task_data['host_count'] += 1
                     task_data['reported_hosts'] += 1
                     task_data['failed'] = True
-                    task_data['failed_count'] += 1
-                elif child_event.event == 'runner_on_no_hosts':
-                    task_data['host_count'] += 1
-            tasks.append(task_data)
-        return Response(tasks)
+                    task_data['failed_count'] += child_data['num']
+                elif child_data['event'] == 'runner_on_no_hosts':
+                    task_data['host_count'] += child_data['num']
+            results.append(task_data)
+
+        # Done; return the results and count.
+        return results, count
+
 
 
 class UnifiedJobTemplateList(ListAPIView):
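
For reviewers, a minimal sketch of how a view consumes the new decorator, mirroring the JobJobTasksList change above. The view class, queryset, and import path below are hypothetical and only illustrate the contract: the decorated method receives `limit` and `offset`, slices its own queryset, and returns a `(results, count)` two-tuple; the decorator then builds the usual `count`/`next`/`previous`/`results` response body around it.

    # Hypothetical usage example -- not part of this patch.
    from rest_framework.views import APIView

    from awx.api.utils.decorators import paginated
    from awx.main.models import JobEvent   # assumed import path, for illustration


    class ExampleTaskList(APIView):

        @paginated
        def get(self, request, limit, offset, *args, **kwargs):
            # Count against the full queryset, then return only the
            # requested page; the decorator turns this two-tuple into a
            # Response with next/previous links.
            queryset = JobEvent.objects.filter(event='playbook_on_task_start')
            count = queryset.count()
            results = [{'id': event.id, 'task': event.task}
                       for event in queryset[offset:offset + limit]]
            return results, count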