mirror of
https://github.com/ansible/awx.git
synced 2026-01-11 18:09:57 -03:30
Merge pull request #4961 from ansible/purge_old_events_endpoints
These old endpoints are hacky, ugly, and no longer used
This commit is contained in:
commit
90caa2fbb7
@ -205,8 +205,6 @@ job_urls = patterns('awx.api.views',
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', 'job_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'job_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/$', 'job_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_plays/$', 'job_job_plays_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_tasks/$', 'job_job_tasks_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
|
||||
|
||||
198
awx/api/views.py
198
awx/api/views.py
@ -3474,204 +3474,6 @@ class JobJobEventsList(BaseJobEventsList):
|
||||
return qs.filter(Q(host__isnull=True) | Q(host__in=host_qs))
|
||||
|
||||
|
||||
class JobJobPlaysList(BaseJobEventsList):
    """Deprecated endpoint returning per-play summary data for a job.

    For each ``playbook_on_play_start`` event of the job, emits a dict with
    the play's identity plus aggregate child-event counts (ok / failed /
    changed / skipped / unreachable).  Pagination and ordering come from the
    URL kwargs (``limit``, ``offset``, ``ordering``), while a small ad-hoc
    set of filters (``id__in``/``id__gt``/``id__lt``/``failed``/
    ``play__icontains``) is read from the query string.
    """

    parent_model = Job
    view_name = _('Job Plays List')
    new_in_200 = True
    deprecated = True

    def get(self, request, *args, **kwargs):
        """Build and return ``{'count': <int>, 'plays': [<play dict>, ...]}``.

        Returns 404 if the job does not exist.
        """
        limit = kwargs.get('limit', 20)
        ordering = kwargs.get('ordering', None)
        offset = kwargs.get('offset', 0)
        all_plays = []
        job = Job.objects.filter(pk=self.kwargs['pk'])
        if not job.exists():
            # Wrapped in _() and using the status= keyword for consistency
            # with JobJobTasksList's equivalent 404 response.
            return Response({'detail': _('Job not found.')}, status=status.HTTP_404_NOT_FOUND)
        job = job[0]

        # Put together a queryset for relevant job events.
        qs = job.job_events.filter(event='playbook_on_play_start')
        if ordering is not None:
            qs = qs.order_by(ordering)

        # This is a bit of a special case for filtering requested by the UI
        # doing this here for the moment until/unless we need to implement more
        # complex filtering (since we aren't under a serializer)
        if "id__in" in request.query_params:
            qs = qs.filter(id__in=[int(filter_id) for filter_id in request.query_params["id__in"].split(",")])
        elif "id__gt" in request.query_params:
            qs = qs.filter(id__gt=request.query_params['id__gt'])
        elif "id__lt" in request.query_params:
            qs = qs.filter(id__lt=request.query_params['id__lt'])
        if "failed" in request.query_params:
            qs = qs.filter(failed=(request.query_params['failed'].lower() == 'true'))
        if "play__icontains" in request.query_params:
            qs = qs.filter(play__icontains=request.query_params['play__icontains'])

        # Total matching plays (before pagination is applied below).
        count = qs.count()

        # Iterate over the relevant play events and get the details.
        for play_event in qs[offset:offset + limit]:
            play_details = dict(id=play_event.id, play=play_event.play, started=play_event.created, failed=play_event.failed, changed=play_event.changed)
            # Aggregate grandchild events (children of the play's tasks):
            # one row per event type, annotated with an 'id__count' key.
            event_aggregates = JobEvent.objects.filter(parent__in=play_event.children.all()).values("event").annotate(Count("id")).order_by()
            # Separate aggregate splitting runner_on_ok rows by their
            # 'changed' flag, so ok vs. changed hosts can be distinguished.
            change_aggregates = JobEvent.objects.filter(parent__in=play_event.children.all(), event='runner_on_ok').values("changed").annotate(Count("id")).order_by()
            failed_count = 0
            ok_count = 0
            changed_count = 0
            skipped_count = 0
            unreachable_count = 0
            for event_aggregate in event_aggregates:
                if event_aggregate['event'] == 'runner_on_failed':
                    failed_count += event_aggregate['id__count']
                elif event_aggregate['event'] == 'runner_on_error':
                    # BUG FIX: previously read event_aggregate['id_count']
                    # (single underscore); Count("id") annotates the key as
                    # 'id__count', so any play containing a runner_on_error
                    # event raised KeyError here.
                    failed_count += event_aggregate['id__count']
                elif event_aggregate['event'] == 'runner_on_skipped':
                    skipped_count = event_aggregate['id__count']
                elif event_aggregate['event'] == 'runner_on_unreachable':
                    unreachable_count = event_aggregate['id__count']
            for change_aggregate in change_aggregates:
                if not change_aggregate['changed']:
                    ok_count = change_aggregate['id__count']
                else:
                    changed_count = change_aggregate['id__count']
            play_details['related'] = {'job_event': reverse('api:job_event_detail', args=(play_event.pk,))}
            play_details['type'] = 'job_event'
            play_details['ok_count'] = ok_count
            play_details['failed_count'] = failed_count
            play_details['changed_count'] = changed_count
            play_details['skipped_count'] = skipped_count
            play_details['unreachable_count'] = unreachable_count
            all_plays.append(play_details)
        return Response(dict(count=count, plays=all_plays))
|
||||
|
||||
|
||||
class JobJobTasksList(BaseJobEventsList):
    """A view for displaying aggregate data about tasks within a job
    and their completion status.

    Deprecated endpoint: for each task-start event under a given parent
    play event, returns per-task host counts (ok / failed / changed /
    skipped / unreachable) compiled from the task's child runner events.
    """
    parent_model = Job
    view_name = _('Job Play Tasks List')
    new_in_200 = True
    deprecated = True

    def get(self, request, *args, **kwargs):
        """Return aggregate data about each of the job tasks that is:

        - an immediate child of the job event
        - corresponding to the spinning up of a new task or playbook

        Responds with ``{'count': <int>, 'tasks': [<task dict>, ...]}``.
        Returns 404 if the job or the parent event does not exist, and 400
        if no ``event_id`` query parameter is supplied.
        """
        results = []
        # Pagination/ordering come from the URL kwargs, not the query string.
        limit = kwargs.get('limit', 20)
        ordering = kwargs.get('ordering', None)
        offset = kwargs.get('offset', 0)
        # Get the job and the parent task.
        # If there's no event ID specified, this will return a 404.
        job = Job.objects.filter(pk=self.kwargs['pk'])
        if not job.exists():
            return Response({'detail': _('Job not found.')}, status=status.HTTP_404_NOT_FOUND)
        job = job[0]

        if 'event_id' not in request.query_params:
            return Response({"detail": _("'event_id' not provided.")}, status.HTTP_400_BAD_REQUEST)

        # -1 default is unreachable here (the key was just checked above);
        # it only guards the int() conversion's argument.
        parent_task = job.job_events.filter(pk=int(request.query_params.get('event_id', -1)))
        if not parent_task.exists():
            return Response({'detail': _('Parent event not found.')}, status.HTTP_404_NOT_FOUND)
        parent_task = parent_task[0]

        # Event types that mark the start of a task (including the implicit
        # setup/fact-gathering task).
        STARTING_EVENTS = ('playbook_on_task_start', 'playbook_on_setup')
        # Pre-aggregated child-event rows, each carrying 'parent__id',
        # 'event', 'changed' and a 'num' count (per get_startevent_queryset —
        # shape assumed from usage below; confirm against the model helper).
        queryset = JobEvent.get_startevent_queryset(parent_task, STARTING_EVENTS)

        # The data above will come back in a list, but we are going to
        # want to access it based on the parent id, so map it into a
        # dictionary.
        data = {}
        for line in queryset[offset:offset + limit]:
            parent_id = line.pop('parent__id')
            data.setdefault(parent_id, [])
            data[parent_id].append(line)

        # Iterate over the start events and compile information about each one
        # using their children.  Restricting to data.keys() keeps this page
        # aligned with the offset/limit slice applied above.
        qs = parent_task.children.filter(event__in=STARTING_EVENTS,
                                         id__in=data.keys())

        # This is a bit of a special case for id filtering requested by the UI
        # doing this here for the moment until/unless we need to implement more
        # complex filtering (since we aren't under a serializer)

        if "id__in" in request.query_params:
            qs = qs.filter(id__in=[int(filter_id) for filter_id in request.query_params["id__in"].split(",")])
        elif "id__gt" in request.query_params:
            qs = qs.filter(id__gt=request.query_params['id__gt'])
        elif "id__lt" in request.query_params:
            qs = qs.filter(id__lt=request.query_params['id__lt'])
        if "failed" in request.query_params:
            qs = qs.filter(failed=(request.query_params['failed'].lower() == 'true'))
        if "task__icontains" in request.query_params:
            qs = qs.filter(task__icontains=request.query_params['task__icontains'])

        if ordering is not None:
            qs = qs.order_by(ordering)

        count = 0
        for task_start_event in qs:
            # Create initial task data.
            task_data = {
                'related': {'job_event': reverse('api:job_event_detail', args=(task_start_event.pk,))},
                'type': 'job_event',
                'changed': task_start_event.changed,
                'changed_count': 0,
                'created': task_start_event.created,
                'failed': task_start_event.failed,
                'failed_count': 0,
                'host_count': 0,
                'id': task_start_event.id,
                'modified': task_start_event.modified,
                # The setup event has no task name of its own.
                'name': 'Gathering Facts' if task_start_event.event == 'playbook_on_setup' else task_start_event.task,
                'reported_hosts': 0,
                'skipped_count': 0,
                'unreachable_count': 0,
                'successful_count': 0,
            }

            # Iterate over the data compiled for this child event, and
            # make appropriate changes to the task data.  Note that
            # unreachable and no-hosts events bump host_count only, not
            # reported_hosts.
            for child_data in data.get(task_start_event.id, []):
                if child_data['event'] == 'runner_on_failed':
                    task_data['failed'] = True
                    task_data['host_count'] += child_data['num']
                    task_data['reported_hosts'] += child_data['num']
                    task_data['failed_count'] += child_data['num']
                elif child_data['event'] == 'runner_on_ok':
                    task_data['host_count'] += child_data['num']
                    task_data['reported_hosts'] += child_data['num']
                    # An ok event with changed=True counts as changed,
                    # otherwise as successful.
                    if child_data['changed']:
                        task_data['changed_count'] += child_data['num']
                        task_data['changed'] = True
                    else:
                        task_data['successful_count'] += child_data['num']
                elif child_data['event'] == 'runner_on_unreachable':
                    task_data['host_count'] += child_data['num']
                    task_data['unreachable_count'] += child_data['num']
                elif child_data['event'] == 'runner_on_skipped':
                    task_data['host_count'] += child_data['num']
                    task_data['reported_hosts'] += child_data['num']
                    task_data['skipped_count'] += child_data['num']
                elif child_data['event'] == 'runner_on_error':
                    # Errors are folded into the failed totals.
                    task_data['host_count'] += child_data['num']
                    task_data['reported_hosts'] += child_data['num']
                    task_data['failed'] = True
                    task_data['failed_count'] += child_data['num']
                elif child_data['event'] == 'runner_on_no_hosts':
                    task_data['host_count'] += child_data['num']
            count += 1
            results.append(task_data)
        return Response(dict(count=count, tasks=results))
|
||||
|
||||
|
||||
class AdHocCommandList(ListCreateAPIView):
|
||||
|
||||
model = AdHocCommand
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user