Initial scheduler integration

This commit is contained in:
Matthew Jones
2014-03-27 15:53:32 -04:00
parent 29a8b46e2f
commit 107fc85110
7 changed files with 144 additions and 61 deletions

View File

@@ -8,6 +8,7 @@ import socket
import urlparse
import logging
import os.path
from dateutil import rrule
# PyYAML
import yaml
@@ -503,6 +504,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
playbooks = reverse('api:project_playbooks', args=(obj.pk,)),
update = reverse('api:project_update_view', args=(obj.pk,)),
project_updates = reverse('api:project_updates_list', args=(obj.pk,)),
schedules = reverse('api:project_schedules_list', args=(obj.pk,)),
activity_stream = reverse('api:project_activity_stream_list', args=(obj.pk,)),
))
# Backwards compatibility.
@@ -861,6 +863,7 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
res.update(dict(
update = reverse('api:inventory_source_update_view', args=(obj.pk,)),
inventory_updates = reverse('api:inventory_source_updates_list', args=(obj.pk,)),
schedules = reverse('api:inventory_source_schedules_list', args=(obj.pk,)),
activity_stream = reverse('api:inventory_activity_stream_list', args=(obj.pk,)),
#hosts = reverse('api:inventory_source_hosts_list', args=(obj.pk,)),
#groups = reverse('api:inventory_source_groups_list', args=(obj.pk,)),
@@ -1081,6 +1084,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
res = super(JobTemplateSerializer, self).get_related(obj)
res.update(dict(
jobs = reverse('api:job_template_jobs_list', args=(obj.pk,)),
schedules = reverse('api:job_template_schedules_list', args=(obj.pk,)),
activity_stream = reverse('api:job_template_activity_stream_list', args=(obj.pk,)),
))
if obj.host_config_key:
@@ -1218,7 +1222,7 @@ class JobEventSerializer(BaseSerializer):
class ScheduleSerializer(BaseSerializer):
class Meta:
model = Schedule
fields = ('*', 'unified_job_template', 'enabled', 'dtstart', 'dtend',
@@ -1233,6 +1237,12 @@ class ScheduleSerializer(BaseSerializer):
res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
return res
def validate_rrule(self, attrs, source):
    """DRF field validator for 'rrule': reject the payload unless the
    submitted string parses as an iCalendar recurrence rule.

    :param attrs: incoming attribute dict being validated
    :param source: key of the rrule field within ``attrs``
    :raises serializers.ValidationError: if dateutil cannot parse the rule
    :returns: ``attrs`` unchanged on success
    """
    try:
        # Parse purely for validation; the parsed rule object is not needed.
        rrule.rrulestr(attrs[source])
    except Exception:
        # dateutil raises several exception types on bad input; collapse
        # them all into a single validation error for the API client.
        raise serializers.ValidationError("rrule parsing failed validation")
    return attrs
class ActivityStreamSerializer(BaseSerializer):

View File

@@ -41,7 +41,7 @@ project_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/update/$', 'project_update_view'),
url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'schedules_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
)
project_update_urls = patterns('awx.api.views',
@@ -104,7 +104,7 @@ inventory_source_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/update/$', 'inventory_source_update_view'),
url(r'^(?P<pk>[0-9]+)/inventory_updates/$', 'inventory_source_updates_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_source_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'schedules_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
#url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
#url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
)
@@ -130,7 +130,7 @@ job_template_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/$', 'job_template_detail'),
url(r'^(?P<pk>[0-9]+)/jobs/$', 'job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/callback/$', 'job_template_callback'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'schedules_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
)
@@ -155,6 +155,11 @@ job_event_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/hosts/$', 'job_event_hosts_list'),
)
schedule_urls = patterns('awx.api.views',
url(r'^$', 'schedule_list'),
url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
)
activity_stream_urls = patterns('awx.api.views',
url(r'^$', 'activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/$', 'activity_stream_detail'),
@@ -166,7 +171,7 @@ v1_urls = patterns('awx.api.views',
url(r'^authtoken/$', 'auth_token_view'),
url(r'^me/$', 'user_me_list'),
url(r'^dashboard/$', 'dashboard_view'),
url(r'^schedules/$', 'schedules_list'),
url(r'^schedules/$', include(schedule_urls)),
url(r'^unified_jobs/$','unified_jobs_list'),
url(r'^organizations/', include(organization_urls)),
url(r'^users/', include(user_urls)),

View File

@@ -92,7 +92,7 @@ class ApiV1RootView(APIView):
data['hosts'] = reverse('api:host_list')
data['job_templates'] = reverse('api:job_template_list')
data['jobs'] = reverse('api:job_list')
data['schedules'] = reverse('api:schedules_list')
data['schedules'] = reverse('api:schedule_list')
data['unified_jobs'] = reverse('api:unified_jobs_list')
data['activity_stream'] = reverse('api:activity_stream_list')
return Response(data)
@@ -237,63 +237,19 @@ class DashboardView(APIView):
'total': job_template_list.count()}
return Response(data)
class SchedulesList(APIView):
class ScheduleList(ListCreateAPIView):
view_name = "Schedules"
new_in_148 = True
def get(self, request, format=None):
data = {
'count': 3,
'next': None,
'previous': None,
'results': [{
'id': 1,
'url': '/api/v1/schedules/1/',
'related': {},
'summary_fields': {},
'created': "2014-02-10T19:13:11.910Z",
'modified': "2014-02-10T19:13:11.910Z",
'name': 'Test schedule',
'description': "We love chris",
'dtstart': '2014-03-20T14:30:57.123Z',
'dtend': '2015-03-20T14:30:57.123Z',
'rrule': 'FREQ=DAILY;COUNT=100;INTERVAL=4',
'job_template': 1,
'project': None,
'inventory_source': None},
{'id': 2,
'url': '/api/v1/schedules/2/',
'related': {},
'summary_fields': {},
'created': "2014-02-10T19:13:11.910Z",
'modified': "2014-02-10T19:13:11.910Z",
'name': 'Test schedule',
'description': "We love chris",
'dtstart': '2014-03-20T14:30:57.123Z',
'dtend': '2015-03-20T14:30:57.123Z',
'rrule': 'FREQ=DAILY;COUNT=100;INTERVAL=4',
'job_template': None,
'project': 1,
'inventory_source': None},
{'id': 3,
'url': '/api/v1/schedules/3/',
'related': {},
'summary_fields': {},
'created': "2014-02-10T19:13:11.910Z",
'modified': "2014-02-10T19:13:11.910Z",
'name': 'Test schedule',
'description': "We love chris",
'dtstart': '2014-03-20T14:30:57.123Z',
'dtend': '2015-03-20T14:30:57.123Z',
'rrule': 'FREQ=DAILY;COUNT=100;INTERVAL=4',
'job_template': None,
'project': None,
'inventory_source': 12}]}
return Response(data)
model = Schedule
serializer_class = ScheduleSerializer
def post(self, request):
return Response({})
class ScheduleDetail(RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single Schedule (/api/v1/schedules/<pk>/)."""

    serializer_class = ScheduleSerializer
    model = Schedule
    new_in_148 = True
class UnifiedJobsList(APIView):
@@ -676,6 +632,17 @@ class ProjectTeamsList(SubListCreateAPIView):
parent_model = Project
relationship = 'teams'
class ProjectSchedulesList(SubListCreateAPIView):
    """Schedules nested under a single project (/projects/<pk>/schedules/)."""

    view_name = "Project Schedules"
    parent_model = Project
    relationship = 'schedules'
    parent_key = 'unified_job_template'
    model = Schedule
    serializer_class = ScheduleSerializer
    new_in_148 = True
class ProjectActivityStreamList(SubListAPIView):
model = ActivityStream
@@ -1203,6 +1170,18 @@ class InventorySourceDetail(RetrieveUpdateAPIView):
serializer_class = InventorySourceSerializer
new_in_14 = True
class InventorySourceSchedulesList(SubListCreateAPIView):
    """Schedules nested under one inventory source (/inventory_sources/<pk>/schedules/)."""

    view_name = "Inventory Source Schedules"
    parent_model = InventorySource
    relationship = 'schedules'
    parent_key = 'unified_job_template'
    model = Schedule
    serializer_class = ScheduleSerializer
    new_in_148 = True
class InventorySourceActivityStreamList(SubListAPIView):
model = ActivityStream
@@ -1281,6 +1260,17 @@ class JobTemplateDetail(RetrieveUpdateDestroyAPIView):
model = JobTemplate
serializer_class = JobTemplateSerializer
class JobTemplateSchedulesList(SubListCreateAPIView):
    """Schedules nested under one job template (/job_templates/<pk>/schedules/)."""

    view_name = "Job Template Schedules"
    parent_model = JobTemplate
    relationship = 'schedules'
    parent_key = 'unified_job_template'
    model = Schedule
    serializer_class = ScheduleSerializer
    new_in_148 = True
class JobTemplateActivityStreamList(SubListAPIView):
model = ActivityStream

View File

@@ -1043,6 +1043,62 @@ class JobEventAccess(BaseAccess):
def can_delete(self, obj):
return False
class ScheduleAccess(BaseAccess):
    '''
    I can see a schedule if I can see its related unified job template;
    I can create, change or delete one if I have write ('change') access
    to that template.
    '''

    model = Schedule

    def get_queryset(self):
        """Return the schedules visible to self.user.

        NOTE(review): for non-superusers this ORs the Schedule queryset
        together with JobTemplate/InventorySource/Project querysets, which
        are different models — confirm this is intended; it looks like it
        should instead filter schedules by accessible templates.
        """
        qs = self.model.objects.filter(active=True).distinct()
        qs = qs.select_related('unified_job_template')
        if self.user.is_superuser:
            return qs
        job_template_qs = self.user.get_queryset(JobTemplate)
        inventory_source_qs = self.user.get_queryset(InventorySource)
        project_qs = self.user.get_queryset(Project)
        return qs | job_template_qs | inventory_source_qs | project_qs

    def can_read(self, obj):
        # Readable when the user can read the unified job class behind the
        # schedule's template; schedules without a template are hidden.
        if self.user.is_superuser:
            return True
        if obj and obj.unified_job_template:
            job_class = obj.unified_job_template._get_unified_job_class()
            return self.user.can_access(job_class, 'read', obj.unified_job_template)
        else:
            return False

    def can_add(self, data):
        # Creation requires 'change' access on the unified job template
        # named in the POST data; a missing/bad pk raises a 400.
        if self.user.is_superuser:
            return True
        pk = get_pk_from_dict(data, 'unified_job_template')
        obj = get_object_or_400(UnifiedJobTemplate, pk=pk)
        if obj:
            job_class = obj._get_unified_job_class()
            return self.user.can_access(job_class, 'change', obj, None)
        else:
            return False

    def can_change(self, obj, data):
        if self.user.is_superuser:
            return True
        if obj and obj.unified_job_template:
            job_class = obj.unified_job_template._get_unified_job_class()
            return self.user.can_access(job_class, 'change', obj.unified_job_template, data)
        else:
            return False

    def can_delete(self, obj):
        # Delete permission deliberately mirrors 'change' on the template.
        if self.user.is_superuser:
            return True
        if obj and obj.unified_job_template:
            job_class = obj.unified_job_template._get_unified_job_class()
            return self.user.can_access(job_class, 'change', obj.unified_job_template, None)
        else:
            return False
class ActivityStreamAccess(BaseAccess):
'''
I can see activity stream events only when I have permission on all objects included in the event
@@ -1173,4 +1229,5 @@ register_access(JobTemplate, JobTemplateAccess)
register_access(Job, JobAccess)
register_access(JobHostSummary, JobHostSummaryAccess)
register_access(JobEvent, JobEventAccess)
register_access(Schedule, ScheduleAccess)
register_access(ActivityStream, ActivityStreamAccess)

View File

@@ -8,7 +8,7 @@ from django.db import models
# AWX
from awx.main.models.base import *
from django.core.urlresolvers import reverse
logger = logging.getLogger('awx.main.models.schedule')
@@ -33,7 +33,7 @@ class Schedule(CommonModel):
default=True,
)
dtstart = models.DateTimeField(
)
dtend = models.DateTimeField(
null=True,
@@ -49,5 +49,9 @@ class Schedule(CommonModel):
editable=False,
)
def get_absolute_url(self):
    """Return the API detail URL for this schedule.

    The previous placeholder returned the list endpoint; the
    'api:schedule_detail' route exists and takes the schedule pk.
    """
    return reverse('api:schedule_detail', args=(self.pk,))
def save(self, *args, **kwargs):
    # NOTE(review): currently a plain pass-through to the parent save();
    # presumably an intentional hook point for future rrule/next-run
    # computation — confirm before removing the override.
    super(Schedule, self).save(*args, **kwargs)

View File

@@ -32,6 +32,7 @@ from kombu import Connection, Exchange, Queue
# Celery
from celery import Celery, Task, task
from celery.execute import send_task
from djcelery.models import PeriodicTask, TaskMeta
# Django
from django.conf import settings
@@ -42,7 +43,7 @@ from django.utils.timezone import now
from django.utils.tzinfo import FixedOffset
# AWX
from awx.main.models import Job, JobEvent, ProjectUpdate, InventoryUpdate
from awx.main.models import Job, JobEvent, ProjectUpdate, InventoryUpdate, Schedule
from awx.main.utils import get_ansible_version, decrypt_field, update_scm_url
__all__ = ['RunJob', 'RunProjectUpdate', 'RunInventoryUpdate', 'handle_work_error']
@@ -51,6 +52,15 @@ logger = logging.getLogger('awx.main.tasks')
# FIXME: Cleanly cancel task when celery worker is stopped.
@task(bind=True)
def tower_periodic_scheduler(self):
    """Celery beat entry point for the Tower scheduler.

    Loads this task's own djcelery PeriodicTask row and stamps it with the
    current time, so the next invocation can tell when the scheduler last
    ran.
    """
    run_now = now()
    periodic_task = PeriodicTask.objects.get(task='awx.main.tasks.tower_periodic_scheduler')
    # Use the module logger rather than print() so output goes to the
    # configured log handlers instead of the worker's stdout.
    logger.debug("Last run was: %s", periodic_task.last_run_at)
    periodic_task.last_run_at = run_now
    periodic_task.save()
@task()
def notify_task_runner(metadata_dict):
time.sleep(1)

View File

@@ -3,6 +3,7 @@
import os
import sys
from datetime import timedelta
# Update this module's local settings from the global settings module.
from django.conf import global_settings
@@ -285,6 +286,12 @@ CELERYD_TASK_SOFT_TIME_LIMIT = None
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYBEAT_MAX_LOOP_INTERVAL = 60
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERYBEAT_SCHEDULE = {
'tower_scheduler': {
'task': 'awx.main.tasks.tower_periodic_scheduler',
'schedule': timedelta(seconds=30)
},
}
# Any ANSIBLE_* settings will be passed to the subprocess environment by the
# celery task.