mirror of https://github.com/ansible/awx.git
Adding some dashboard time-series graphs and new qsstats vendored dependency
commit f39939b152
parent d99e762f36
@@ -177,6 +177,7 @@ v1_urls = patterns('awx.api.views',
    url(r'^authtoken/$', 'auth_token_view'),
    url(r'^me/$', 'user_me_list'),
    url(r'^dashboard/$', 'dashboard_view'),
    url(r'^dashboard/graphs/$', 'dashboard_graph_view'),
    url(r'^schedules/', include(schedule_urls)),
    url(r'^organizations/', include(organization_urls)),
    url(r'^users/', include(user_urls)),
@@ -3,6 +3,8 @@

# Python
import datetime
import dateutil
import time
import re
import socket
import sys
@@ -11,7 +13,7 @@ import sys

from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db.models import Q, Count
from django.db.models import Q, Count, Sum

from django.db import IntegrityError
from django.shortcuts import get_object_or_404
@@ -34,6 +36,9 @@ from rest_framework import status

from ansi2html import Ansi2HTMLConverter
from ansi2html.style import SCHEME

# QSStats
import qsstats

# AWX
from awx.main.task_engine import TaskSerializer
from awx.main.models import *
@@ -147,7 +152,6 @@ class DashboardView(APIView):

    def get(self, request, format=None):
        ''' Show Dashboard Details '''

        data = SortedDict()
        user_inventory = get_user_queryset(request.user, Inventory)
        inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0)
@@ -245,6 +249,57 @@ class DashboardView(APIView):
                                 'total': job_template_list.count()}
        return Response(data)

class DashboardGraphView(APIView):

    view_name = "Dashboard Graphs"
    new_in_20 = True

    def get(self, request, format=None):
        period = request.QUERY_PARAMS.get('period', 'month')
        job_type = request.QUERY_PARAMS.get('job_type', 'all')

        # Working around a django 1.5 bug:
        # https://code.djangoproject.com/ticket/17260
        settings.USE_TZ = False

        qs = User.objects.all()
        user_unified_jobs = get_user_queryset(request.user, UnifiedJob)
        user_hosts = get_user_queryset(request.user, Host)

        success_qss = qsstats.QuerySetStats(user_unified_jobs.filter(status='successful'), 'finished')
        failed_qss = qsstats.QuerySetStats(user_unified_jobs.filter(status='failed'), 'finished')

        created_hosts = qsstats.QuerySetStats(user_hosts, 'created')
        count_hosts = user_hosts.all().count()

        start_date = datetime.datetime.now()
        if period == 'month':
            end_date = start_date - dateutil.relativedelta.relativedelta(months=1)
            interval = 'days'
        elif period == 'week':
            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1)
            interval = 'days'
        elif period == 'day':
            end_date = start_date - dateutil.relativedelta.relativedelta(days=1)
            interval = 'hours'

        dashboard_data = {"jobs": {"successful": [], "failed": []}, "hosts": []}
        for element in success_qss.time_series(end_date, start_date, interval=interval):
            dashboard_data['jobs']['successful'].append([time.mktime(element[0].timetuple()),
                                                         element[1]])
        for element in failed_qss.time_series(end_date, start_date, interval=interval):
            dashboard_data['jobs']['failed'].append([time.mktime(element[0].timetuple()),
                                                     element[1]])
        last_delta = 0
        host_data = []
        for element in created_hosts.time_series(end_date, start_date, interval=interval)[::-1]:
            host_data.append([time.mktime(element[0].timetuple()),
                              count_hosts - last_delta])
            count_hosts -= last_delta
            last_delta = element[1]
        dashboard_data['hosts'] = host_data[::-1]
        return Response(dashboard_data)

class ScheduleList(ListAPIView):

    view_name = "Schedules"
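For reference, this is roughly how a client could consume the new /api/v1/dashboard/graphs/ endpoint added above (a sketch only; the host, credentials, and use of the requests library are assumptions, not part of this commit):

import requests

AWX_HOST = 'https://awx.example.com'   # assumption: your AWX host
auth = ('admin', 'password')           # assumption: valid API credentials

# period may be 'month' (default), 'week', or 'day'; job_type defaults to 'all'.
resp = requests.get(AWX_HOST + '/api/v1/dashboard/graphs/',
                    params={'period': 'week', 'job_type': 'all'},
                    auth=auth)
data = resp.json()

# data['jobs']['successful'] and data['jobs']['failed'] are lists of
# [unix_timestamp, count] pairs; data['hosts'] is a cumulative host-count series.
for ts, count in data['jobs']['successful']:
    print(ts, count)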
@@ -24,6 +24,7 @@ django-polymorphic==0.5.3 (polymorphic/*)
django-split-settings==0.1.1 (split_settings/*)
django-taggit==0.11.2 (taggit/*)
djangorestframework==2.3.13 (rest_framework/*)
django-qsstats-magic==0.7.2 (django-qsstats-magic/*)
gevent-socketio==0.3.5-rc1 (socketio/*)
gevent-websocket==0.9.3 (geventwebsocket/*)
httplib2==0.8 (httplib2/*)
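The vendored copy below corresponds to the django-qsstats-magic==0.7.2 entry added above; outside the bundled awx/lib/site-packages tree, an equivalent setup (an alternative, not part of this commit) would be:

pip install django-qsstats-magic==0.7.2 python-dateutil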
178 awx/lib/site-packages/qsstats/__init__.py (new file)
@@ -0,0 +1,178 @@
__author__ = 'Matt Croydon, Mikhail Korobov, Pawel Tomasiewicz'
__version__ = (0, 7, 0)

from functools import partial
import datetime
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse

from django.db.models import Count
from django.db import DatabaseError, transaction
from django.conf import settings

from qsstats.utils import get_bounds, _to_datetime, _parse_interval, get_interval_sql, _remove_time
from qsstats import compat
from qsstats.exceptions import *

class QuerySetStats(object):
    """
    Generates statistics about a queryset using Django aggregates. QuerySetStats
    is able to handle snapshots of data (for example this day, week, month, or
    year) or generate time series data suitable for graphing.
    """
    def __init__(self, qs=None, date_field=None, aggregate=None, today=None):
        self.qs = qs
        self.date_field = date_field
        self.aggregate = aggregate or Count('id')
        self.today = today or self.update_today()

    def _guess_engine(self):
        if hasattr(self.qs, 'db'): # django 1.2+
            engine_name = settings.DATABASES[self.qs.db]['ENGINE']
        else:
            engine_name = settings.DATABASE_ENGINE
        if 'mysql' in engine_name:
            return 'mysql'
        if 'postg' in engine_name: # postgres, postgis
            return 'postgresql'
        if 'sqlite' in engine_name:
            return 'sqlite'

    # Aggregates for a specific period of time

    def for_interval(self, interval, dt, date_field=None, aggregate=None):
        start, end = get_bounds(dt, interval)
        date_field = date_field or self.date_field
        kwargs = {'%s__range' % date_field : (start, end)}
        return self._aggregate(date_field, aggregate, kwargs)

    def this_interval(self, interval, date_field=None, aggregate=None):
        method = getattr(self, 'for_%s' % interval)
        return method(self.today, date_field, aggregate)

    # support for this_* and for_* methods
    def __getattr__(self, name):
        if name.startswith('for_'):
            return partial(self.for_interval, name[4:])
        if name.startswith('this_'):
            return partial(self.this_interval, name[5:])
        raise AttributeError

    def time_series(self, start, end=None, interval='days',
                    date_field=None, aggregate=None, engine=None):
        ''' Aggregate over time intervals '''

        end = end or self.today
        args = [start, end, interval, date_field, aggregate]
        engine = engine or self._guess_engine()
        sid = transaction.savepoint()
        try:
            return self._fast_time_series(*(args+[engine]))
        except (QuerySetStatsError, DatabaseError,):
            transaction.savepoint_rollback(sid)
            return self._slow_time_series(*args)

    def _slow_time_series(self, start, end, interval='days',
                          date_field=None, aggregate=None):
        ''' Aggregate over time intervals using 1 sql query for one interval '''

        num, interval = _parse_interval(interval)

        if interval not in ['minutes', 'hours',
                            'days', 'weeks',
                            'months', 'years'] or num != 1:
            raise InvalidInterval('Interval is currently not supported.')

        method = getattr(self, 'for_%s' % interval[:-1])

        stat_list = []
        dt, end = _to_datetime(start), _to_datetime(end)
        while dt <= end:
            value = method(dt, date_field, aggregate)
            stat_list.append((dt, value,))
            dt = dt + relativedelta(**{interval : 1})
        return stat_list

    def _fast_time_series(self, start, end, interval='days',
                          date_field=None, aggregate=None, engine=None):
        ''' Aggregate over time intervals using just 1 sql query '''

        date_field = date_field or self.date_field
        aggregate = aggregate or self.aggregate
        engine = engine or self._guess_engine()

        num, interval = _parse_interval(interval)

        start, _ = get_bounds(start, interval.rstrip('s'))
        _, end = get_bounds(end, interval.rstrip('s'))
        interval_sql = get_interval_sql(date_field, interval, engine)

        kwargs = {'%s__range' % date_field : (start, end)}
        aggregate_data = self.qs.extra(select = {'d': interval_sql}).\
                         filter(**kwargs).order_by().values('d').\
                         annotate(agg=aggregate)

        today = _remove_time(compat.now())
        def to_dt(d):
            if isinstance(d, basestring):
                return parse(d, yearfirst=True, default=today)
            return d

        data = dict((to_dt(item['d']), item['agg']) for item in aggregate_data)

        stat_list = []
        dt = start
        while dt < end:
            idx = 0
            value = 0
            for i in range(num):
                value = value + data.get(dt, 0)
                if i == 0:
                    stat_list.append((dt, value,))
                    idx = len(stat_list) - 1
                elif i == num - 1:
                    stat_list[idx] = (dt, value,)
                dt = dt + relativedelta(**{interval : 1})

        return stat_list

    # Aggregate totals using a date or datetime as a pivot

    def until(self, dt, date_field=None, aggregate=None):
        return self.pivot(dt, 'lte', date_field, aggregate)

    def until_now(self, date_field=None, aggregate=None):
        return self.pivot(compat.now(), 'lte', date_field, aggregate)

    def after(self, dt, date_field=None, aggregate=None):
        return self.pivot(dt, 'gte', date_field, aggregate)

    def after_now(self, date_field=None, aggregate=None):
        return self.pivot(compat.now(), 'gte', date_field, aggregate)

    def pivot(self, dt, operator=None, date_field=None, aggregate=None):
        operator = operator or self.operator
        if operator not in ['lt', 'lte', 'gt', 'gte']:
            raise InvalidOperator("Please provide a valid operator.")

        kwargs = {'%s__%s' % (date_field or self.date_field, operator) : dt}
        return self._aggregate(date_field, aggregate, kwargs)

    # Utility functions
    def update_today(self):
        _now = compat.now()
        self.today = _remove_time(_now)
        return self.today

    def _aggregate(self, date_field=None, aggregate=None, filter=None):
        date_field = date_field or self.date_field
        aggregate = aggregate or self.aggregate

        if not date_field:
            raise DateFieldMissing("Please provide a date_field.")

        if self.qs is None:
            raise QuerySetMissing("Please provide a queryset.")

        agg = self.qs.filter(**filter).aggregate(agg=aggregate)
        return agg['agg']
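Taken together, the class above supports snapshot aggregates (the dynamic for_*/this_* methods that __getattr__ maps onto for_interval/this_interval), time series, and pivot totals. A minimal usage sketch, assuming a Django project with django.contrib.auth (the same model and date field the bundled tests use):

import datetime
from django.contrib.auth.models import User
from qsstats import QuerySetStats

qss = QuerySetStats(User.objects.all(), 'date_joined')

# Snapshot aggregates: this_day()/this_month() resolve through __getattr__
# to this_interval('day') / this_interval('month').
signups_today = qss.this_day()
signups_this_month = qss.this_month()

# Time series: one (datetime, count) tuple per day over the last week.
today = datetime.datetime.now()
series = qss.time_series(today - datetime.timedelta(days=7), today, interval='days')

# Pivot totals relative to a date.
total_until_now = qss.until_now()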
8 awx/lib/site-packages/qsstats/compat.py (new file)
@@ -0,0 +1,8 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import datetime
try:
    from django.utils.timezone import now
except ImportError:
    now = datetime.datetime.now
17 awx/lib/site-packages/qsstats/exceptions.py (new file)
@@ -0,0 +1,17 @@
class QuerySetStatsError(Exception):
    pass

class InvalidInterval(QuerySetStatsError):
    pass

class UnsupportedEngine(QuerySetStatsError):
    pass

class InvalidOperator(QuerySetStatsError):
    pass

class DateFieldMissing(QuerySetStatsError):
    pass

class QuerySetMissing(QuerySetStatsError):
    pass
1 awx/lib/site-packages/qsstats/models.py (new file)
@@ -0,0 +1 @@
# Hello, testrunner!
108 awx/lib/site-packages/qsstats/tests.py (new file)
@@ -0,0 +1,108 @@
from __future__ import absolute_import
import datetime

from django.test import TestCase
from django.contrib.auth.models import User
from qsstats import QuerySetStats, InvalidInterval, DateFieldMissing, QuerySetMissing
from qsstats import compat
from .utils import _remove_time

class QuerySetStatsTestCase(TestCase):
    def test_basic_today(self):
        # We'll be making sure that this user is found
        u1 = User.objects.create_user('u1', 'u1@example.com')
        # And that this user is not
        u2 = User.objects.create_user('u2', 'u2@example.com')
        u2.is_active = False
        u2.save()

        # Create a QuerySet and QuerySetStats
        qs = User.objects.filter(is_active=True)
        qss = QuerySetStats(qs, 'date_joined')

        # We should only see a single user
        self.assertEqual(qss.this_day(), 1)

    def assertTimeSeriesWorks(self, today):
        seven_days_ago = today - datetime.timedelta(days=7)
        for j in range(1,8):
            for i in range(0,j):
                u = User.objects.create_user('p-%s-%s' % (j, i), 'p%s-%s@example.com' % (j, i))
                u.date_joined = today - datetime.timedelta(days=i)
                u.save()
        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')
        time_series = qss.time_series(seven_days_ago, today)
        self.assertEqual([t[1] for t in time_series], [0, 1, 2, 3, 4, 5, 6, 7])

    def test_time_series(self):
        _now = compat.now()
        today = _remove_time(_now)
        self.assertTimeSeriesWorks(today)

    def test_time_series_naive(self):
        self.assertTimeSeriesWorks(datetime.date.today())

    def test_time_series_weeks(self):
        day = datetime.date(year=2013, month=4, day=5)

        u = User.objects.create_user('user', 'user@example.com')
        u.date_joined = day
        u.save()

        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')
        qss.time_series(day - datetime.timedelta(days=30), day, interval='weeks')

    def test_until(self):
        now = compat.now()
        today = _remove_time(now)
        yesterday = today - datetime.timedelta(days=1)

        u = User.objects.create_user('u', 'u@example.com')
        u.date_joined = today
        u.save()

        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')

        self.assertEqual(qss.until(now), 1)
        self.assertEqual(qss.until(today), 1)
        self.assertEqual(qss.until(yesterday), 0)
        self.assertEqual(qss.until_now(), 1)

    def test_after(self):
        now = compat.now()
        today = _remove_time(now)
        tomorrow = today + datetime.timedelta(days=1)

        u = User.objects.create_user('u', 'u@example.com')
        u.date_joined = today
        u.save()

        qs = User.objects.all()
        qss = QuerySetStats(qs, 'date_joined')

        self.assertEqual(qss.after(today), 1)
        self.assertEqual(qss.after(now), 0)
        u.date_joined = tomorrow
        u.save()
        self.assertEqual(qss.after(now), 1)

    # MC_TODO: aggregate_field tests

    def test_query_set_missing(self):
        qss = QuerySetStats(date_field='foo')
        for method in ['this_day', 'this_month', 'this_year']:
            self.assertRaises(QuerySetMissing, getattr(qss, method))

    def test_date_field_missing(self):
        qss = QuerySetStats(User.objects.all())
        for method in ['this_day', 'this_month', 'this_year']:
            self.assertRaises(DateFieldMissing, getattr(qss, method))

    def test_invalid_interval(self):
        qss = QuerySetStats(User.objects.all(), 'date_joined')
        def _invalid():
            qss.time_series(qss.today, qss.today, interval='monkeys')
        self.assertRaises(InvalidInterval, _invalid)
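The one-line models.py above exists so Django's test runner will pick qsstats up as an app. Assuming the package were added to INSTALLED_APPS in a development settings file (an assumption; nothing in this commit does so), the vendored suite could be run with:

python manage.py test qsstats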
98 awx/lib/site-packages/qsstats/utils.py (new file)
@@ -0,0 +1,98 @@
import datetime
import re
from dateutil.relativedelta import relativedelta, MO
from qsstats.exceptions import InvalidInterval, UnsupportedEngine
from qsstats import compat

def _remove_time(dt):
    tzinfo = getattr(dt, 'tzinfo', compat.now().tzinfo)
    return datetime.datetime(dt.year, dt.month, dt.day, tzinfo=tzinfo)

def _to_datetime(dt):
    if isinstance(dt, datetime.datetime):
        return dt
    return _remove_time(dt)

def _parse_interval(interval):
    num = 1
    match = re.match('(\d+)([A-Za-z]+)', interval)

    if match:
        num = int(match.group(1))
        interval = match.group(2)
    return num, interval

def get_bounds(dt, interval):
    ''' Returns interval bounds the datetime is in. '''

    day = _to_datetime(_remove_time(dt))
    dt = _to_datetime(dt)

    if interval == 'minute':
        begin = datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, tzinfo=dt.tzinfo)
        end = begin + relativedelta(minutes=1)
    elif interval == 'hour':
        begin = datetime.datetime(dt.year, dt.month, dt.day, dt.hour, tzinfo=dt.tzinfo)
        end = begin + relativedelta(hours=1)
    elif interval == 'day':
        begin = day
        end = day + relativedelta(days=1)
    elif interval == 'week':
        begin = day - relativedelta(weekday=MO(-1))
        end = begin + datetime.timedelta(days=7)
    elif interval == 'month':
        begin = datetime.datetime(dt.year, dt.month, 1, tzinfo=dt.tzinfo)
        end = begin + relativedelta(months=1)
    elif interval == 'year':
        begin = datetime.datetime(dt.year, 1, 1, tzinfo=dt.tzinfo)
        end = datetime.datetime(dt.year+1, 1, 1, tzinfo=dt.tzinfo)
    else:
        raise InvalidInterval('Inverval not supported.')
    end = end - relativedelta(microseconds=1)
    return begin, end

def get_interval_sql(date_field, interval, engine):
    ''' Returns SQL clause that calculates the beginning of interval
        date_field belongs to.
    '''

    SQL = {
        'mysql': {
            'minutes': "DATE_FORMAT(`" + date_field +"`, '%%Y-%%m-%%d %%H:%%i')",
            'hours': "DATE_FORMAT(`" + date_field +"`, '%%Y-%%m-%%d %%H:00')",
            'days': "DATE_FORMAT(`" + date_field +"`, '%%Y-%%m-%%d')",
            'weeks': "DATE_FORMAT(DATE_SUB(`"+date_field+"`, INTERVAL(WEEKDAY(`"+date_field+"`)) DAY), '%%Y-%%m-%%d')",
            'months': "DATE_FORMAT(`" + date_field +"`, '%%Y-%%m-01')",
            'years': "DATE_FORMAT(`" + date_field +"`, '%%Y-01-01')",
        },
        'postgresql': {
            'minutes': "date_trunc('minute', %s)" % date_field,
            'hours': "date_trunc('hour', %s)" % date_field,
            'days': "date_trunc('day', %s)" % date_field,
            'weeks': "date_trunc('week', %s)" % date_field,
            'months': "date_trunc('month', %s)" % date_field,
            'years': "date_trunc('year', %s)" % date_field,
        },
        'sqlite': {
            'minutes': "strftime('%%Y-%%m-%%d %%H:%%M', `" + date_field + "`)",
            'hours': "strftime('%%Y-%%m-%%d %%H:00', `" + date_field + "`)",
            'days': "strftime('%%Y-%%m-%%d', `" + date_field + "`)",
            'weeks': "strftime('%%Y-%%m-%%d', julianday(`" + date_field + "`) - strftime('%%w', `" + date_field + "`) + 1)",
            'months': "strftime('%%Y-%%m-01', `" + date_field + "`)",
            'years': "strftime('%%Y-01-01', `" + date_field + "`)",
        },
    }

    try:
        engine_sql = SQL[engine]
    except KeyError:
        msg = '%s DB engine is not supported. Supported engines are: %s' % (engine, ", ".join(SQL.keys()))
        raise UnsupportedEngine(msg)

    try:
        return engine_sql[interval]
    except KeyError:
        raise InvalidInterval('Interval is not supported for %s DB backend.' % engine)
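To make the helpers above concrete, here is what they return for a few inputs (a sketch run in a plain Python shell with the package importable; the values follow directly from the code):

import datetime
from qsstats.utils import _parse_interval, get_bounds, get_interval_sql

# '10days' means buckets of 10 days; a bare 'days' defaults to a count of 1.
print(_parse_interval('10days'))   # (10, 'days')
print(_parse_interval('days'))     # (1, 'days')

# get_bounds() returns the start and inclusive end of the interval containing dt.
dt = datetime.datetime(2014, 2, 14, 15, 30)
print(get_bounds(dt, 'month'))
# (datetime.datetime(2014, 2, 1, 0, 0),
#  datetime.datetime(2014, 2, 28, 23, 59, 59, 999999))

# get_interval_sql() returns the engine-specific truncation expression used by
# _fast_time_series(); the '%%' escapes are for Django's .extra(select=...).
print(get_interval_sql('created', 'days', 'postgresql'))
# date_trunc('day', created)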