mirror of
https://github.com/ansible/awx.git
synced 2026-01-09 15:02:07 -03:30
add postgres Fact model, update views, tests
* awx.main.models Fact added * view host fact and timeline updated to use new Postgres Fact model instead of Mongo * Removed license set start Mongo logic * added View tests * added Model tests * Removed mongo fact unit tests * point at modified jsonbfield that supports sqlite storage driver * postgresify fact cache receiver * test OPTIONS endpoint * Note: single fact view not implemented yet.
This commit is contained in:
parent
56b0da30f1
commit
7ffe46fc74
@ -32,7 +32,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
|
||||
'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
|
||||
'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
|
||||
'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
|
||||
'MongoAPIView', 'MongoListAPIView']
|
||||
'SubDetailAPIView',
|
||||
'ParentMixin',]
|
||||
|
||||
logger = logging.getLogger('awx.api.generics')
|
||||
|
||||
@ -200,28 +201,6 @@ class GenericAPIView(generics.GenericAPIView, APIView):
|
||||
d['settings'] = settings
|
||||
return d
|
||||
|
||||
|
||||
class MongoAPIView(GenericAPIView):
|
||||
|
||||
def get_parent_object(self):
|
||||
parent_filter = {
|
||||
self.lookup_field: self.kwargs.get(self.lookup_field, None),
|
||||
}
|
||||
return get_object_or_404(self.parent_model, **parent_filter)
|
||||
|
||||
def check_parent_access(self, parent=None):
|
||||
parent = parent or self.get_parent_object()
|
||||
parent_access = getattr(self, 'parent_access', 'read')
|
||||
if parent_access in ('read', 'delete'):
|
||||
args = (self.parent_model, parent_access, parent)
|
||||
else:
|
||||
args = (self.parent_model, parent_access, parent, None)
|
||||
if not self.request.user.can_access(*args):
|
||||
raise PermissionDenied()
|
||||
|
||||
class MongoListAPIView(generics.ListAPIView, MongoAPIView):
|
||||
pass
|
||||
|
||||
class SimpleListAPIView(generics.ListAPIView, GenericAPIView):
|
||||
|
||||
def get_queryset(self):
|
||||
@ -258,7 +237,25 @@ class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
|
||||
# Base class for a list view that allows creating new objects.
|
||||
pass
|
||||
|
||||
class SubListAPIView(ListAPIView):
|
||||
class ParentMixin(object):
|
||||
|
||||
def get_parent_object(self):
|
||||
parent_filter = {
|
||||
self.lookup_field: self.kwargs.get(self.lookup_field, None),
|
||||
}
|
||||
return get_object_or_404(self.parent_model, **parent_filter)
|
||||
|
||||
def check_parent_access(self, parent=None):
|
||||
parent = parent or self.get_parent_object()
|
||||
parent_access = getattr(self, 'parent_access', 'read')
|
||||
if parent_access in ('read', 'delete'):
|
||||
args = (self.parent_model, parent_access, parent)
|
||||
else:
|
||||
args = (self.parent_model, parent_access, parent, None)
|
||||
if not self.request.user.can_access(*args):
|
||||
raise PermissionDenied()
|
||||
|
||||
class SubListAPIView(ListAPIView, ParentMixin):
|
||||
# Base class for a read-only sublist view.
|
||||
|
||||
# Subclasses should define at least:
|
||||
@ -278,22 +275,6 @@ class SubListAPIView(ListAPIView):
|
||||
})
|
||||
return d
|
||||
|
||||
def get_parent_object(self):
|
||||
parent_filter = {
|
||||
self.lookup_field: self.kwargs.get(self.lookup_field, None),
|
||||
}
|
||||
return get_object_or_404(self.parent_model, **parent_filter)
|
||||
|
||||
def check_parent_access(self, parent=None):
|
||||
parent = parent or self.get_parent_object()
|
||||
parent_access = getattr(self, 'parent_access', 'read')
|
||||
if parent_access in ('read', 'delete'):
|
||||
args = (self.parent_model, parent_access, parent)
|
||||
else:
|
||||
args = (self.parent_model, parent_access, parent, None)
|
||||
if not self.request.user.can_access(*args):
|
||||
raise PermissionDenied()
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
self.check_parent_access(parent)
|
||||
@ -430,6 +411,9 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
|
||||
else:
|
||||
return self.attach(request, *args, **kwargs)
|
||||
|
||||
class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin):
|
||||
pass
|
||||
|
||||
class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
|
||||
pass
|
||||
|
||||
|
||||
@ -10,8 +10,6 @@ from collections import OrderedDict
|
||||
from dateutil import rrule
|
||||
from ast import literal_eval
|
||||
|
||||
from rest_framework_mongoengine.serializers import DocumentSerializer
|
||||
|
||||
# PyYAML
|
||||
import yaml
|
||||
|
||||
@ -46,8 +44,6 @@ from awx.main.conf import tower_settings
|
||||
from awx.api.license import feature_enabled
|
||||
from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, EncryptedPasswordField, VerbatimField
|
||||
|
||||
from awx.fact.models import * # noqa
|
||||
|
||||
logger = logging.getLogger('awx.api.serializers')
|
||||
|
||||
# Fields that should be summarized regardless of object type.
|
||||
@ -482,18 +478,19 @@ class BaseSerializer(serializers.ModelSerializer):
|
||||
return ret
|
||||
|
||||
|
||||
class BaseFactSerializer(DocumentSerializer):
|
||||
class BaseFactSerializer(BaseSerializer):
|
||||
|
||||
__metaclass__ = BaseSerializerMetaclass
|
||||
|
||||
def get_fields(self):
|
||||
ret = super(BaseFactSerializer, self).get_fields()
|
||||
if 'module' in ret and feature_enabled('system_tracking'):
|
||||
choices = [(o, o.title()) for o in FactVersion.objects.all().only('module').distinct('module')]
|
||||
ret['module'] = serializers.ChoiceField(source='module', choices=choices, read_only=True, required=False)
|
||||
# TODO: the values_list may pull in a LOT of entries before the distinct is called
|
||||
modules = Fact.objects.all().values_list('module', flat=True).distinct()
|
||||
choices = [(o, o.title()) for o in modules]
|
||||
ret['module'] = serializers.ChoiceField(choices=choices, read_only=True, required=False)
|
||||
return ret
|
||||
|
||||
|
||||
class UnifiedJobTemplateSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
@ -2290,28 +2287,31 @@ class AuthTokenSerializer(serializers.Serializer):
|
||||
|
||||
|
||||
class FactVersionSerializer(BaseFactSerializer):
|
||||
related = serializers.SerializerMethodField('get_related')
|
||||
|
||||
class Meta:
|
||||
model = FactVersion
|
||||
fields = ('related', 'module', 'timestamp',)
|
||||
model = Fact
|
||||
fields = ('related', 'module', 'timestamp')
|
||||
read_only_fields = ('*',)
|
||||
|
||||
def get_related(self, obj):
|
||||
host_obj = self.context.get('host_obj')
|
||||
res = {}
|
||||
res = super(FactVersionSerializer, self).get_related(obj)
|
||||
params = {
|
||||
'datetime': timestamp_apiformat(obj.timestamp),
|
||||
'module': obj.module,
|
||||
}
|
||||
res.update(dict(
|
||||
fact_view = build_url('api:host_fact_compare_view', args=(host_obj.pk,), get=params),
|
||||
))
|
||||
res['fact_view'] = build_url('api:host_fact_compare_view', args=(obj.host.pk,), get=params)
|
||||
return res
|
||||
|
||||
|
||||
class FactSerializer(BaseFactSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Fact
|
||||
depth = 2
|
||||
fields = ('timestamp', 'host', 'module', 'fact')
|
||||
# TODO: Consider adding in host to the fields list ?
|
||||
fields = ('related', 'timestamp', 'module', 'facts', 'id', 'summary_fields', 'host')
|
||||
read_only_fields = ('*',)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(FactSerializer, self).get_related(obj)
|
||||
res['host'] = obj.host.get_absolute_url()
|
||||
return res
|
||||
|
||||
|
||||
@ -91,8 +91,8 @@ host_urls = patterns('awx.api.views',
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
|
||||
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
|
||||
)
|
||||
|
||||
group_urls = patterns('awx.api.views',
|
||||
|
||||
164
awx/api/views.py
164
awx/api/views.py
@ -42,9 +42,6 @@ from rest_framework import status
|
||||
from rest_framework_yaml.parsers import YAMLParser
|
||||
from rest_framework_yaml.renderers import YAMLRenderer
|
||||
|
||||
# MongoEngine
|
||||
import mongoengine
|
||||
|
||||
# QSStats
|
||||
import qsstats
|
||||
|
||||
@ -61,7 +58,6 @@ from awx.main.access import get_user_queryset
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.api.authentication import TaskAuthentication, TokenGetAuthentication
|
||||
from awx.api.utils.decorators import paginated
|
||||
from awx.api.filters import MongoFilterBackend
|
||||
from awx.api.generics import get_view_name
|
||||
from awx.api.generics import * # noqa
|
||||
from awx.api.license import feature_enabled, feature_exists, LicenseForbids
|
||||
@ -70,7 +66,6 @@ from awx.main.utils import * # noqa
|
||||
from awx.api.permissions import * # noqa
|
||||
from awx.api.renderers import * # noqa
|
||||
from awx.api.serializers import * # noqa
|
||||
from awx.fact.models import * # noqa
|
||||
from awx.main.utils import emit_websocket_notification
|
||||
from awx.main.conf import tower_settings
|
||||
|
||||
@ -250,32 +245,11 @@ class ApiV1ConfigView(APIView):
|
||||
# FIX: Log
|
||||
return Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# Sanity check: If this license includes system tracking, make
|
||||
# sure that we have a valid MongoDB to point to, and complain if
|
||||
# we do not.
|
||||
if ('features' in license_data and 'system_tracking' in license_data['features'] and
|
||||
license_data['features']['system_tracking'] and settings.MONGO_HOST == NotImplemented):
|
||||
return Response({
|
||||
'error': 'This license supports system tracking, which '
|
||||
'requires MongoDB to be installed. Since you are '
|
||||
'running in an HA environment, you will need to '
|
||||
'provide a MongoDB instance. Please re-run the '
|
||||
'installer prior to installing this license.'
|
||||
}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# If the license is valid, write it to disk.
|
||||
if license_data['valid_key']:
|
||||
tower_settings.LICENSE = data_actual
|
||||
|
||||
# Spawn a task to ensure that MongoDB is started (or stopped)
|
||||
# as appropriate, based on whether the license uses it.
|
||||
if license_data['features']['system_tracking']:
|
||||
mongodb_control.delay('start')
|
||||
else:
|
||||
mongodb_control.delay('stop')
|
||||
|
||||
# Done; return the response.
|
||||
return Response(license_data)
|
||||
|
||||
return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request):
|
||||
@ -1125,33 +1099,6 @@ class InventoryScanJobTemplateList(SubListAPIView):
|
||||
qs = self.request.user.get_queryset(self.model)
|
||||
return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent)
|
||||
|
||||
class InventorySingleFactView(MongoAPIView):
|
||||
|
||||
model = Fact
|
||||
parent_model = Inventory
|
||||
new_in_220 = True
|
||||
serializer_class = FactSerializer
|
||||
filter_backends = (MongoFilterBackend,)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
# Sanity check: Does the license allow system tracking?
|
||||
if not feature_enabled('system_tracking'):
|
||||
raise LicenseForbids('Your license does not permit use '
|
||||
'of system tracking.')
|
||||
|
||||
fact_key = request.query_params.get("fact_key", None)
|
||||
fact_value = request.query_params.get("fact_value", None)
|
||||
datetime_spec = request.query_params.get("timestamp", None)
|
||||
module_spec = request.query_params.get("module", None)
|
||||
|
||||
if fact_key is None or fact_value is None or module_spec is None:
|
||||
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
|
||||
inventory_obj = self.get_parent_object()
|
||||
fact_data = Fact.get_single_facts([h.name for h in inventory_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
|
||||
return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
|
||||
|
||||
|
||||
class HostList(ListCreateAPIView):
|
||||
|
||||
model = Host
|
||||
@ -1225,88 +1172,43 @@ class HostActivityStreamList(SubListAPIView):
|
||||
qs = self.request.user.get_queryset(self.model)
|
||||
return qs.filter(Q(host=parent) | Q(inventory=parent.inventory))
|
||||
|
||||
class HostFactVersionsList(MongoListAPIView):
|
||||
class HostFactVersionsList(ListAPIView, ParentMixin):
|
||||
|
||||
model = Fact
|
||||
serializer_class = FactVersionSerializer
|
||||
parent_model = Host
|
||||
new_in_220 = True
|
||||
filter_backends = (MongoFilterBackend,)
|
||||
|
||||
def get_queryset(self):
|
||||
from_spec = self.request.query_params.get('from', None)
|
||||
to_spec = self.request.query_params.get('to', None)
|
||||
module_spec = self.request.query_params.get('module', None)
|
||||
|
||||
if not feature_enabled("system_tracking"):
|
||||
raise LicenseForbids("Your license does not permit use "
|
||||
"of system tracking.")
|
||||
|
||||
host = self.get_parent_object()
|
||||
self.check_parent_access(host)
|
||||
from_spec = self.request.query_params.get('from', None)
|
||||
to_spec = self.request.query_params.get('to', None)
|
||||
module_spec = self.request.query_params.get('module', None)
|
||||
|
||||
try:
|
||||
fact_host = FactHost.objects.get(hostname=host.name, inventory_id=host.inventory.pk)
|
||||
except FactHost.DoesNotExist:
|
||||
return None
|
||||
except mongoengine.ConnectionError:
|
||||
return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
|
||||
if from_spec:
|
||||
from_spec = dateutil.parser.parse(from_spec)
|
||||
if to_spec:
|
||||
to_spec = dateutil.parser.parse(to_spec)
|
||||
|
||||
kv = {
|
||||
'host': fact_host.id,
|
||||
}
|
||||
if module_spec is not None:
|
||||
kv['module'] = module_spec
|
||||
if from_spec is not None:
|
||||
from_actual = dateutil.parser.parse(from_spec)
|
||||
kv['timestamp__gt'] = from_actual
|
||||
if to_spec is not None:
|
||||
to_actual = dateutil.parser.parse(to_spec)
|
||||
kv['timestamp__lte'] = to_actual
|
||||
|
||||
return FactVersion.objects.filter(**kv).order_by("-timestamp")
|
||||
host_obj = self.get_parent_object()
|
||||
|
||||
return Fact.get_timeline(host_obj.id, module=module_spec, ts_from=from_spec, ts_to=to_spec)
|
||||
|
||||
def list(self, *args, **kwargs):
|
||||
queryset = self.get_queryset() or []
|
||||
try:
|
||||
serializer = FactVersionSerializer(queryset, many=True, context=dict(host_obj=self.get_parent_object()))
|
||||
except mongoengine.ConnectionError:
|
||||
return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
|
||||
return Response(dict(results=serializer.data))
|
||||
return Response(dict(results=self.serializer_class(queryset, many=True).data))
|
||||
|
||||
class HostSingleFactView(MongoAPIView):
|
||||
class HostFactCompareView(SubDetailAPIView):
|
||||
|
||||
model = Fact
|
||||
parent_model = Host
|
||||
new_in_220 = True
|
||||
serializer_class = FactSerializer
|
||||
filter_backends = (MongoFilterBackend,)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
# Sanity check: Does the license allow system tracking?
|
||||
if not feature_enabled('system_tracking'):
|
||||
raise LicenseForbids('Your license does not permit use '
|
||||
'of system tracking.')
|
||||
|
||||
fact_key = request.query_params.get("fact_key", None)
|
||||
fact_value = request.query_params.get("fact_value", None)
|
||||
datetime_spec = request.query_params.get("timestamp", None)
|
||||
module_spec = request.query_params.get("module", None)
|
||||
|
||||
if fact_key is None or fact_value is None or module_spec is None:
|
||||
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
|
||||
host_obj = self.get_parent_object()
|
||||
fact_data = Fact.get_single_facts([host_obj.name], fact_key, fact_value, datetime_actual, module_spec)
|
||||
return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
|
||||
|
||||
class HostFactCompareView(MongoAPIView):
|
||||
|
||||
new_in_220 = True
|
||||
parent_model = Host
|
||||
serializer_class = FactSerializer
|
||||
filter_backends = (MongoFilterBackend,)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
# Sanity check: Does the license allow system tracking?
|
||||
if not feature_enabled('system_tracking'):
|
||||
raise LicenseForbids('Your license does not permit use '
|
||||
@ -1317,10 +1219,11 @@ class HostFactCompareView(MongoAPIView):
|
||||
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
|
||||
|
||||
host_obj = self.get_parent_object()
|
||||
fact_entry = Fact.get_host_version(host_obj.name, host_obj.inventory.pk, datetime_actual, module_spec)
|
||||
host_data = FactSerializer(fact_entry).data if fact_entry is not None else {}
|
||||
|
||||
return Response(host_data)
|
||||
fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual)
|
||||
if not fact_entry:
|
||||
return Response({'detail': 'Fact not found'}, status=status.HTTP_404_NOT_FOUND)
|
||||
return Response(self.serializer_class(instance=fact_entry).data)
|
||||
|
||||
class GroupList(ListCreateAPIView):
|
||||
|
||||
@ -1470,33 +1373,6 @@ class GroupDetail(RetrieveUpdateDestroyAPIView):
|
||||
obj.mark_inactive_recursive()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class GroupSingleFactView(MongoAPIView):
|
||||
|
||||
model = Fact
|
||||
parent_model = Group
|
||||
new_in_220 = True
|
||||
serializer_class = FactSerializer
|
||||
filter_backends = (MongoFilterBackend,)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
# Sanity check: Does the license allow system tracking?
|
||||
if not feature_enabled('system_tracking'):
|
||||
raise LicenseForbids('Your license does not permit use '
|
||||
'of system tracking.')
|
||||
|
||||
fact_key = request.query_params.get("fact_key", None)
|
||||
fact_value = request.query_params.get("fact_value", None)
|
||||
datetime_spec = request.query_params.get("timestamp", None)
|
||||
module_spec = request.query_params.get("module", None)
|
||||
|
||||
if fact_key is None or fact_value is None or module_spec is None:
|
||||
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
|
||||
group_obj = self.get_parent_object()
|
||||
fact_data = Fact.get_single_facts([h.name for h in group_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
|
||||
return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
|
||||
|
||||
class InventoryGroupsList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = Group
|
||||
|
||||
@ -9,9 +9,11 @@ from datetime import datetime
|
||||
# Django
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.conf import settings
|
||||
#from django.core.exceptions import Does
|
||||
|
||||
# AWX
|
||||
from awx.fact.models.fact import * # noqa
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.models.inventory import Host
|
||||
from awx.main.socket import Socket
|
||||
|
||||
logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
|
||||
@ -47,35 +49,34 @@ class FactCacheReceiver(object):
|
||||
# ansible v2 will not emit this message. Thus, this can be removed at that time.
|
||||
if 'module_setup' in facts_data and len(facts_data) == 1:
|
||||
logger.info('Received module_setup message')
|
||||
return
|
||||
return None
|
||||
|
||||
try:
|
||||
host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id)
|
||||
except FactHost.DoesNotExist:
|
||||
logger.info('Creating new host <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
|
||||
host = FactHost(hostname=hostname, inventory_id=inventory_id)
|
||||
host.save()
|
||||
logger.info('Created new host <%s>' % (host.id))
|
||||
except FactHost.MultipleObjectsReturned:
|
||||
query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id)
|
||||
logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query))
|
||||
host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id)
|
||||
except Fact.DoesNotExist:
|
||||
logger.warn('Failed to intake fact. Host does not exist <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
|
||||
return
|
||||
except Fact.MultipleObjectsReturned:
|
||||
logger.warn('Database inconsistent. Multiple Hosts found for <hostname, inventory_id> <%s, %s>.' % (hostname, inventory_id))
|
||||
return None
|
||||
except Exception, e:
|
||||
logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
|
||||
return
|
||||
return None
|
||||
|
||||
(module, facts) = self.process_facts(facts_data)
|
||||
(module_name, facts) = self.process_facts(facts_data)
|
||||
self.timestamp = datetime.fromtimestamp(date_key, None)
|
||||
|
||||
try:
|
||||
# Update existing Fact entry
|
||||
version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module)
|
||||
Fact.objects(id=version_obj.fact.id).update_one(fact=facts)
|
||||
logger.info('Updated existing fact <%s>' % (version_obj.fact.id))
|
||||
except FactVersion.DoesNotExist:
|
||||
# Update existing Fact entry
|
||||
fact_obj = Fact.get_host_fact(host_obj.id, module_name, self.timestamp)
|
||||
if fact_obj:
|
||||
fact_obj.facts = facts
|
||||
fact_obj.save()
|
||||
logger.info('Updated existing fact <%s>' % (fact_obj.id))
|
||||
else:
|
||||
# Create new Fact entry
|
||||
(fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module)
|
||||
logger.info('Created new fact <fact, fact_version> <%s, %s>' % (fact_obj.id, version_obj.id))
|
||||
fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
|
||||
logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
|
||||
return fact_obj
|
||||
|
||||
def run_receiver(self, use_processing_threads=True):
|
||||
with Socket('fact_cache', 'r') as facts:
|
||||
|
||||
@ -17,8 +17,6 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('timestamp', models.DateTimeField(default=None, editable=False)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('modified', models.DateTimeField(auto_now=True)),
|
||||
('module', models.CharField(max_length=128)),
|
||||
('facts', jsonbfield.fields.JSONField(default={}, blank=True)),
|
||||
('host', models.ForeignKey(related_name='facts', to='main.Host')),
|
||||
|
||||
@ -2,9 +2,9 @@
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.db import models
|
||||
from jsonbfield.fields import JSONField
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from awx.main.models import Host
|
||||
from jsonbfield.fields import JSONField
|
||||
|
||||
__all__ = ('Fact', )
|
||||
|
||||
@ -13,16 +13,19 @@ class Fact(models.Model):
|
||||
Facts are stored as JSON dictionaries.
|
||||
"""
|
||||
host = models.ForeignKey(
|
||||
Host,
|
||||
'Host',
|
||||
related_name='facts',
|
||||
db_index=True,
|
||||
on_delete=models.CASCADE,
|
||||
help_text=_('Host for the facts that the fact scan captured.'),
|
||||
)
|
||||
timestamp = models.DateTimeField(
|
||||
default=None,
|
||||
editable=False,
|
||||
help_text=_('Date and time of the corresponding fact scan gathering time.')
|
||||
)
|
||||
timestamp = models.DateTimeField(default=None, editable=False)
|
||||
created = models.DateTimeField(editable=False, auto_now_add=True)
|
||||
modified = models.DateTimeField(editable=False, auto_now=True)
|
||||
module = models.CharField(max_length=128)
|
||||
facts = JSONField(blank=True, default={})
|
||||
facts = JSONField(blank=True, default={}, help_text=_('Arbitrary JSON structure of module facts captured at timestamp for a single host.'))
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
@ -30,3 +33,32 @@ class Fact(models.Model):
|
||||
["timestamp", "module", "host"],
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_host_fact(host_id, module, timestamp):
|
||||
qs = Fact.objects.filter(host__id=host_id, module=module, timestamp__lte=timestamp).order_by('-timestamp')
|
||||
if qs:
|
||||
return qs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def get_timeline(host_id, module=None, ts_from=None, ts_to=None):
|
||||
kwargs = {
|
||||
'host__id': host_id,
|
||||
}
|
||||
if module:
|
||||
kwargs['module'] = module
|
||||
if ts_from and ts_to and ts_from == ts_to:
|
||||
kwargs['timestamp'] = ts_from
|
||||
else:
|
||||
if ts_from:
|
||||
kwargs['timestamp__gt'] = ts_from
|
||||
if ts_to:
|
||||
kwargs['timestamp__lte'] = ts_to
|
||||
return Fact.objects.filter(**kwargs).order_by('-timestamp').only('timestamp', 'module').order_by('-timestamp', 'module')
|
||||
|
||||
@staticmethod
|
||||
def add_fact(host_id, module, timestamp, facts):
|
||||
fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts)
|
||||
fact_obj.save()
|
||||
return fact_obj
|
||||
|
||||
283
awx/main/tests/functional/ansible.json
Normal file
283
awx/main/tests/functional/ansible.json
Normal file
@ -0,0 +1,283 @@
|
||||
{
|
||||
"ansible_all_ipv4_addresses": [
|
||||
"172.17.0.7"
|
||||
],
|
||||
"ansible_all_ipv6_addresses": [
|
||||
"fe80::42:acff:fe11:7"
|
||||
],
|
||||
"ansible_architecture": "x86_64",
|
||||
"ansible_bios_date": "12/01/2006",
|
||||
"ansible_bios_version": "VirtualBox",
|
||||
"ansible_cmdline": {
|
||||
"BOOT_IMAGE": "/boot/vmlinuz64",
|
||||
"base": true,
|
||||
"console": "tty0",
|
||||
"initrd": "/boot/initrd.img",
|
||||
"loglevel": "3",
|
||||
"noembed": true,
|
||||
"nomodeset": true,
|
||||
"norestore": true,
|
||||
"user": "docker",
|
||||
"waitusb": "10:LABEL=boot2docker-data"
|
||||
},
|
||||
"ansible_date_time": {
|
||||
"date": "2016-02-02",
|
||||
"day": "02",
|
||||
"epoch": "1454424257",
|
||||
"hour": "14",
|
||||
"iso8601": "2016-02-02T14:44:17Z",
|
||||
"iso8601_basic": "20160202T144417348424",
|
||||
"iso8601_basic_short": "20160202T144417",
|
||||
"iso8601_micro": "2016-02-02T14:44:17.348496Z",
|
||||
"minute": "44",
|
||||
"month": "02",
|
||||
"second": "17",
|
||||
"time": "14:44:17",
|
||||
"tz": "UTC",
|
||||
"tz_offset": "+0000",
|
||||
"weekday": "Tuesday",
|
||||
"weekday_number": "2",
|
||||
"weeknumber": "05",
|
||||
"year": "2016"
|
||||
},
|
||||
"ansible_default_ipv4": {
|
||||
"address": "172.17.0.7",
|
||||
"alias": "eth0",
|
||||
"broadcast": "global",
|
||||
"gateway": "172.17.0.1",
|
||||
"interface": "eth0",
|
||||
"macaddress": "02:42:ac:11:00:07",
|
||||
"mtu": 1500,
|
||||
"netmask": "255.255.0.0",
|
||||
"network": "172.17.0.0",
|
||||
"type": "ether"
|
||||
},
|
||||
"ansible_default_ipv6": {},
|
||||
"ansible_devices": {
|
||||
"sda": {
|
||||
"holders": [],
|
||||
"host": "",
|
||||
"model": "VBOX HARDDISK",
|
||||
"partitions": {
|
||||
"sda1": {
|
||||
"sectors": "510015555",
|
||||
"sectorsize": 512,
|
||||
"size": "243.19 GB",
|
||||
"start": "1975995"
|
||||
},
|
||||
"sda2": {
|
||||
"sectors": "1975932",
|
||||
"sectorsize": 512,
|
||||
"size": "964.81 MB",
|
||||
"start": "63"
|
||||
}
|
||||
},
|
||||
"removable": "0",
|
||||
"rotational": "0",
|
||||
"scheduler_mode": "deadline",
|
||||
"sectors": "512000000",
|
||||
"sectorsize": "512",
|
||||
"size": "244.14 GB",
|
||||
"support_discard": "0",
|
||||
"vendor": "ATA"
|
||||
},
|
||||
"sr0": {
|
||||
"holders": [],
|
||||
"host": "",
|
||||
"model": "CD-ROM",
|
||||
"partitions": {},
|
||||
"removable": "1",
|
||||
"rotational": "1",
|
||||
"scheduler_mode": "deadline",
|
||||
"sectors": "61440",
|
||||
"sectorsize": "2048",
|
||||
"size": "120.00 MB",
|
||||
"support_discard": "0",
|
||||
"vendor": "VBOX"
|
||||
}
|
||||
},
|
||||
"ansible_distribution": "Ubuntu",
|
||||
"ansible_distribution_major_version": "14",
|
||||
"ansible_distribution_release": "trusty",
|
||||
"ansible_distribution_version": "14.04",
|
||||
"ansible_dns": {
|
||||
"nameservers": [
|
||||
"8.8.8.8"
|
||||
]
|
||||
},
|
||||
"ansible_domain": "",
|
||||
"ansible_env": {
|
||||
"HOME": "/root",
|
||||
"HOSTNAME": "ede894599989",
|
||||
"LANG": "en_US.UTF-8",
|
||||
"LC_ALL": "en_US.UTF-8",
|
||||
"LC_MESSAGES": "en_US.UTF-8",
|
||||
"LESSCLOSE": "/usr/bin/lesspipe %s %s",
|
||||
"LESSOPEN": "| /usr/bin/lesspipe %s",
|
||||
"LS_COLORS": "",
|
||||
"OLDPWD": "/ansible",
|
||||
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"PWD": "/ansible/examples",
|
||||
"SHLVL": "1",
|
||||
"_": "/usr/local/bin/ansible",
|
||||
"container": "docker"
|
||||
},
|
||||
"ansible_eth0": {
|
||||
"active": true,
|
||||
"device": "eth0",
|
||||
"ipv4": {
|
||||
"address": "172.17.0.7",
|
||||
"broadcast": "global",
|
||||
"netmask": "255.255.0.0",
|
||||
"network": "172.17.0.0"
|
||||
},
|
||||
"ipv6": [
|
||||
{
|
||||
"address": "fe80::42:acff:fe11:7",
|
||||
"prefix": "64",
|
||||
"scope": "link"
|
||||
}
|
||||
],
|
||||
"macaddress": "02:42:ac:11:00:07",
|
||||
"mtu": 1500,
|
||||
"promisc": false,
|
||||
"type": "ether"
|
||||
},
|
||||
"ansible_fips": false,
|
||||
"ansible_form_factor": "Other",
|
||||
"ansible_fqdn": "ede894599989",
|
||||
"ansible_hostname": "ede894599989",
|
||||
"ansible_interfaces": [
|
||||
"lo",
|
||||
"eth0"
|
||||
],
|
||||
"ansible_kernel": "4.1.12-boot2docker",
|
||||
"ansible_lo": {
|
||||
"active": true,
|
||||
"device": "lo",
|
||||
"ipv4": {
|
||||
"address": "127.0.0.1",
|
||||
"broadcast": "host",
|
||||
"netmask": "255.0.0.0",
|
||||
"network": "127.0.0.0"
|
||||
},
|
||||
"ipv6": [
|
||||
{
|
||||
"address": "::1",
|
||||
"prefix": "128",
|
||||
"scope": "host"
|
||||
}
|
||||
],
|
||||
"mtu": 65536,
|
||||
"promisc": false,
|
||||
"type": "loopback"
|
||||
},
|
||||
"ansible_lsb": {
|
||||
"codename": "trusty",
|
||||
"description": "Ubuntu 14.04.3 LTS",
|
||||
"id": "Ubuntu",
|
||||
"major_release": "14",
|
||||
"release": "14.04"
|
||||
},
|
||||
"ansible_machine": "x86_64",
|
||||
"ansible_memfree_mb": 3746,
|
||||
"ansible_memory_mb": {
|
||||
"nocache": {
|
||||
"free": 8896,
|
||||
"used": 3638
|
||||
},
|
||||
"real": {
|
||||
"free": 3746,
|
||||
"total": 12534,
|
||||
"used": 8788
|
||||
},
|
||||
"swap": {
|
||||
"cached": 0,
|
||||
"free": 4048,
|
||||
"total": 4048,
|
||||
"used": 0
|
||||
}
|
||||
},
|
||||
"ansible_memtotal_mb": 12534,
|
||||
"ansible_mounts": [
|
||||
{
|
||||
"device": "/dev/sda1",
|
||||
"fstype": "ext4",
|
||||
"mount": "/etc/resolv.conf",
|
||||
"options": "rw,relatime,data=ordered",
|
||||
"size_available": 201281392640,
|
||||
"size_total": 256895700992,
|
||||
"uuid": "NA"
|
||||
},
|
||||
{
|
||||
"device": "/dev/sda1",
|
||||
"fstype": "ext4",
|
||||
"mount": "/etc/hostname",
|
||||
"options": "rw,relatime,data=ordered",
|
||||
"size_available": 201281392640,
|
||||
"size_total": 256895700992,
|
||||
"uuid": "NA"
|
||||
},
|
||||
{
|
||||
"device": "/dev/sda1",
|
||||
"fstype": "ext4",
|
||||
"mount": "/etc/hosts",
|
||||
"options": "rw,relatime,data=ordered",
|
||||
"size_available": 201281392640,
|
||||
"size_total": 256895700992,
|
||||
"uuid": "NA"
|
||||
}
|
||||
],
|
||||
"ansible_nodename": "ede894599989",
|
||||
"ansible_os_family": "Debian",
|
||||
"ansible_pkg_mgr": "apt",
|
||||
"ansible_processor": [
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
|
||||
"GenuineIntel",
|
||||
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz"
|
||||
],
|
||||
"ansible_processor_cores": 8,
|
||||
"ansible_processor_count": 1,
|
||||
"ansible_processor_threads_per_core": 1,
|
||||
"ansible_processor_vcpus": 8,
|
||||
"ansible_product_name": "VirtualBox",
|
||||
"ansible_product_serial": "0",
|
||||
"ansible_product_uuid": "25C5EA5A-1DF1-48D9-A2C6-81227DA153C0",
|
||||
"ansible_product_version": "1.2",
|
||||
"ansible_python_version": "2.7.6",
|
||||
"ansible_selinux": false,
|
||||
"ansible_service_mgr": "upstart",
|
||||
"ansible_ssh_host_key_dsa_public": "AAAAB3NzaC1kc3MAAACBALF0xsM8UMXgSKiWNw4t19wxbxLnxQX742t/dIM0O8YLx+/lIP+Q69Dv5uoVt0zKV39eFziRlCh96qj2KYkGEJ6XfVZFnhpculL2Pv2CPpSwKuQ1vTbDO/xxUrvY+bHpfNJf9Rh69bFEE2pTsjomFPCgp8M0qGaFtwg6czSaeBONAAAAFQCGEfVtj97JiexTVRqgQITYlFp/eQAAAIEAg+S9qWn+AIb3amwVoLL/usQYOPCmZY9RVPzpkjJ6OG+HI4B7cXeauPtNTJwT0f9vGEqzf4mPpmS+aCShj6iwdmJ+cOwR5+SJlNalab3CMBoXKVLbT1J2XWFlK0szKKnoReP96IDbkAkGQ3fkm4jz0z6Wy0u6wOQVNcd4G5cwLZ4AAACAFvBm+H1LwNrwWBjWio+ayhglZ4Y25mLMEn2+dqBz0gLK5szEbft1HMPOWIVHvl6vi3v34pAJHKpxXpkLlNliTn8iw9BzCOrgP4V8sp2/85mxEuCdI1w/QERj9cHu5iS2pZ0cUwDE3pfuuGBB3IEliaJyaapowdrM8lN12jQl11E=",
|
||||
"ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHiYp4e9RfXpxDcEWpK4EuXPHW9++xcFI9hiB0TYAZgxEF9RIgwfucpPawFk7HIFoNc7EXQMlryilLSbg155KWM=",
|
||||
"ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAILclD2JaC654azEsAfcHRIOA2Ig9/Qk6MX80i/VCEdSH",
|
||||
"ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDeSUGxZaZsgBsezld0mj3HcbAwx6aykGnejceBjcs6lVwSGMHevofzSXIQDPYBhZoyWNl0PYAHv6AsQ8+3khd2SitUMJAuHSz1ZjgHCCGQP9ijXTKHn+lWCKA8rhLG/dwYwiouoOPZfn1G+erbKO6XiVbELrrf2RadnMGuMinESIOKVj3IunXsaGRMsDOQferOnUf7MvH7xpQnoySyQ1+p4rGruaohWG+Y2cDo7+B2FylPVbrpRDDJkfbt4J96WHx0KOdD0qzOicQP8JqDflqQPJJCWcgrvjQOSe4gXdPB6GZDtBl2qgQRwt1IgizPMm+b7Bwbd2VDe1TeWV2gT/7H",
|
||||
"ansible_swapfree_mb": 4048,
|
||||
"ansible_swaptotal_mb": 4048,
|
||||
"ansible_system": "Linux",
|
||||
"ansible_system_vendor": "innotek GmbH",
|
||||
"ansible_uptime_seconds": 178398,
|
||||
"ansible_user_dir": "/root",
|
||||
"ansible_user_gecos": "root",
|
||||
"ansible_user_gid": 0,
|
||||
"ansible_user_id": "root",
|
||||
"ansible_user_shell": "/bin/bash",
|
||||
"ansible_user_uid": 0,
|
||||
"ansible_userspace_architecture": "x86_64",
|
||||
"ansible_userspace_bits": "64",
|
||||
"ansible_virtualization_role": "guest",
|
||||
"ansible_virtualization_type": "docker",
|
||||
"module_setup": true
|
||||
}
|
||||
239
awx/main/tests/functional/api/test_fact_versions.py
Normal file
239
awx/main/tests/functional/api/test_fact_versions.py
Normal file
@ -0,0 +1,239 @@
|
||||
# Python
import mock
import pytest
from datetime import timedelta
import urlparse
import urllib

# AWX
from awx.main.models.fact import Fact
from awx.api.views import (
    HostFactVersionsList,
)
from awx.main.utils import timestamp_apiformat

# Django
from django.core.urlresolvers import reverse
from django.utils import timezone


def mock_feature_enabled(feature, bypass_database=None):
    """Stand-in for awx.api.views.feature_enabled that always reports the
    feature (system tracking) as licensed."""
    return True


def setup_common(hosts, fact_scans, get, user, epoch=None, get_params=None, host_count=1):
    """Create ``host_count`` hosts plus 3 fact scans and GET the
    fact-versions list endpoint for the first host as an admin.

    Returns a ``(host, response)`` tuple.

    FIX: the defaults were ``epoch=timezone.now()`` and ``get_params={}``.
    Default expressions are evaluated once at import time, so every caller
    that omitted them shared a stale timestamp and a single mutable dict;
    both are now resolved per call.
    """
    if epoch is None:
        epoch = timezone.now()
    if get_params is None:
        get_params = {}
    hosts = hosts(host_count=host_count)
    fact_scans(fact_scans=3, timestamp_epoch=epoch)

    url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
    response = get(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id, params=get_params)

    return (hosts[0], response)


def check_url(url1_full, fact_known, module):
    """Assert a related ``fact_view`` URL points at the compare view for
    ``fact_known`` with the expected ``module``/``datetime`` query params."""
    url1_split = urlparse.urlsplit(url1_full)
    url1 = url1_split.path
    url1_params = urlparse.parse_qsl(url1_split.query)

    url2 = reverse('api:host_fact_compare_view', args=(fact_known.host.pk,))
    url2_params = [('module', module), ('datetime', timestamp_apiformat(fact_known.timestamp))]

    assert url1 == url2
    assert urllib.urlencode(url1_params) == urllib.urlencode(url2_params)


def check_response_facts(facts_known, response):
    """Assert the API results match ``facts_known`` element-for-element."""
    for i, fact_known in enumerate(facts_known):
        assert fact_known.module == response.data['results'][i]['module']
        assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp']
        check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_no_facts_db(hosts, get, user):
    """A host with no stored facts returns an empty result set."""
    hosts = hosts(host_count=1)
    url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
    response = get(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id)

    response_expected = {
        'results': []
    }
    assert response_expected == response.data


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
    """Each result row exposes 'related', 'timestamp' and 'module'."""
    epoch = timezone.now()
    search = {
        'from': epoch,
        'to': epoch,
    }

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)

    results = response.data['results']
    assert 'related' in results[0]
    assert 'timestamp' in results[0]
    assert 'module' in results[0]


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
@pytest.mark.skipif(True, reason="Options fix landed in devel but not here. Enable this after this pr gets merged.")
def test_basic_options_fields(hosts, fact_scans, options, user):
    """OPTIONS on the fact-versions endpoint describes the serializer fields."""
    hosts = hosts(host_count=1)
    fact_scans(fact_scans=1)

    url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
    response = options(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id)

    assert 'related' in response.data
    assert 'id' in response.data
    assert 'facts' in response.data
    assert 'module' in response.data
    assert 'host' in response.data
    assert isinstance(response.data['host'], int)
    assert 'summary_fields' in response.data
    assert 'host' in response.data['summary_fields']
    assert 'name' in response.data['summary_fields']['host']
    assert 'description' in response.data['summary_fields']['host']
    assert 'host' in response.data['related']
    assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host']


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_related_fact_view(hosts, fact_scans, get, user):
    """Every result row links to the matching single-fact compare view."""
    epoch = timezone.now()

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch)
    facts_known = Fact.get_timeline(host.id)
    assert 9 == len(facts_known)
    assert 9 == len(response.data['results'])

    for i, fact_known in enumerate(facts_known):
        check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_multiple_hosts(hosts, fact_scans, get, user):
    """With several hosts, the endpoint only returns the requested host's facts."""
    epoch = timezone.now()

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, host_count=3)
    facts_known = Fact.get_timeline(host.id)
    assert 9 == len(facts_known)
    assert 9 == len(response.data['results'])

    for i, fact_known in enumerate(facts_known):
        check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_to_from(hosts, fact_scans, get, user):
    """A window covering all scans returns the full timeline."""
    epoch = timezone.now()
    search = {
        'from': epoch - timedelta(days=10),
        'to': epoch + timedelta(days=10),
    }

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
    facts_known = Fact.get_timeline(host.id, ts_from=search['from'], ts_to=search['to'])
    assert 9 == len(facts_known)
    assert 9 == len(response.data['results'])

    check_response_facts(facts_known, response)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_module(hosts, fact_scans, get, user):
    """Filtering by module returns only that module's scans."""
    epoch = timezone.now()
    search = {
        'module': 'packages',
    }

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
    facts_known = Fact.get_timeline(host.id, module=search['module'])
    assert 3 == len(facts_known)
    assert 3 == len(response.data['results'])

    check_response_facts(facts_known, response)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_from(hosts, fact_scans, get, user):
    """'from' excludes scans at or before the given timestamp."""
    epoch = timezone.now()
    search = {
        'from': epoch + timedelta(days=1),
    }

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
    facts_known = Fact.get_timeline(host.id, ts_from=search['from'])
    assert 3 == len(facts_known)
    assert 3 == len(response.data['results'])

    check_response_facts(facts_known, response)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_to(hosts, fact_scans, get, user):
    """'to' excludes scans after the given timestamp."""
    epoch = timezone.now()
    search = {
        'to': epoch + timedelta(days=1),
    }

    (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
    facts_known = Fact.get_timeline(host.id, ts_to=search['to'])
    assert 6 == len(facts_known)
    assert 6 == len(response.data['results'])

    check_response_facts(facts_known, response)


def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
    """Common driver: add ``user_obj`` to ``team_obj`` and GET the endpoint."""
    hosts = hosts(host_count=1)
    fact_scans(fact_scans=1)

    team_obj.users.add(user_obj)

    url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
    response = get(HostFactVersionsList, user_obj, url, pk=hosts[0].id)
    return response


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
    """A plain user without inventory permission is denied."""
    user_bob = user('bob', False)
    response = _test_user_access_control(hosts, fact_scans, get, user_bob, team)

    assert 403 == response.status_code
    assert "You do not have permission to perform this action." == response.data['detail']


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
    """A superuser can always read host facts."""
    user_super = user('bob', True)
    response = _test_user_access_control(hosts, fact_scans, get, user_super, team)

    assert 200 == response.status_code


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
    """An admin of the host's organization can read its facts."""
    user_admin = user('johnson', False)
    organization.admins.add(user_admin)

    response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)

    assert 200 == response.status_code


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
    """An admin of a DIFFERENT organization is denied."""
    user_admin = user('johnson', False)
    org2 = organizations(1)
    org2[0].admins.add(user_admin)

    response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)

    assert 403 == response.status_code
||||
|
||||
158
awx/main/tests/functional/api/test_fact_view.py
Normal file
158
awx/main/tests/functional/api/test_fact_view.py
Normal file
@ -0,0 +1,158 @@
|
||||
import mock
import pytest
import json

from awx.api.views import (
    HostFactCompareView,
)
from awx.main.utils import timestamp_apiformat
from django.core.urlresolvers import reverse
from django.utils import timezone


def mock_feature_enabled(feature, bypass_database=None):
    """Stand-in for awx.api.views.feature_enabled that always grants the feature."""
    return True


# TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it
def find_fact(facts, host_id, module_name, timestamp):
    """Return the fact matching ``<host_id, module_name, timestamp>``.

    Raises RuntimeError when no fact matches.
    """
    for f in facts:
        if f.host_id == host_id and f.module == module_name and f.timestamp == timestamp:
            return f
    # FIX: RuntimeError was previously called as RuntimeError('…%s…', tuple);
    # the message was never %-formatted.  Format it explicitly instead.
    raise RuntimeError('fact <%s, %s, %s> not found in %s' % (host_id, module_name, timestamp, facts))


def setup_common(hosts, fact_scans, get, user, epoch=None, module_name='ansible', get_params=None):
    """Create one host plus one fact scan and GET the compare view.

    Returns ``(fact_known, response)``.

    FIX: the defaults were ``epoch=timezone.now()`` and ``get_params={}``;
    both were evaluated once at module import (stale timestamp, shared
    mutable dict).  They are now resolved per call.
    """
    if epoch is None:
        epoch = timezone.now()
    if get_params is None:
        get_params = {}
    hosts = hosts(host_count=1)
    facts = fact_scans(fact_scans=1, timestamp_epoch=epoch)

    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id, params=get_params)

    fact_known = find_fact(facts, hosts[0].id, module_name, epoch)
    return (fact_known, response)


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_no_fact_found(hosts, get, user):
    """With no stored facts the compare view 404s with a clear detail."""
    hosts = hosts(host_count=1)
    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id)

    expected_response = {
        "detail": "Fact not found"
    }
    assert 404 == response.status_code
    assert expected_response == response.data


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
    """The serialized fact exposes the expected top-level fields."""
    hosts = hosts(host_count=1)
    fact_scans(fact_scans=1)

    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id)

    assert 'related' in response.data
    assert 'id' in response.data
    assert 'facts' in response.data
    assert 'module' in response.data
    assert 'host' in response.data
    assert isinstance(response.data['host'], int)
    assert 'summary_fields' in response.data
    assert 'host' in response.data['summary_fields']
    assert 'name' in response.data['summary_fields']['host']
    assert 'description' in response.data['summary_fields']['host']
    assert 'host' in response.data['related']
    assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host']


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_content(hosts, fact_scans, get, user, fact_ansible_json):
    """The default (ansible-module) fact round-trips through the API."""
    (fact_known, response) = setup_common(hosts, fact_scans, get, user)

    assert fact_known.host_id == response.data['host']
    assert fact_ansible_json == json.loads(response.data['facts'])
    assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
    assert fact_known.module == response.data['module']


def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name):
    """Common driver: request facts for ``module_name`` and verify the payload."""
    params = {
        'module': module_name
    }
    (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params)

    assert fact_json == json.loads(response.data['facts'])
    assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
    assert module_name == response.data['module']


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json):
    _test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages')


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json):
    _test_search_by_module(hosts, fact_scans, get, user, fact_services_json, 'services')


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json):
    """Filtering by both datetime and module selects the exact fact record."""
    epoch = timezone.now()
    module_name = 'packages'

    (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, epoch=epoch, get_params=dict(module=module_name, datetime=epoch))

    assert fact_known.id == response.data['id']


def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
    """Common driver: add ``user_obj`` to ``team_obj`` and GET the compare view."""
    hosts = hosts(host_count=1)
    fact_scans(fact_scans=1)

    team_obj.users.add(user_obj)

    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    response = get(HostFactCompareView, user_obj, url, pk=hosts[0].id)
    return response


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
    """A plain user without inventory permission is denied."""
    user_bob = user('bob', False)
    response = _test_user_access_control(hosts, fact_scans, get, user_bob, team)

    assert 403 == response.status_code
    assert "You do not have permission to perform this action." == response.data['detail']


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
    """A superuser can always read host facts."""
    user_super = user('bob', True)
    response = _test_user_access_control(hosts, fact_scans, get, user_super, team)

    assert 200 == response.status_code


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
    """An admin of the host's organization can read its facts."""
    user_admin = user('johnson', False)
    organization.admins.add(user_admin)

    response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)

    assert 200 == response.status_code


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
    """An admin of a DIFFERENT organization is denied."""
    user_admin = user('johnson', False)
    org2 = organizations(1)
    org2[0].admins.add(user_admin)

    response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)

    assert 403 == response.status_code
|
||||
|
||||
20
awx/main/tests/functional/api/test_host_detail.py
Normal file
20
awx/main/tests/functional/api/test_host_detail.py
Normal file
@ -0,0 +1,20 @@
|
||||
# TODO: As of writing this our only concern is ensuring that the fact feature
# is reflected in the Host endpoint.  Other host tests should live here to
# make this test suite more complete.
import pytest

from awx.api.views import (
    HostDetail,
)
from django.core.urlresolvers import reverse


@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
    """The host detail response advertises the fact_versions related link."""
    host_objs = hosts(host_count=1)

    detail_url = reverse('api:host_detail', args=(host_objs[0].pk,))
    response = get(HostDetail, user('admin', True), detail_url, pk=host_objs[0].id)

    assert 'related' in response.data
    assert 'fact_versions' in response.data['related']
    expected_link = reverse('api:host_fact_versions_list', args=(host_objs[0].pk,))
    assert expected_link == response.data['related']['fact_versions']
|
||||
|
||||
0
awx/main/tests/functional/commands/__init__.py
Normal file
0
awx/main/tests/functional/commands/__init__.py
Normal file
109
awx/main/tests/functional/commands/conftest.py
Normal file
109
awx/main/tests/functional/commands/conftest.py
Normal file
@ -0,0 +1,109 @@
|
||||
import pytest
import time

from datetime import datetime


@pytest.fixture
def fact_msg_base(inventory, hosts):
    """Skeleton of a fact-cache message: one host, empty facts payload."""
    created_hosts = hosts(1)
    return {
        'host': created_hosts[0].name,
        'date_key': time.mktime(datetime.utcnow().timetuple()),
        'facts': {},
        'inventory_id': inventory.id,
    }


@pytest.fixture
def fact_msg_small(fact_msg_base):
    """fact_msg_base with a small hand-written facts payload covering the
    'packages', 'services' and 'ansible' modules."""
    packages = {
        "accountsservice": [
            {
                "architecture": "amd64",
                "name": "accountsservice",
                "source": "apt",
                "version": "0.6.35-0ubuntu7.1"
            }
        ],
        "acpid": [
            {
                "architecture": "amd64",
                "name": "acpid",
                "source": "apt",
                "version": "1:2.0.21-1ubuntu2"
            }
        ],
        "adduser": [
            {
                "architecture": "all",
                "name": "adduser",
                "source": "apt",
                "version": "3.113+nmu3ubuntu3"
            }
        ],
    }
    services = [
        {
            "name": "acpid",
            "source": "sysv",
            "state": "running"
        },
        {
            "name": "apparmor",
            "source": "sysv",
            "state": "stopped"
        },
        {
            "name": "atd",
            "source": "sysv",
            "state": "running"
        },
        {
            "name": "cron",
            "source": "sysv",
            "state": "running"
        }
    ]
    ansible = {
        'ansible_fact_simple': 'hello world',
        'ansible_fact_complex': {
            'foo': 'bar',
            'hello': [
                'scooby',
                'dooby',
                'doo'
            ]
        },
    }
    fact_msg_base['facts'] = {
        'packages': packages,
        'services': services,
        'ansible': ansible,
    }
    return fact_msg_base


'''
Facts sent from ansible to our fact cache reciever.
The fact module type is implicit i.e

Note: The 'ansible' module is an expection to this rule.
It is NOT nested in a dict, and thus does NOT contain a first-level
key of 'ansible'

{
    'fact_module_name': { ... },
}
'''

@pytest.fixture
def fact_msg_ansible(fact_msg_base, fact_ansible_json):
    """fact_msg_base carrying the full recorded 'ansible' facts payload."""
    fact_msg_base['facts'] = fact_ansible_json
    return fact_msg_base


@pytest.fixture
def fact_msg_packages(fact_msg_base, fact_packages_json):
    """fact_msg_base carrying the recorded 'packages' facts payload."""
    fact_msg_base['facts']['packages'] = fact_packages_json
    return fact_msg_base


@pytest.fixture
def fact_msg_services(fact_msg_base, fact_services_json):
    """fact_msg_base carrying the recorded 'services' facts payload."""
    fact_msg_base['facts']['services'] = fact_services_json
    return fact_msg_base
|
||||
|
||||
@ -0,0 +1,95 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
import pytest
from datetime import datetime
import json

# Django

# AWX
from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver
from awx.main.models.fact import Fact
from awx.main.models.inventory import Host


# TODO: Check that timestamp and other attributes are as expected
def check_process_fact_message_module(fact_returned, data, module_name):
    """Verify that processing ``data`` stored exactly one Fact for the
    expected host/module/timestamp and that its payload matches."""
    expected_timestamp = datetime.fromtimestamp(data['date_key'], None)

    # Ensure 1, and only 1, fact created
    assert 1 == Fact.objects.all().count()

    host_obj = Host.objects.get(name=data['host'], inventory__id=data['inventory_id'])
    assert host_obj is not None
    fact_known = Fact.get_host_fact(host_obj.id, module_name, expected_timestamp)
    assert fact_known is not None
    assert fact_known == fact_returned

    assert host_obj == fact_returned.host
    # The 'ansible' module's facts are sent un-nested; every other module is
    # nested under its module name in the incoming message.
    if module_name == 'ansible':
        assert data['facts'] == fact_returned.facts
    else:
        assert data['facts'][module_name] == fact_returned.facts
    assert expected_timestamp == fact_returned.timestamp
    assert module_name == fact_returned.module


@pytest.mark.django_db
def test_process_fact_message_ansible(fact_msg_ansible):
    receiver = FactCacheReceiver()
    fact_returned = receiver.process_fact_message(fact_msg_ansible)

    check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')


@pytest.mark.django_db
def test_process_fact_message_packages(fact_msg_packages):
    receiver = FactCacheReceiver()
    fact_returned = receiver.process_fact_message(fact_msg_packages)

    check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')


@pytest.mark.django_db
def test_process_fact_message_services(fact_msg_services):
    receiver = FactCacheReceiver()
    fact_returned = receiver.process_fact_message(fact_msg_services)

    check_process_fact_message_module(fact_returned, fact_msg_services, 'services')


'''
We pickypack our fact sending onto the Ansible fact interface.
The interface is <hostname, facts>. Where facts is a json blob of all the facts.
This makes it hard to decipher what facts are new/changed.
Because of this, we handle the same fact module data being sent multiple times
and just keep the newest version.
'''
@pytest.mark.django_db
def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
    """Re-sending the same scan with an extra key updates the stored fact in
    place rather than creating a duplicate."""
    epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
    fact_scans(fact_scans=1, timestamp_epoch=epoch)
    key = 'ansible.overwrite'
    value = 'hello world'

    receiver = FactCacheReceiver()
    receiver.process_fact_message(fact_msg_ansible)

    fact_msg_ansible['facts'][key] = value
    fact_returned = receiver.process_fact_message(fact_msg_ansible)

    fact_obj = Fact.objects.get(id=fact_returned.id)
    assert key in fact_obj.facts
    assert json.loads(fact_obj.facts) == fact_msg_ansible['facts']
    assert value == json.loads(fact_obj.facts)[key]


# Ensure that the message flows from the socket through to process_fact_message()
@pytest.mark.django_db
def test_run_receiver(mocker, fact_msg_ansible):
    mocker.patch("awx.main.socket.Socket.listen", return_value=[fact_msg_ansible])

    receiver = FactCacheReceiver()
    mocker.patch.object(receiver, 'process_fact_message', return_value=None)

    receiver.run_receiver(use_processing_threads=False)

    receiver.process_fact_message.assert_called_once_with(fact_msg_ansible)
|
||||
@ -1,13 +1,42 @@
|
||||
import pytest
|
||||
import mock
|
||||
import json
|
||||
import os
|
||||
|
||||
from awx.main.models.organization import Organization
|
||||
from datetime import timedelta
|
||||
|
||||
from awx.main.models.organization import Organization, Permission
|
||||
from awx.main.models.base import PERM_INVENTORY_READ
|
||||
from awx.main.models.ha import Instance
|
||||
from awx.main.models.fact import Fact
|
||||
|
||||
from django.utils import timezone
|
||||
from django.contrib.auth.models import User
|
||||
from django.conf import settings
|
||||
|
||||
from rest_framework.test import (
|
||||
APIRequestFactory,
|
||||
force_authenticate,
|
||||
)
|
||||
|
||||
@pytest.fixture(scope="session", autouse=False)
def disable_signals():
    """Disable all django model signals for the session (opt-in)."""
    signal_patch = mock.patch('django.dispatch.Signal.send', autospec=True)
    signal_patch.start()


@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
    """Allow django signal code to execute without the need for redis.

    FIXME: Not sure how "far" just setting the BROKER_URL will get us.
    We may need to influence CELERY's configuration like we do in the old
    unit tests (see base.py).
    """
    settings.BROKER_URL = 'memory://localhost/'
|
||||
|
||||
@pytest.fixture
|
||||
def user():
|
||||
def u(name, is_superuser=False):
|
||||
@ -35,9 +64,23 @@ def post():
|
||||
|
||||
@pytest.fixture
def get():
    """Factory fixture: returns a helper that issues an authenticated GET
    against a DRF view class and returns the rendered response.

    FIX: ``params={}`` was a mutable default shared by every call; it is now
    ``None`` and resolved per call.
    """
    def rf(_cls, _user, _url, pk=None, params=None, middleware=None):
        if params is None:
            params = {}
        view = _cls.as_view()
        request = APIRequestFactory().get(_url, params, format='json')
        if middleware:
            middleware.process_request(request)
        force_authenticate(request, user=_user)
        response = view(request, pk=pk)
        if middleware:
            middleware.process_response(request, response)
        return response
    return rf
|
||||
|
||||
@pytest.fixture
|
||||
def options():
|
||||
def rf(_cls, _user, _url, pk=None, params={}, middleware=None):
|
||||
view = _cls.as_view()
|
||||
request = APIRequestFactory().options(_url, params, format='json')
|
||||
if middleware:
|
||||
middleware.process_request(request)
|
||||
force_authenticate(request, user=_user)
|
||||
@ -54,3 +97,80 @@ def instance(settings):
|
||||
@pytest.fixture
def organization(instance):
    """A single organization for tests that only need one."""
    return Organization.objects.create(name="test-org", description="test-org-desc")


@pytest.fixture
def organizations(instance):
    """Factory: create ``organization_count`` additional organizations."""
    def rf(organization_count=1):
        return [
            Organization.objects.create(name="test-org-%d" % idx, description="test-org-desc")
            for idx in xrange(0, organization_count)
        ]
    return rf


@pytest.fixture
def inventory(organization):
    """An inventory owned by the test organization."""
    return organization.inventories.create(name="test-inv")


@pytest.fixture
def group(inventory):
    """A single group inside the test inventory."""
    return inventory.groups.create(name='group-1')


@pytest.fixture
def hosts(group):
    """Factory: create ``host_count`` hosts and attach them to the group."""
    def rf(host_count=1):
        created = []
        for idx in xrange(0, host_count):
            host_name = '%s-host-%s' % (group.name, idx)
            (host, was_created) = group.inventory.hosts.get_or_create(name=host_name)
            if was_created:
                group.hosts.add(host)
            created.append(host)
        return created
    return rf


@pytest.fixture
def fact_scans(group, fact_ansible_json, fact_packages_json, fact_services_json):
    """Factory: create ``fact_scans`` rounds of Fact rows (one per module,
    per host), each round one day after the previous."""
    def rf(fact_scans=1, timestamp_epoch=timezone.now()):
        payload_by_module = {
            'ansible': fact_ansible_json,
            'packages': fact_packages_json,
            'services': fact_services_json,
        }
        created_facts = []
        scan_timestamp = timestamp_epoch

        for _ in xrange(0, fact_scans):
            for host in group.hosts.all():
                for module_name in ['ansible', 'services', 'packages']:
                    created_facts.append(
                        Fact.objects.create(host=host, timestamp=scan_timestamp,
                                            module=module_name, facts=payload_by_module[module_name]))
            scan_timestamp += timedelta(days=1)
        return created_facts
    return rf


def _fact_json(module_name):
    """Load the recorded <module_name>.json fixture next to this file."""
    current_dir = os.path.dirname(os.path.realpath(__file__))
    with open('%s/%s.json' % (current_dir, module_name)) as f:
        return json.load(f)


@pytest.fixture
def fact_ansible_json():
    return _fact_json('ansible')


@pytest.fixture
def fact_packages_json():
    return _fact_json('packages')


@pytest.fixture
def fact_services_json():
    return _fact_json('services')


@pytest.fixture
def team(organization):
    """A team inside the test organization."""
    return organization.teams.create(name='test-team')


@pytest.fixture
def permission_inv_read(organization, inventory, team):
    """Read permission on the test inventory for the test team."""
    return Permission.objects.create(inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ)
|
||||
|
||||
|
||||
111
awx/main/tests/functional/models/fact/test_get_host_fact.py
Normal file
111
awx/main/tests/functional/models/fact/test_get_host_fact.py
Normal file
@ -0,0 +1,111 @@
|
||||
import pytest

from datetime import timedelta
from django.utils import timezone

from awx.main.models import Fact


def _scan_fact(facts, host_id, module, timestamp):
    """Return the first created fact matching <host_id, module, timestamp>,
    or None when absent."""
    for f in facts:
        if f.host_id == host_id and f.module == module and f.timestamp == timestamp:
            return f
    return None


@pytest.mark.django_db
def test_newest_scan_exact(hosts, fact_scans):
    """A timestamp that matches a scan exactly returns that scan."""
    epoch = timezone.now()
    host_objs = hosts(host_count=2)
    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    fact_known = _scan_fact(facts, host_objs[0].id, 'ansible', epoch)
    fact_found = Fact.get_host_fact(host_objs[0].id, 'ansible', epoch)

    assert fact_found == fact_known


'''
Show me the most recent state of the sytem at any point of time.
or, said differently
For any timestamp, get the first scan that is <= the timestamp.
'''

'''
Ensure most recent scan run is the scan returned.
Query by future date.
'''
@pytest.mark.django_db
def test_newest_scan_less_than(hosts, fact_scans):
    epoch = timezone.now()
    timestamp_future = epoch + timedelta(days=10)
    host_objs = hosts(host_count=2)
    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    fact_known = _scan_fact(facts, host_objs[0].id, 'ansible', epoch + timedelta(days=2))
    assert fact_known is not None

    fact_found = Fact.get_host_fact(host_objs[0].id, 'ansible', timestamp_future)

    assert fact_found == fact_known


'''
Tests query Fact that is in the middle of the fact scan timeline, but not an exact timestamp.
'''
@pytest.mark.django_db
def test_query_middle_of_timeline(hosts, fact_scans):
    epoch = timezone.now()
    timestamp_middle = epoch + timedelta(days=1, hours=3)
    host_objs = hosts(host_count=2)
    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)

    fact_known = _scan_fact(facts, host_objs[0].id, 'ansible', epoch + timedelta(days=1))
    assert fact_known is not None

    fact_found = Fact.get_host_fact(host_objs[0].id, 'ansible', timestamp_middle)

    assert fact_found == fact_known


'''
Query time less than any fact scan. Should return None
'''
@pytest.mark.django_db
def test_query_result_empty(hosts, fact_scans):
    epoch = timezone.now()
    timestamp_less = epoch - timedelta(days=1)
    host_objs = hosts(host_count=2)
    fact_scans(fact_scans=3, timestamp_epoch=epoch)

    fact_found = Fact.get_host_fact(host_objs[0].id, 'ansible', timestamp_less)

    assert fact_found is None
|
||||
|
||||
'''
|
||||
Query by fact module other than 'ansible'
|
||||
'''
|
||||
@pytest.mark.django_db
|
||||
def test_by_module(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
hosts = hosts(host_count=2)
|
||||
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
|
||||
|
||||
fact_known_services = None
|
||||
fact_known_packages = None
|
||||
for f in facts:
|
||||
if f.host_id == hosts[0].id:
|
||||
if f.module == 'services' and f.timestamp == epoch:
|
||||
fact_known_services = f
|
||||
elif f.module == 'packages' and f.timestamp == epoch:
|
||||
fact_known_packages = f
|
||||
assert fact_known_services is not None
|
||||
assert fact_known_packages is not None
|
||||
|
||||
fact_found_services = Fact.get_host_fact(hosts[0].id, 'services', epoch)
|
||||
fact_found_packages = Fact.get_host_fact(hosts[0].id, 'packages', epoch)
|
||||
|
||||
assert fact_found_services == fact_known_services
|
||||
assert fact_found_packages == fact_known_packages
|
||||
|
||||
129
awx/main/tests/functional/models/fact/test_get_timeline.py
Normal file
129
awx/main/tests/functional/models/fact/test_get_timeline.py
Normal file
@ -0,0 +1,129 @@
|
||||
import pytest
|
||||
|
||||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
|
||||
from awx.main.models import Fact
|
||||
|
||||
def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now(), module_name='ansible', ts_known=None):
|
||||
hosts = hosts(host_count=2)
|
||||
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
|
||||
|
||||
facts_known = []
|
||||
for f in facts:
|
||||
if f.host.id == hosts[0].id:
|
||||
if module_name and f.module != module_name:
|
||||
continue
|
||||
if ts_known and f.timestamp != ts_known:
|
||||
continue
|
||||
facts_known.append(f)
|
||||
fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
|
||||
return (facts_known, fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_all(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=10)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch)
|
||||
assert 9 == len(facts_known)
|
||||
assert 9 == len(fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_all_ansible(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=10)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
|
||||
assert 3 == len(facts_known)
|
||||
assert 3 == len(fact_objs)
|
||||
|
||||
for i in xrange(len(facts_known) - 1, 0):
|
||||
assert facts_known[i].id == fact_objs[i].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_db(hosts, fact_scans):
|
||||
hosts = hosts(host_count=2)
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=10)
|
||||
|
||||
fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to)
|
||||
|
||||
assert 0 == len(fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_results(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch - timedelta(days=100)
|
||||
ts_to = epoch - timedelta(days=50)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
|
||||
assert 0 == len(fact_objs)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_exact_same_equal(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_to = ts_from = epoch + timedelta(days=1)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
|
||||
assert 1 == len(facts_known)
|
||||
assert 1 == len(fact_objs)
|
||||
|
||||
assert facts_known[0].id == fact_objs[0].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch + timedelta(days=1)
|
||||
ts_to = epoch + timedelta(days=2)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
|
||||
|
||||
assert 1 == len(facts_known)
|
||||
assert 1 == len(fact_objs)
|
||||
|
||||
assert facts_known[0].id == fact_objs[0].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_to_lte(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_to = epoch + timedelta(days=1)
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch)
|
||||
facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known)
|
||||
|
||||
assert 2 == len(facts_known_subset)
|
||||
assert 2 == len(fact_objs)
|
||||
|
||||
for i in xrange(0, len(fact_objs)):
|
||||
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_from_gt(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
ts_from = epoch
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch)
|
||||
facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known)
|
||||
|
||||
assert 2 == len(facts_known_subset)
|
||||
assert 2 == len(fact_objs)
|
||||
|
||||
for i in xrange(0, len(fact_objs)):
|
||||
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_ts(hosts, fact_scans):
|
||||
epoch = timezone.now()
|
||||
|
||||
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=epoch)
|
||||
assert 3 == len(facts_known)
|
||||
assert 3 == len(fact_objs)
|
||||
|
||||
for i in xrange(len(facts_known) - 1, 0):
|
||||
assert facts_known[i].id == fact_objs[i].id
|
||||
|
||||
|
||||
2922
awx/main/tests/functional/packages.json
Normal file
2922
awx/main/tests/functional/packages.json
Normal file
File diff suppressed because it is too large
Load Diff
697
awx/main/tests/functional/services.json
Normal file
697
awx/main/tests/functional/services.json
Normal file
@ -0,0 +1,697 @@
|
||||
[
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "iprdump"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "iprinit"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "iprupdate"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "stopped",
|
||||
"name": "netconsole"
|
||||
},
|
||||
{
|
||||
"source": "sysv",
|
||||
"state": "running",
|
||||
"name": "network"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "arp-ethers.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "auditd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "autovt@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "avahi-daemon.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "blk-availability.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "brandbot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "console-getty.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "console-shell.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "cpupower.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "crond.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.fedoraproject.FirewallD1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.freedesktop.Avahi.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.hostname1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.locale1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.login1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.machine1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.freedesktop.NetworkManager.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dbus-org.freedesktop.nm-dispatcher.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus-org.freedesktop.timedate1.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dbus.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "debug-shell.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "dhcpd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dhcpd6.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dhcrelay.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dm-event.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dnsmasq.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-cmdline.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-initqueue.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-mount.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-mount.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-pivot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-trigger.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-pre-udev.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "dracut-shutdown.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "ebtables.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "emergency.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "firewalld.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "getty@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "halt-local.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-cleanup.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-parse-etc.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-switch-root.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "initrd-udevadm-cleanup-db.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "irqbalance.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "kdump.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "kmod-static-nodes.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "lvm2-lvmetad.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "lvm2-monitor.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "lvm2-pvscan@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "messagebus.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "microcode.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "named-setup-rndc.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "named.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "NetworkManager-dispatcher.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "NetworkManager-wait-online.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "NetworkManager.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "ntpd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "ntpdate.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "openvpn@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-halt.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-kexec.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-poweroff.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-quit-wait.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-quit.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-read-write.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-reboot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-start.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "plymouth-switch-root.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "polkit.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "postfix.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "quotaon.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rc-local.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rdisc.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rescue.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-autorelabel-mark.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-autorelabel.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-configure.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-dmesg.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-domainname.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-import-state.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-loadmodules.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "rhel-readonly.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "rsyslog.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "serial-getty@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "sshd-keygen.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "sshd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "sshd@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-ask-password-console.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-ask-password-plymouth.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-ask-password-wall.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-backlight@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-binfmt.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-fsck-root.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-fsck@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-halt.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-hibernate.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-hostnamed.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-hybrid-sleep.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-initctl.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-journal-flush.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-journald.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-kexec.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-localed.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-logind.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-machined.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-modules-load.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-nspawn@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-poweroff.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-quotacheck.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-random-seed.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "systemd-readahead-collect.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-readahead-done.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "systemd-readahead-drop.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "systemd-readahead-replay.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-reboot.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-remount-fs.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-shutdownd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-suspend.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-sysctl.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-timedated.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-tmpfiles-clean.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-tmpfiles-setup-dev.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-tmpfiles-setup.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-udev-settle.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-udev-trigger.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-udevd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-update-utmp-runlevel.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-update-utmp.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-user-sessions.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "systemd-vconsole-setup.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "teamd@.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "tuned.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "running",
|
||||
"name": "vmtoolsd.service"
|
||||
},
|
||||
{
|
||||
"source": "systemd",
|
||||
"state": "stopped",
|
||||
"name": "wpa_supplicant.service"
|
||||
}
|
||||
]
|
||||
File diff suppressed because one or more lines are too long
@ -1,242 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Python
|
||||
import unittest2 as unittest
|
||||
|
||||
# Django
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
# AWX
|
||||
from awx.main.utils import timestamp_apiformat
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.tests.base import BaseLiveServerTest
|
||||
from awx.fact.models import * # noqa
|
||||
from awx.fact.tests.base import BaseFactTestMixin, FactScanBuilder, TEST_FACT_ANSIBLE, TEST_FACT_PACKAGES, TEST_FACT_SERVICES
|
||||
from awx.main.utils import build_url
|
||||
|
||||
__all__ = ['FactVersionApiTest', 'FactViewApiTest', 'SingleFactApiTest',]
|
||||
|
||||
class FactApiBaseTest(BaseLiveServerTest, BaseFactTestMixin):
|
||||
def setUp(self):
|
||||
super(FactApiBaseTest, self).setUp()
|
||||
self.create_test_license_file()
|
||||
self.setup_instances()
|
||||
self.setup_users()
|
||||
self.organization = self.make_organization(self.super_django_user)
|
||||
self.organization.admins.add(self.normal_django_user)
|
||||
self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory')
|
||||
self.host = self.inventory.hosts.create(name='host.example.com')
|
||||
self.host2 = self.inventory.hosts.create(name='host2.example.com')
|
||||
self.host3 = self.inventory.hosts.create(name='host3.example.com')
|
||||
|
||||
def setup_facts(self, scan_count):
|
||||
self.builder = FactScanBuilder()
|
||||
self.builder.set_inventory_id(self.inventory.pk)
|
||||
self.builder.add_fact('ansible', TEST_FACT_ANSIBLE)
|
||||
self.builder.add_fact('packages', TEST_FACT_PACKAGES)
|
||||
self.builder.add_fact('services', TEST_FACT_SERVICES)
|
||||
self.builder.add_hostname('host.example.com')
|
||||
self.builder.add_hostname('host2.example.com')
|
||||
self.builder.add_hostname('host3.example.com')
|
||||
self.builder.build(scan_count=scan_count, host_count=3)
|
||||
|
||||
self.fact_host = FactHost.objects.get(hostname=self.host.name)
|
||||
|
||||
class FactVersionApiTest(FactApiBaseTest):
|
||||
def check_equal(self, fact_versions, results):
|
||||
def find(element, set1):
|
||||
for e in set1:
|
||||
if all([ e.get(field) == element.get(field) for field in element.keys()]):
|
||||
return e
|
||||
return None
|
||||
|
||||
self.assertEqual(len(results), len(fact_versions))
|
||||
for v in fact_versions:
|
||||
v_dict = {
|
||||
'timestamp': timestamp_apiformat(v.timestamp),
|
||||
'module': v.module
|
||||
}
|
||||
e = find(v_dict, results)
|
||||
self.assertIsNotNone(e, "%s not found in %s" % (v_dict, results))
|
||||
|
||||
def get_list(self, fact_versions, params=None):
|
||||
url = build_url('api:host_fact_versions_list', args=(self.host.pk,), get=params)
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
|
||||
self.check_equal(fact_versions, response['results'])
|
||||
return response
|
||||
|
||||
def test_permission_list(self):
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user('admin'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('normal'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
|
||||
def test_list_empty(self):
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertIn('results', response)
|
||||
self.assertIsInstance(response['results'], list)
|
||||
self.assertEqual(len(response['results']), 0)
|
||||
|
||||
def test_list_related_fact_view(self):
|
||||
self.setup_facts(2)
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
for entry in response['results']:
|
||||
self.assertIn('fact_view', entry['related'])
|
||||
self.get(entry['related']['fact_view'], expect=200)
|
||||
|
||||
def test_list(self):
|
||||
self.setup_facts(2)
|
||||
self.get_list(FactVersion.objects.filter(host=self.fact_host))
|
||||
|
||||
def test_list_module(self):
|
||||
self.setup_facts(10)
|
||||
self.get_list(FactVersion.objects.filter(host=self.fact_host, module='packages'), dict(module='packages'))
|
||||
|
||||
def test_list_time_from(self):
|
||||
self.setup_facts(10)
|
||||
|
||||
params = {
|
||||
'from': timestamp_apiformat(self.builder.get_timestamp(1)),
|
||||
}
|
||||
# 'to': timestamp_apiformat(self.builder.get_timestamp(3))
|
||||
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'])
|
||||
self.get_list(fact_versions, params)
|
||||
|
||||
def test_list_time_to(self):
|
||||
self.setup_facts(10)
|
||||
|
||||
params = {
|
||||
'to': timestamp_apiformat(self.builder.get_timestamp(3))
|
||||
}
|
||||
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__lte=params['to'])
|
||||
self.get_list(fact_versions, params)
|
||||
|
||||
def test_list_time_from_to(self):
|
||||
self.setup_facts(10)
|
||||
|
||||
params = {
|
||||
'from': timestamp_apiformat(self.builder.get_timestamp(1)),
|
||||
'to': timestamp_apiformat(self.builder.get_timestamp(3))
|
||||
}
|
||||
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'], timestamp__lte=params['to'])
|
||||
self.get_list(fact_versions, params)
|
||||
|
||||
|
||||
class FactViewApiTest(FactApiBaseTest):
|
||||
def check_equal(self, fact_obj, results):
|
||||
fact_dict = {
|
||||
'timestamp': timestamp_apiformat(fact_obj.timestamp),
|
||||
'module': fact_obj.module,
|
||||
'host': {
|
||||
'hostname': fact_obj.host.hostname,
|
||||
'inventory_id': fact_obj.host.inventory_id,
|
||||
'id': str(fact_obj.host.id)
|
||||
},
|
||||
'fact': fact_obj.fact
|
||||
}
|
||||
self.assertEqual(fact_dict, results)
|
||||
|
||||
def test_permission_view(self):
|
||||
url = reverse('api:host_fact_compare_view', args=(self.host.pk,))
|
||||
with self.current_user('admin'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('normal'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
|
||||
def get_fact(self, fact_obj, params=None):
|
||||
url = build_url('api:host_fact_compare_view', args=(self.host.pk,), get=params)
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
|
||||
self.check_equal(fact_obj, response)
|
||||
|
||||
def test_view(self):
|
||||
self.setup_facts(2)
|
||||
self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible').order_by('-timestamp')[0])
|
||||
|
||||
def test_view_module_filter(self):
|
||||
self.setup_facts(2)
|
||||
self.get_fact(Fact.objects.filter(host=self.fact_host, module='services').order_by('-timestamp')[0], dict(module='services'))
|
||||
|
||||
def test_view_time_filter(self):
|
||||
self.setup_facts(6)
|
||||
ts = self.builder.get_timestamp(3)
|
||||
self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible', timestamp__lte=ts).order_by('-timestamp')[0],
|
||||
dict(datetime=ts))
|
||||
|
||||
|
||||
@unittest.skip("single fact query needs to be updated to use inventory_id attribute on host document")
|
||||
class SingleFactApiTest(FactApiBaseTest):
|
||||
def setUp(self):
|
||||
super(SingleFactApiTest, self).setUp()
|
||||
|
||||
self.group = self.inventory.groups.create(name='test-group')
|
||||
self.group.hosts.add(self.host, self.host2, self.host3)
|
||||
|
||||
def test_permission_list(self):
|
||||
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
|
||||
with self.current_user('admin'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('normal'):
|
||||
self.get(url, expect=200)
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
|
||||
def _test_related(self, url):
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertTrue(len(response['results']) > 0)
|
||||
for entry in response['results']:
|
||||
self.assertIn('single_fact', entry['related'])
|
||||
# Requires fields
|
||||
self.get(entry['related']['single_fact'], expect=400)
|
||||
|
||||
def test_related_host_list(self):
|
||||
self.setup_facts(2)
|
||||
self._test_related(reverse('api:host_list'))
|
||||
|
||||
def test_related_group_list(self):
|
||||
self.setup_facts(2)
|
||||
self._test_related(reverse('api:group_list'))
|
||||
|
||||
def test_related_inventory_list(self):
|
||||
self.setup_facts(2)
|
||||
self._test_related(reverse('api:inventory_list'))
|
||||
|
||||
def test_params(self):
|
||||
self.setup_facts(2)
|
||||
params = {
|
||||
'module': 'packages',
|
||||
'fact_key': 'name',
|
||||
'fact_value': 'acpid',
|
||||
}
|
||||
url = build_url('api:inventory_single_fact_view', args=(self.inventory.pk,), get=params)
|
||||
with self.current_user(self.super_django_user):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(len(response['results']), 3)
|
||||
for entry in response['results']:
|
||||
self.assertEqual(entry['fact'][0]['name'], 'acpid')
|
||||
@ -662,10 +662,7 @@ ACTIVITY_STREAM_ENABLED = True
|
||||
ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC = False
|
||||
|
||||
# Internal API URL for use by inventory scripts and callback plugin.
|
||||
if 'devserver' in INSTALLED_APPS:
|
||||
INTERNAL_API_URL = 'http://127.0.0.1:%s' % DEVSERVER_DEFAULT_PORT
|
||||
else:
|
||||
INTERNAL_API_URL = 'http://127.0.0.1:8000'
|
||||
INTERNAL_API_URL = 'http://127.0.0.1:%s' % DEVSERVER_DEFAULT_PORT
|
||||
|
||||
# ZeroMQ callback settings.
|
||||
CALLBACK_CONSUMER_PORT = "tcp://127.0.0.1:5556"
|
||||
|
||||
@ -13,7 +13,6 @@ from split_settings.tools import optional, include
|
||||
# Load default settings.
|
||||
from defaults import * # NOQA
|
||||
|
||||
|
||||
MONGO_HOST = '127.0.0.1'
|
||||
MONGO_PORT = 27017
|
||||
MONGO_USERNAME = None
|
||||
@ -66,10 +65,13 @@ PASSWORD_HASHERS = (
|
||||
# Configure a default UUID for development only.
|
||||
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
STATSD_CLIENT = 'django_statsd.clients.normal'
|
||||
STATSD_HOST = 'graphite'
|
||||
STATSD_CLIENT = 'django_statsd.clients.null'
|
||||
STATSD_HOST = None
|
||||
STATSD_PREFIX = None
|
||||
#STATSD_CLIENT = 'django_statsd.clients.normal'
|
||||
#STATSD_HOST = 'graphite'
|
||||
STATSD_PORT = 8125
|
||||
STATSD_PREFIX = 'tower'
|
||||
#STATSD_PREFIX = 'tower'
|
||||
STATSD_MAXUDPSIZE = 512
|
||||
|
||||
# If there is an `/etc/tower/settings.py`, include it.
|
||||
|
||||
@ -4,3 +4,5 @@ python_paths = awx/lib/site-packages
|
||||
site_dirs = awx/lib/site-packages
|
||||
python_files = *.py
|
||||
addopts = --reuse-db
|
||||
markers =
|
||||
ac: access control test
|
||||
|
||||
@ -49,7 +49,7 @@ importlib==1.0.3
|
||||
ipaddress==1.0.14
|
||||
iso8601==0.1.10
|
||||
isodate==0.5.1
|
||||
git+https://github.com/chrismeyersfsu/django-jsonbfield@master#egg=django-jsonbfield
|
||||
git+https://github.com/chrismeyersfsu/django-jsonbfield@fix-sqlite_serialization#egg=django-jsonbfield
|
||||
jsonpatch==1.11
|
||||
jsonpointer==1.9
|
||||
jsonschema==2.5.1
|
||||
|
||||
@ -7,3 +7,4 @@ pytest
|
||||
pytest-cov
|
||||
pytest-django
|
||||
pytest-pythonpath
|
||||
pytest-mock
|
||||
|
||||
@ -13,3 +13,4 @@ pytest
|
||||
pytest-cov
|
||||
pytest-django
|
||||
pytest-pythonpath
|
||||
pytest-mock
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
#!/bin/bash
|
||||
set +x
|
||||
|
||||
# Wait for the databases to come up
|
||||
ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user