Merge branch 'devel' of github.com:ansible/ansible-tower into merge-devel

Akita Noek 2016-03-01 15:09:58 -05:00
commit ea2cabe4d4
76 changed files with 6779 additions and 1263 deletions


@ -291,7 +291,7 @@ migrate:
# Run after making changes to the models to create a new migration.
dbchange:
$(PYTHON) manage.py schemamigration main v14_changes --auto
$(PYTHON) manage.py makemigrations
# access database shell, asks for password
dbshell:


@ -32,7 +32,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
'MongoAPIView', 'MongoListAPIView']
'SubDetailAPIView',
'ParentMixin',]
logger = logging.getLogger('awx.api.generics')
@ -219,28 +220,6 @@ class GenericAPIView(generics.GenericAPIView, APIView):
d['settings'] = settings
return d
class MongoAPIView(GenericAPIView):
def get_parent_object(self):
parent_filter = {
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
return get_object_or_404(self.parent_model, **parent_filter)
def check_parent_access(self, parent=None):
parent = parent or self.get_parent_object()
parent_access = getattr(self, 'parent_access', 'read')
if parent_access in ('read', 'delete'):
args = (self.parent_model, parent_access, parent)
else:
args = (self.parent_model, parent_access, parent, None)
if not self.request.user.can_access(*args):
raise PermissionDenied()
class MongoListAPIView(generics.ListAPIView, MongoAPIView):
pass
class SimpleListAPIView(generics.ListAPIView, GenericAPIView):
def get_queryset(self):
@ -277,7 +256,25 @@ class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
# Base class for a list view that allows creating new objects.
pass
class SubListAPIView(ListAPIView):
class ParentMixin(object):
def get_parent_object(self):
parent_filter = {
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
return get_object_or_404(self.parent_model, **parent_filter)
def check_parent_access(self, parent=None):
parent = parent or self.get_parent_object()
parent_access = getattr(self, 'parent_access', 'read')
if parent_access in ('read', 'delete'):
args = (self.parent_model, parent_access, parent)
else:
args = (self.parent_model, parent_access, parent, None)
if not self.request.user.can_access(*args):
raise PermissionDenied()
class SubListAPIView(ListAPIView, ParentMixin):
# Base class for a read-only sublist view.
# Subclasses should define at least:
@ -297,22 +294,6 @@ class SubListAPIView(ListAPIView):
})
return d
def get_parent_object(self):
parent_filter = {
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
return get_object_or_404(self.parent_model, **parent_filter)
def check_parent_access(self, parent=None):
parent = parent or self.get_parent_object()
parent_access = getattr(self, 'parent_access', 'read')
if parent_access in ('read', 'delete'):
args = (self.parent_model, parent_access, parent)
else:
args = (self.parent_model, parent_access, parent, None)
if not self.request.user.can_access(*args):
raise PermissionDenied()
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
@ -449,6 +430,9 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
else:
return self.attach(request, *args, **kwargs)
class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin):
pass
class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
pass
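
The net effect of this refactor is that the parent-lookup and permission logic formerly duplicated in MongoAPIView and the sublist views now lives once in ParentMixin. A minimal sketch of how a view composes it, assuming the AWX models and serializers are importable; the class name here is illustrative, and the real sublist views appear later in this diff:

from awx.api.generics import SubListAPIView
from awx.api.serializers import NotifierSerializer
from awx.main.models import Notifier, Organization

class ExampleNotifiersList(SubListAPIView):
    model = Notifier                  # objects listed by the view
    serializer_class = NotifierSerializer
    parent_model = Organization       # resolved by ParentMixin.get_parent_object()
    relationship = 'notifiers'        # attribute on the parent holding the sublist
    # The inherited get_queryset() calls get_parent_object() and
    # check_parent_access() from ParentMixin before filtering down
    # to the related objects.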


@ -13,7 +13,7 @@ from rest_framework import serializers
from rest_framework.request import clone_request
# Ansible Tower
from awx.main.models import InventorySource
from awx.main.models import InventorySource, Notifier
class Metadata(metadata.SimpleMetadata):
@ -76,6 +76,12 @@ class Metadata(metadata.SimpleMetadata):
get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp)
field_info['%s_group_by_choices' % cp] = get_group_by_choices()
# Special handling of notification configuration where the required properties
# are conditional on the type selected.
if field.field_name == 'notification_configuration':
for (notification_type_name, notification_tr_name, notification_type_class) in Notifier.NOTIFICATION_TYPES:
field_info[notification_type_name] = notification_type_class.init_parameters
# Update type of fields returned...
if field.field_name == 'type':
field_info['type'] = 'multiple choice'
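
A hedged illustration of what this adds: for each entry in Notifier.NOTIFICATION_TYPES, the OPTIONS metadata for notification_configuration gains a key describing that backend's init_parameters. The field names and labels below are illustrative, not taken from a real backend:

field_info_example = {
    'email': {    # one key per notification type
        'host':     {'label': 'Host',     'type': 'string'},
        'port':     {'label': 'Port',     'type': 'int'},
        'password': {'label': 'Password', 'type': 'password'},
    },
    # 'slack': {...}, 'twilio': {...}, and so on for each backend.
}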


@ -9,8 +9,6 @@ import logging
from collections import OrderedDict
from dateutil import rrule
from rest_framework_mongoengine.serializers import DocumentSerializer
# PyYAML
import yaml
@ -46,12 +44,10 @@ from awx.main.conf import tower_settings
from awx.api.license import feature_enabled
from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, EncryptedPasswordField, VerbatimField
from awx.fact.models import * # noqa
logger = logging.getLogger('awx.api.serializers')
# Fields that should be summarized regardless of object type.
DEFAULT_SUMMARY_FIELDS = ('name', 'description')# , 'created_by', 'modified_by')#, 'type')
DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type')
# Keys are fields (foreign keys) where, if found on an instance, summary info
# should be added to the serialized data. Values are a tuple of field names on
@ -555,19 +551,19 @@ class BaseSerializer(serializers.ModelSerializer):
class EmptySerializer(serializers.Serializer):
pass
class BaseFactSerializer(DocumentSerializer):
class BaseFactSerializer(BaseSerializer):
__metaclass__ = BaseSerializerMetaclass
def get_fields(self):
ret = super(BaseFactSerializer, self).get_fields()
if 'module' in ret and feature_enabled('system_tracking'):
choices = [(o, o.title()) for o in FactVersion.objects.all().only('module').distinct('module')]
ret['module'] = serializers.ChoiceField(source='module', choices=choices, read_only=True, required=False)
# TODO: the values_list may pull in a LOT of entries before the distinct is called
modules = Fact.objects.all().values_list('module', flat=True).distinct()
choices = [(o, o.title()) for o in modules]
ret['module'] = serializers.ChoiceField(choices=choices, read_only=True, required=False)
return ret
class UnifiedJobTemplateSerializer(BaseSerializer):
class Meta:
@ -868,7 +864,11 @@ class OrganizationSerializer(BaseSerializer):
users = reverse('api:organization_users_list', args=(obj.pk,)),
admins = reverse('api:organization_admins_list', args=(obj.pk,)),
teams = reverse('api:organization_teams_list', args=(obj.pk,)),
activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,))
activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)),
notifiers = reverse('api:organization_notifiers_list', args=(obj.pk,)),
notifiers_any = reverse('api:organization_notifiers_any_list', args=(obj.pk,)),
notifiers_success = reverse('api:organization_notifiers_success_list', args=(obj.pk,)),
notifiers_error = reverse('api:organization_notifiers_error_list', args=(obj.pk,)),
))
return res
@ -938,6 +938,9 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
project_updates = reverse('api:project_updates_list', args=(obj.pk,)),
schedules = reverse('api:project_schedules_list', args=(obj.pk,)),
activity_stream = reverse('api:project_activity_stream_list', args=(obj.pk,)),
notifiers_any = reverse('api:project_notifiers_any_list', args=(obj.pk,)),
notifiers_success = reverse('api:project_notifiers_success_list', args=(obj.pk,)),
notifiers_error = reverse('api:project_notifiers_error_list', args=(obj.pk,)),
))
# Backwards compatibility.
if obj.current_update:
@ -983,6 +986,7 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
res.update(dict(
project = reverse('api:project_detail', args=(obj.project.pk,)),
cancel = reverse('api:project_update_cancel', args=(obj.pk,)),
notifications = reverse('api:project_update_notifications_list', args=(obj.pk,)),
))
return res
@ -1390,6 +1394,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
activity_stream = reverse('api:inventory_activity_stream_list', args=(obj.pk,)),
hosts = reverse('api:inventory_source_hosts_list', args=(obj.pk,)),
groups = reverse('api:inventory_source_groups_list', args=(obj.pk,)),
notifiers_any = reverse('api:inventory_source_notifiers_any_list', args=(obj.pk,)),
notifiers_success = reverse('api:inventory_source_notifiers_success_list', args=(obj.pk,)),
notifiers_error = reverse('api:inventory_source_notifiers_error_list', args=(obj.pk,)),
))
if obj.inventory and obj.inventory.active:
res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
@ -1434,6 +1441,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
res.update(dict(
inventory_source = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,)),
cancel = reverse('api:inventory_update_cancel', args=(obj.pk,)),
notifications = reverse('api:inventory_update_notifications_list', args=(obj.pk,)),
))
return res
@ -1672,6 +1680,9 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
schedules = reverse('api:job_template_schedules_list', args=(obj.pk,)),
activity_stream = reverse('api:job_template_activity_stream_list', args=(obj.pk,)),
launch = reverse('api:job_template_launch', args=(obj.pk,)),
notifiers_any = reverse('api:job_template_notifiers_any_list', args=(obj.pk,)),
notifiers_success = reverse('api:job_template_notifiers_success_list', args=(obj.pk,)),
notifiers_error = reverse('api:job_template_notifiers_error_list', args=(obj.pk,)),
))
if obj.host_config_key:
res['callback'] = reverse('api:job_template_callback', args=(obj.pk,))
@ -1726,6 +1737,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
job_tasks = reverse('api:job_job_tasks_list', args=(obj.pk,)),
job_host_summaries = reverse('api:job_job_host_summaries_list', args=(obj.pk,)),
activity_stream = reverse('api:job_activity_stream_list', args=(obj.pk,)),
notifications = reverse('api:job_notifications_list', args=(obj.pk,)),
))
if obj.job_template and obj.job_template.active:
res['job_template'] = reverse('api:job_template_detail',
@ -2141,6 +2153,79 @@ class JobLaunchSerializer(BaseSerializer):
attrs = super(JobLaunchSerializer, self).validate(attrs)
return attrs
class NotifierSerializer(BaseSerializer):
class Meta:
model = Notifier
fields = ('*', 'organization', 'notification_type', 'notification_configuration')
type_map = {"string": (str, unicode),
"int": (int,),
"bool": (bool,),
"list": (list,),
"password": (str, unicode),
"object": (dict,)}
def to_representation(self, obj):
ret = super(NotifierSerializer, self).to_representation(obj)
for field in obj.notification_class.init_parameters:
if field in ret['notification_configuration'] and \
force_text(ret['notification_configuration'][field]).startswith('$encrypted$'):
ret['notification_configuration'][field] = '$encrypted$'
return ret
def get_related(self, obj):
res = super(NotifierSerializer, self).get_related(obj)
res.update(dict(
test = reverse('api:notifier_test', args=(obj.pk,)),
notifications = reverse('api:notifier_notification_list', args=(obj.pk,)),
))
if obj.organization and obj.organization.active:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res
def validate(self, attrs):
notification_class = Notifier.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']]
missing_fields = []
incorrect_type_fields = []
if 'notification_configuration' not in attrs:
return attrs
for field in notification_class.init_parameters:
if field not in attrs['notification_configuration']:
missing_fields.append(field)
continue
field_val = attrs['notification_configuration'][field]
field_type = notification_class.init_parameters[field]['type']
expected_types = self.type_map[field_type]
if not type(field_val) in expected_types:
incorrect_type_fields.append((field, field_type))
continue
if field_type == "password" and field_val.startswith('$encrypted$'):
missing_fields.append(field)
error_list = []
if missing_fields:
error_list.append("Missing required fields for Notification Configuration: {}".format(missing_fields))
if incorrect_type_fields:
for type_field_error in incorrect_type_fields:
error_list.append("Configuration field '{}' incorrect type, expected {}".format(type_field_error[0],
type_field_error[1]))
if error_list:
raise serializers.ValidationError(error_list)
return attrs
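
A standalone sketch of the type check validate() performs above, keeping the same type_map semantics (a `type(...) in tuple` membership test rather than isinstance); the init_parameters and configuration values are illustrative:

type_map = {"string": (str, unicode), "int": (int,), "bool": (bool,),
            "list": (list,), "password": (str, unicode), "object": (dict,)}

init_parameters = {"host": {"type": "string"}, "port": {"type": "int"}}
configuration = {"host": "smtp.example.com", "port": "25"}  # wrong type for port

missing_fields, incorrect_type_fields = [], []
for field, spec in init_parameters.items():
    if field not in configuration:
        missing_fields.append(field)
    elif type(configuration[field]) not in type_map[spec["type"]]:
        incorrect_type_fields.append((field, spec["type"]))

assert incorrect_type_fields == [("port", "int")]  # "25" is a str, not an int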
class NotificationSerializer(BaseSerializer):
class Meta:
model = Notification
fields = ('*', '-name', '-description', 'notifier', 'error', 'status', 'notifications_sent',
'notification_type', 'recipients', 'subject')
def get_related(self, obj):
res = super(NotificationSerializer, self).get_related(obj)
res.update(dict(
notifier = reverse('api:notifier_detail', args=(obj.notifier.pk,)),
))
return res
class ScheduleSerializer(BaseSerializer):
@ -2391,28 +2476,31 @@ class AuthTokenSerializer(serializers.Serializer):
class FactVersionSerializer(BaseFactSerializer):
related = serializers.SerializerMethodField('get_related')
class Meta:
model = FactVersion
fields = ('related', 'module', 'timestamp',)
model = Fact
fields = ('related', 'module', 'timestamp')
read_only_fields = ('*',)
def get_related(self, obj):
host_obj = self.context.get('host_obj')
res = {}
res = super(FactVersionSerializer, self).get_related(obj)
params = {
'datetime': timestamp_apiformat(obj.timestamp),
'module': obj.module,
}
res.update(dict(
fact_view = build_url('api:host_fact_compare_view', args=(host_obj.pk,), get=params),
))
res['fact_view'] = build_url('api:host_fact_compare_view', args=(obj.host.pk,), get=params)
return res
class FactSerializer(BaseFactSerializer):
class Meta:
model = Fact
depth = 2
fields = ('timestamp', 'host', 'module', 'fact')
# TODO: Consider adding in host to the fields list ?
fields = ('related', 'timestamp', 'module', 'facts', 'id', 'summary_fields', 'host')
read_only_fields = ('*',)
def get_related(self, obj):
res = super(FactSerializer, self).get_related(obj)
res['host'] = obj.host.get_absolute_url()
return res


@ -20,6 +20,10 @@ organization_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/projects/$', 'organization_projects_list'),
url(r'^(?P<pk>[0-9]+)/teams/$', 'organization_teams_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'organization_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notifiers/$', 'organization_notifiers_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'organization_notifiers_any_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'organization_notifiers_error_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'organization_notifiers_success_list'),
)
user_urls = patterns('awx.api.views',
@ -44,12 +48,16 @@ project_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'project_notifiers_any_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'project_notifiers_error_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'project_notifiers_success_list'),
)
project_update_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/$', 'project_update_detail'),
url(r'^(?P<pk>[0-9]+)/cancel/$', 'project_update_cancel'),
url(r'^(?P<pk>[0-9]+)/stdout/$', 'project_update_stdout'),
url(r'^(?P<pk>[0-9]+)/notifications/$', 'project_update_notifications_list'),
)
team_urls = patterns('awx.api.views',
@ -92,8 +100,8 @@ host_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
)
group_urls = patterns('awx.api.views',
@ -121,12 +129,16 @@ inventory_source_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'inventory_source_notifiers_any_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'inventory_source_notifiers_error_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'inventory_source_notifiers_success_list'),
)
inventory_update_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/$', 'inventory_update_detail'),
url(r'^(?P<pk>[0-9]+)/cancel/$', 'inventory_update_cancel'),
url(r'^(?P<pk>[0-9]+)/stdout/$', 'inventory_update_stdout'),
url(r'^(?P<pk>[0-9]+)/notifications/$', 'inventory_update_notifications_list'),
)
inventory_script_urls = patterns('awx.api.views',
@ -168,6 +180,9 @@ job_template_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'job_template_notifiers_any_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'job_template_notifiers_error_list'),
url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'job_template_notifiers_success_list'),
)
job_urls = patterns('awx.api.views',
@ -182,6 +197,7 @@ job_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/job_tasks/$', 'job_job_tasks_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
)
job_host_summary_urls = patterns('awx.api.views',
@ -224,6 +240,18 @@ system_job_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/cancel/$', 'system_job_cancel'),
)
notifier_urls = patterns('awx.api.views',
url(r'^$', 'notifier_list'),
url(r'^(?P<pk>[0-9]+)/$', 'notifier_detail'),
url(r'^(?P<pk>[0-9]+)/test/$', 'notifier_test'),
url(r'^(?P<pk>[0-9]+)/notifications/$', 'notifier_notification_list'),
)
notification_urls = patterns('awx.api.views',
url(r'^$', 'notification_list'),
url(r'^(?P<pk>[0-9]+)/$', 'notification_detail'),
)
schedule_urls = patterns('awx.api.views',
url(r'^$', 'schedule_list'),
url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
@ -273,6 +301,8 @@ v1_urls = patterns('awx.api.views',
url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
url(r'^system_job_templates/', include(system_job_template_urls)),
url(r'^system_jobs/', include(system_job_urls)),
url(r'^notifiers/', include(notifier_urls)),
url(r'^notifications/', include(notification_urls)),
url(r'^unified_job_templates/$', 'unified_job_template_list'),
url(r'^unified_jobs/$', 'unified_job_list'),
url(r'^activity_stream/', include(activity_stream_urls)),


@ -42,9 +42,6 @@ from rest_framework import status
from rest_framework_yaml.parsers import YAMLParser
from rest_framework_yaml.renderers import YAMLRenderer
# MongoEngine
import mongoengine
# QSStats
import qsstats
@ -56,12 +53,11 @@ from social.backends.utils import load_backends
# AWX
from awx.main.task_engine import TaskSerializer, TASK_FILE, TEMPORARY_TASK_FILE
from awx.main.tasks import mongodb_control
from awx.main.tasks import mongodb_control, send_notifications
from awx.main.access import get_user_queryset
from awx.main.ha import is_ha_environment
from awx.api.authentication import TaskAuthentication, TokenGetAuthentication
from awx.api.utils.decorators import paginated
from awx.api.filters import MongoFilterBackend
from awx.api.generics import get_view_name
from awx.api.generics import * # noqa
from awx.api.license import feature_enabled, feature_exists, LicenseForbids
@ -70,7 +66,6 @@ from awx.main.utils import * # noqa
from awx.api.permissions import * # noqa
from awx.api.renderers import * # noqa
from awx.api.serializers import * # noqa
from awx.fact.models import * # noqa
from awx.main.utils import emit_websocket_notification
from awx.main.conf import tower_settings
@ -137,6 +132,8 @@ class ApiV1RootView(APIView):
data['schedules'] = reverse('api:schedule_list')
data['roles'] = reverse('api:role_list')
data['resources'] = reverse('api:resource_list')
data['notifiers'] = reverse('api:notifier_list')
data['notifications'] = reverse('api:notification_list')
data['unified_job_templates'] = reverse('api:unified_job_template_list')
data['unified_jobs'] = reverse('api:unified_job_list')
data['activity_stream'] = reverse('api:activity_stream_list')
@ -252,32 +249,12 @@ class ApiV1ConfigView(APIView):
# FIX: Log
return Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST)
# Sanity check: If this license includes system tracking, make
# sure that we have a valid MongoDB to point to, and complain if
# we do not.
if ('features' in license_data and 'system_tracking' in license_data['features'] and
license_data['features']['system_tracking'] and settings.MONGO_HOST == NotImplemented):
return Response({
'error': 'This license supports system tracking, which '
'requires MongoDB to be installed. Since you are '
'running in an HA environment, you will need to '
'provide a MongoDB instance. Please re-run the '
'installer prior to installing this license.'
}, status=status.HTTP_400_BAD_REQUEST)
# If the license is valid, write it to disk.
if license_data['valid_key']:
tower_settings.LICENSE = data_actual
# Spawn a task to ensure that MongoDB is started (or stopped)
# as appropriate, based on whether the license uses it.
if license_data['features']['system_tracking']:
mongodb_control.delay('start')
else:
mongodb_control.delay('stop')
# Done; return the response.
tower_settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data)
return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request):
@ -698,6 +675,35 @@ class OrganizationActivityStreamList(SubListAPIView):
# Okay, let it through.
return super(type(self), self).get(request, *args, **kwargs)
class OrganizationNotifiersList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Organization
relationship = 'notifiers'
parent_key = 'organization'
class OrganizationNotifiersAnyList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Organization
relationship = 'notifiers_any'
class OrganizationNotifiersErrorList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Organization
relationship = 'notifiers_error'
class OrganizationNotifiersSuccessList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Organization
relationship = 'notifiers_success'
class TeamList(ListCreateAPIView):
model = Team
@ -868,6 +874,26 @@ class ProjectActivityStreamList(SubListAPIView):
return qs.filter(project=parent)
return qs.filter(Q(project=parent) | Q(credential__in=parent.credential))
class ProjectNotifiersAnyList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Project
relationship = 'notifiers_any'
class ProjectNotifiersErrorList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Project
relationship = 'notifiers_error'
class ProjectNotifiersSuccessList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = Project
relationship = 'notifiers_success'
class ProjectUpdatesList(SubListAPIView):
@ -918,6 +944,12 @@ class ProjectUpdateCancel(RetrieveAPIView):
else:
return self.http_method_not_allowed(request, *args, **kwargs)
class ProjectUpdateNotificationsList(SubListAPIView):
model = Notification
serializer_class = NotificationSerializer
parent_model = Project
relationship = 'notifications'
class UserList(ListCreateAPIView):
@ -1172,33 +1204,6 @@ class InventoryScanJobTemplateList(SubListAPIView):
qs = self.request.user.get_queryset(self.model)
return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent)
class InventorySingleFactView(MongoAPIView):
model = Fact
parent_model = Inventory
new_in_220 = True
serializer_class = FactSerializer
filter_backends = (MongoFilterBackend,)
def get(self, request, *args, **kwargs):
# Sanity check: Does the license allow system tracking?
if not feature_enabled('system_tracking'):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
fact_key = request.query_params.get("fact_key", None)
fact_value = request.query_params.get("fact_value", None)
datetime_spec = request.query_params.get("timestamp", None)
module_spec = request.query_params.get("module", None)
if fact_key is None or fact_value is None or module_spec is None:
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
inventory_obj = self.get_parent_object()
fact_data = Fact.get_single_facts([h.name for h in inventory_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
class HostList(ListCreateAPIView):
model = Host
@ -1285,88 +1290,43 @@ class HostActivityStreamList(SubListAPIView):
qs = self.request.user.get_queryset(self.model)
return qs.filter(Q(host=parent) | Q(inventory=parent.inventory))
class HostFactVersionsList(MongoListAPIView):
class HostFactVersionsList(ListAPIView, ParentMixin):
model = Fact
serializer_class = FactVersionSerializer
parent_model = Host
new_in_220 = True
filter_backends = (MongoFilterBackend,)
def get_queryset(self):
from_spec = self.request.query_params.get('from', None)
to_spec = self.request.query_params.get('to', None)
module_spec = self.request.query_params.get('module', None)
if not feature_enabled("system_tracking"):
raise LicenseForbids("Your license does not permit use "
"of system tracking.")
host = self.get_parent_object()
self.check_parent_access(host)
from_spec = self.request.query_params.get('from', None)
to_spec = self.request.query_params.get('to', None)
module_spec = self.request.query_params.get('module', None)
try:
fact_host = FactHost.objects.get(hostname=host.name, inventory_id=host.inventory.pk)
except FactHost.DoesNotExist:
return None
except mongoengine.ConnectionError:
return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
if from_spec:
from_spec = dateutil.parser.parse(from_spec)
if to_spec:
to_spec = dateutil.parser.parse(to_spec)
kv = {
'host': fact_host.id,
}
if module_spec is not None:
kv['module'] = module_spec
if from_spec is not None:
from_actual = dateutil.parser.parse(from_spec)
kv['timestamp__gt'] = from_actual
if to_spec is not None:
to_actual = dateutil.parser.parse(to_spec)
kv['timestamp__lte'] = to_actual
host_obj = self.get_parent_object()
return FactVersion.objects.filter(**kv).order_by("-timestamp")
return Fact.get_timeline(host_obj.id, module=module_spec, ts_from=from_spec, ts_to=to_spec)
def list(self, *args, **kwargs):
queryset = self.get_queryset() or []
try:
serializer = FactVersionSerializer(queryset, many=True, context=dict(host_obj=self.get_parent_object()))
except mongoengine.ConnectionError:
return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
return Response(dict(results=serializer.data))
return Response(dict(results=self.serializer_class(queryset, many=True).data))
class HostSingleFactView(MongoAPIView):
class HostFactCompareView(SubDetailAPIView):
model = Fact
parent_model = Host
new_in_220 = True
serializer_class = FactSerializer
filter_backends = (MongoFilterBackend,)
def get(self, request, *args, **kwargs):
# Sanity check: Does the license allow system tracking?
if not feature_enabled('system_tracking'):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
fact_key = request.query_params.get("fact_key", None)
fact_value = request.query_params.get("fact_value", None)
datetime_spec = request.query_params.get("timestamp", None)
module_spec = request.query_params.get("module", None)
if fact_key is None or fact_value is None or module_spec is None:
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
host_obj = self.get_parent_object()
fact_data = Fact.get_single_facts([host_obj.name], fact_key, fact_value, datetime_actual, module_spec)
return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
class HostFactCompareView(MongoAPIView):
new_in_220 = True
parent_model = Host
serializer_class = FactSerializer
filter_backends = (MongoFilterBackend,)
def get(self, request, *args, **kwargs):
def retrieve(self, request, *args, **kwargs):
# Sanity check: Does the license allow system tracking?
if not feature_enabled('system_tracking'):
raise LicenseForbids('Your license does not permit use '
@ -1377,10 +1337,11 @@ class HostFactCompareView(MongoAPIView):
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
host_obj = self.get_parent_object()
fact_entry = Fact.get_host_version(host_obj.name, host_obj.inventory.pk, datetime_actual, module_spec)
host_data = FactSerializer(fact_entry).data if fact_entry is not None else {}
return Response(host_data)
fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual)
if not fact_entry:
return Response({'detail': 'Fact not found'}, status=status.HTTP_404_NOT_FOUND)
return Response(self.serializer_class(instance=fact_entry).data)
class GroupList(ListCreateAPIView):
@ -1549,33 +1510,6 @@ class GroupDetail(RetrieveUpdateDestroyAPIView):
obj.mark_inactive_recursive()
return Response(status=status.HTTP_204_NO_CONTENT)
class GroupSingleFactView(MongoAPIView):
model = Fact
parent_model = Group
new_in_220 = True
serializer_class = FactSerializer
filter_backends = (MongoFilterBackend,)
def get(self, request, *args, **kwargs):
# Sanity check: Does the license allow system tracking?
if not feature_enabled('system_tracking'):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
fact_key = request.query_params.get("fact_key", None)
fact_value = request.query_params.get("fact_value", None)
datetime_spec = request.query_params.get("timestamp", None)
module_spec = request.query_params.get("module", None)
if fact_key is None or fact_value is None or module_spec is None:
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
group_obj = self.get_parent_object()
fact_data = Fact.get_single_facts([h.name for h in group_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
class InventoryGroupsList(SubListCreateAttachDetachAPIView):
model = Group
@ -1803,6 +1737,27 @@ class InventorySourceActivityStreamList(SubListAPIView):
# Okay, let it through.
return super(type(self), self).get(request, *args, **kwargs)
class InventorySourceNotifiersAnyList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = InventorySource
relationship = 'notifiers_any'
class InventorySourceNotifiersErrorList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = InventorySource
relationship = 'notifiers_error'
class InventorySourceNotifiersSuccessList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = InventorySource
relationship = 'notifiers_success'
class InventorySourceHostsList(SubListAPIView):
model = Host
@ -1867,6 +1822,13 @@ class InventoryUpdateCancel(RetrieveAPIView):
else:
return self.http_method_not_allowed(request, *args, **kwargs)
class InventoryUpdateNotificationsList(SubListAPIView):
model = Notification
serializer_class = NotificationSerializer
parent_model = InventoryUpdate
relationship = 'notifications'
class JobTemplateList(ListCreateAPIView):
model = JobTemplate
@ -2036,6 +1998,27 @@ class JobTemplateActivityStreamList(SubListAPIView):
# Okay, let it through.
return super(type(self), self).get(request, *args, **kwargs)
class JobTemplateNotifiersAnyList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = JobTemplate
relationship = 'notifiers_any'
class JobTemplateNotifiersErrorList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = JobTemplate
relationship = 'notifiers_error'
class JobTemplateNotifiersSuccessList(SubListCreateAttachDetachAPIView):
model = Notifier
serializer_class = NotifierSerializer
parent_model = JobTemplate
relationship = 'notifiers_success'
class JobTemplateCallback(GenericAPIView):
model = JobTemplate
@ -2369,6 +2352,13 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):
headers = {'Location': new_job.get_absolute_url()}
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class JobNotificationsList(SubListAPIView):
model = Notification
serializer_class = NotificationSerializer
parent_model = Job
relationship = 'notifications'
class BaseJobHostSummariesList(SubListAPIView):
model = JobHostSummary
@ -3022,6 +3012,58 @@ class AdHocCommandStdout(UnifiedJobStdout):
model = AdHocCommand
new_in_220 = True
class NotifierList(ListCreateAPIView):
model = Notifier
serializer_class = NotifierSerializer
new_in_300 = True
class NotifierDetail(RetrieveUpdateDestroyAPIView):
model = Notifier
serializer_class = NotifierSerializer
new_in_300 = True
class NotifierTest(GenericAPIView):
view_name = 'Notifier Test'
model = Notifier
serializer_class = EmptySerializer
new_in_300 = True
def post(self, request, *args, **kwargs):
obj = self.get_object()
notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, tower_settings.TOWER_URL_BASE),
{"body": "Ansible Tower Test Notification {} {}".format(obj.id, tower_settings.TOWER_URL_BASE)})
if not notification:
return Response({}, status=status.HTTP_400_BAD_REQUEST)
else:
send_notifications.delay([notification.id])
headers = {'Location': notification.get_absolute_url()}
return Response({"notification": notification.id},
headers=headers,
status=status.HTTP_202_ACCEPTED)
class NotifierNotificationList(SubListAPIView):
model = Notification
serializer_class = NotificationSerializer
parent_model = Notifier
relationship = 'notifications'
parent_key = 'notifier'
class NotificationList(ListAPIView):
model = Notification
serializer_class = NotificationSerializer
new_in_300 = True
class NotificationDetail(RetrieveAPIView):
model = Notification
serializer_class = NotificationSerializer
new_in_300 = True
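
A hedged example of exercising the new test endpoint over HTTP; the host, token, and pk are placeholders:

import requests

resp = requests.post(
    "https://tower.example.com/api/v1/notifiers/42/test/",
    headers={"Authorization": "Token <your-token>"},
)
# On success: 202 Accepted, body {"notification": <id>}, and a Location
# header pointing at the queued notification, per NotifierTest.post().
assert resp.status_code == 202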
class ActivityStreamList(SimpleListAPIView):
model = ActivityStream


@ -1486,6 +1486,31 @@ class ScheduleAccess(BaseAccess):
else:
return False
class NotifierAccess(BaseAccess):
'''
I can see/use a notifier if I have permission to
'''
model = Notifier
def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct()
if self.user.is_superuser:
return qs
return qs
class NotificationAccess(BaseAccess):
'''
I can see/use a notification if I have permission to
'''
model = Notification
def get_queryset(self):
qs = self.model.objects.distinct()
if self.user.is_superuser:
return qs
return qs
class ActivityStreamAccess(BaseAccess):
'''
I can see activity stream events only when I have permission on all objects included in the event
@ -1745,3 +1770,5 @@ register_access(CustomInventoryScript, CustomInventoryScriptAccess)
register_access(TowerSettings, TowerSettingsAccess)
register_access(Role, RoleAccess)
register_access(Resource, ResourceAccess)
register_access(Notifier, NotifierAccess)
register_access(Notification, NotificationAccess)


@ -9,9 +9,11 @@ from datetime import datetime
# Django
from django.core.management.base import NoArgsCommand
from django.conf import settings
#from django.core.exceptions import Does
# AWX
from awx.fact.models.fact import * # noqa
from awx.main.models.fact import Fact
from awx.main.models.inventory import Host
from awx.main.socket import Socket
logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
@ -47,35 +49,34 @@ class FactCacheReceiver(object):
# ansible v2 will not emit this message. Thus, this can be removed at that time.
if 'module_setup' in facts_data and len(facts_data) == 1:
logger.info('Received module_setup message')
return
return None
try:
host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id)
except FactHost.DoesNotExist:
logger.info('Creating new host <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
host = FactHost(hostname=hostname, inventory_id=inventory_id)
host.save()
logger.info('Created new host <%s>' % (host.id))
except FactHost.MultipleObjectsReturned:
query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id)
logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query))
host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id)
except Fact.DoesNotExist:
logger.warn('Failed to intake fact. Host does not exist <hostname, inventory_id> <%s, %s>' % (hostname, inventory_id))
return
except Fact.MultipleObjectsReturned:
logger.warn('Database inconsistent. Multiple Hosts found for <hostname, inventory_id> <%s, %s>.' % (hostname, inventory_id))
return None
except Exception, e:
logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
return
return None
(module, facts) = self.process_facts(facts_data)
(module_name, facts) = self.process_facts(facts_data)
self.timestamp = datetime.fromtimestamp(date_key, None)
try:
# Update existing Fact entry
version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module)
Fact.objects(id=version_obj.fact.id).update_one(fact=facts)
logger.info('Updated existing fact <%s>' % (version_obj.fact.id))
except FactVersion.DoesNotExist:
# Update existing Fact entry
fact_obj = Fact.get_host_fact(host_obj.id, module_name, self.timestamp)
if fact_obj:
fact_obj.facts = facts
fact_obj.save()
logger.info('Updated existing fact <%s>' % (fact_obj.id))
else:
# Create new Fact entry
(fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module)
logger.info('Created new fact <fact, fact_version> <%s, %s>' % (fact_obj.id, version_obj.id))
fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
return fact_obj
def run_receiver(self, use_processing_threads=True):
with Socket('fact_cache', 'r') as facts:


@ -15,7 +15,7 @@ from django.core.management.base import NoArgsCommand
# AWX
from awx.main.models import * # noqa
from awx.main.queue import FifoQueue
from awx.main.tasks import handle_work_error
from awx.main.tasks import handle_work_error, handle_work_success
from awx.main.utils import get_system_task_capacity
# Celery
@ -265,14 +265,15 @@ def process_graph(graph, task_capacity):
[{'type': graph.get_node_type(n['node_object']),
'id': n['node_object'].id} for n in node_dependencies]
error_handler = handle_work_error.s(subtasks=dependent_nodes)
start_status = node_obj.start(error_callback=error_handler)
success_handler = handle_work_success.s(task_actual={'type': graph.get_node_type(node_obj),
'id': node_obj.id})
start_status = node_obj.start(error_callback=error_handler, success_callback=success_handler)
if not start_status:
node_obj.status = 'failed'
if node_obj.job_explanation:
node_obj.job_explanation += ' '
node_obj.job_explanation += 'Task failed pre-start check.'
node_obj.save()
# TODO: Run error handler
continue
remaining_volume -= impact
running_impact += impact
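
The `.s()` calls above build Celery subtask signatures for the two callbacks; a minimal sketch of the callback-signature pattern they presumably feed into (the task names here are illustrative stand-ins for handle_work_success and handle_work_error, and the wiring through node_obj.start() is assumed):

from celery import shared_task

@shared_task
def run_unit_of_work(job_id):
    return job_id                      # stands in for the actual job run

@shared_task
def on_success(result, task_actual=None):
    pass                               # e.g. fire success notifications

@shared_task
def on_error(task_id, subtasks=None):
    pass                               # e.g. mark dependent work failed

run_unit_of_work.apply_async(
    args=[42],
    link=on_success.s(task_actual={'type': 'job', 'id': 42}),
    link_error=on_error.s(subtasks=[{'type': 'job', 'id': 42}]),
)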


@ -43,7 +43,7 @@ class Migration(migrations.Migration):
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable')])),
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),


@ -0,0 +1,105 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
import django.db.models.deletion
from django.conf import settings
import taggit.managers
class Migration(migrations.Migration):
dependencies = [
('taggit', '0002_auto_20150616_2121'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('main', '0002_v300_changes'),
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
('error', models.TextField(default=b'', editable=False, blank=True)),
('notifications_sent', models.IntegerField(default=0, editable=False)),
('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
('recipients', models.TextField(default=b'', editable=False, blank=True)),
('subject', models.TextField(default=b'', editable=False, blank=True)),
('body', jsonfield.fields.JSONField(default=dict, blank=True)),
],
options={
'ordering': ('pk',),
},
),
migrations.CreateModel(
name='Notifier',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
('description', models.TextField(default=b'', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(unique=True, max_length=512)),
('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])),
('notification_configuration', jsonfield.fields.JSONField(default=dict)),
('created_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('modified_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
('organization', models.ForeignKey(related_name='notifiers', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
],
),
migrations.AddField(
model_name='notification',
name='notifier',
field=models.ForeignKey(related_name='notifications', editable=False, to='main.Notifier'),
),
migrations.AddField(
model_name='activitystream',
name='notification',
field=models.ManyToManyField(to='main.Notification', blank=True),
),
migrations.AddField(
model_name='activitystream',
name='notifier',
field=models.ManyToManyField(to='main.Notifier', blank=True),
),
migrations.AddField(
model_name='organization',
name='notifiers_any',
field=models.ManyToManyField(related_name='organization_notifiers_for_any', to='main.Notifier', blank=True),
),
migrations.AddField(
model_name='organization',
name='notifiers_error',
field=models.ManyToManyField(related_name='organization_notifiers_for_errors', to='main.Notifier', blank=True),
),
migrations.AddField(
model_name='organization',
name='notifiers_success',
field=models.ManyToManyField(related_name='organization_notifiers_for_success', to='main.Notifier', blank=True),
),
migrations.AddField(
model_name='unifiedjob',
name='notifications',
field=models.ManyToManyField(related_name='unifiedjob_notifications', editable=False, to='main.Notification'),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='notifiers_any',
field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_any', to='main.Notifier', blank=True),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='notifiers_error',
field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_errors', to='main.Notifier', blank=True),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='notifiers_success',
field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_success', to='main.Notifier', blank=True),
),
]


@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonbfield.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0003_v300_changes'),
]
operations = [
migrations.CreateModel(
name='Fact',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)),
('module', models.CharField(max_length=128)),
('facts', jsonbfield.fields.JSONField(default={}, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True)),
('host', models.ForeignKey(related_name='facts', to='main.Host', help_text='Host for the facts that the fact scan captured.')),
],
),
migrations.AlterIndexTogether(
name='fact',
index_together=set([('timestamp', 'module', 'host')]),
),
]


@ -20,6 +20,8 @@ from awx.main.models.configuration import * # noqa
from awx.main.models.rbac import * # noqa
from awx.main.models.user import * # noqa
from awx.main.models.mixins import * # noqa
from awx.main.models.notifications import * # noqa
from awx.main.models.fact import * # noqa
# Monkeypatch Django serializer to ignore django-taggit fields (which break
# the dumpdata command; see https://github.com/alex/django-taggit/issues/155).
@ -63,3 +65,5 @@ activity_stream_registrar.connect(AdHocCommand)
activity_stream_registrar.connect(Schedule)
activity_stream_registrar.connect(CustomInventoryScript)
activity_stream_registrar.connect(TowerSettings)
activity_stream_registrar.connect(Notifier)
activity_stream_registrar.connect(Notification)


@ -53,6 +53,8 @@ class ActivityStream(models.Model):
ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True)
schedule = models.ManyToManyField("Schedule", blank=True)
custom_inventory_script = models.ManyToManyField("CustomInventoryScript", blank=True)
notifier = models.ManyToManyField("Notifier", blank=True)
notification = models.ManyToManyField("Notification", blank=True)
def get_absolute_url(self):
return reverse('api:activity_stream_detail', args=(self.pk,))


@ -5,6 +5,7 @@
import hmac
import json
import logging
from urlparse import urljoin
# Django
from django.conf import settings
@ -139,6 +140,9 @@ class AdHocCommand(UnifiedJob):
def get_absolute_url(self):
return reverse('api:ad_hoc_command_detail', args=(self.pk,))
def get_ui_url(self):
return urljoin(tower_settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk))
@property
def task_auth_token(self):
'''Return temporary auth token used for task requests via API.'''
@ -221,8 +225,9 @@ class AdHocCommandEvent(CreatedModifiedModel):
('runner_on_unreachable', _('Host Unreachable'), True),
# Tower won't see no_hosts (check is done earlier without callback).
#('runner_on_no_hosts', _('No Hosts Matched'), False),
# Tower should probably never see skipped (no conditionals).
#('runner_on_skipped', _('Host Skipped'), False),
# Tower will see skipped (when running in check mode for a module that
# does not support check mode).
('runner_on_skipped', _('Host Skipped'), False),
# Tower does not support async for ad hoc commands.
#('runner_on_async_poll', _('Host Polling'), False),
#('runner_on_async_ok', _('Host Async OK'), False),


@ -25,7 +25,7 @@ from awx.main.utils import encrypt_field
__all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
'PasswordFieldsModel', 'PrimordialModel', 'CommonModel',
'CommonModelNameNotUnique',
'CommonModelNameNotUnique', 'NotificationFieldsModel',
'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN',
'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES',
@ -337,3 +337,26 @@ class CommonModelNameNotUnique(PrimordialModel):
max_length=512,
unique=False,
)
class NotificationFieldsModel(BaseModel):
class Meta:
abstract = True
notifiers_error = models.ManyToManyField(
"Notifier",
blank=True,
related_name='%(class)s_notifiers_for_errors'
)
notifiers_success = models.ManyToManyField(
"Notifier",
blank=True,
related_name='%(class)s_notifiers_for_success'
)
notifiers_any = models.ManyToManyField(
"Notifier",
blank=True,
related_name='%(class)s_notifiers_for_any'
)
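
Because the related_name templates use %(class)s, every concrete model inheriting NotificationFieldsModel gets its own reverse accessors on Notifier; a hedged illustration of the resulting lookups (org is assumed to be an Organization instance):

# Reverse lookups produced by the %(class)s templates above, as used by
# the notifiers properties later in this diff:
error_notifiers = Notifier.objects.filter(
    active=True,
    organization_notifiers_for_errors=org,
)
# Similarly: organization_notifiers_for_success, organization_notifiers_for_any,
# and unifiedjobtemplate_notifiers_for_* on UnifiedJobTemplate subclasses.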

awx/main/models/fact.py (new file, 64 lines)

@ -0,0 +1,64 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jsonbfield.fields import JSONField
__all__ = ('Fact', )
class Fact(models.Model):
"""A model representing a fact returned from Ansible.
Facts are stored as JSON dictionaries.
"""
host = models.ForeignKey(
'Host',
related_name='facts',
db_index=True,
on_delete=models.CASCADE,
help_text=_('Host for the facts that the fact scan captured.'),
)
timestamp = models.DateTimeField(
default=None,
editable=False,
help_text=_('Date and time of the corresponding fact scan gathering time.')
)
module = models.CharField(max_length=128)
facts = JSONField(blank=True, default={}, help_text=_('Arbitrary JSON structure of module facts captured at timestamp for a single host.'))
class Meta:
app_label = 'main'
index_together = [
["timestamp", "module", "host"],
]
@staticmethod
def get_host_fact(host_id, module, timestamp):
qs = Fact.objects.filter(host__id=host_id, module=module, timestamp__lte=timestamp).order_by('-timestamp')
if qs:
return qs[0]
else:
return None
@staticmethod
def get_timeline(host_id, module=None, ts_from=None, ts_to=None):
kwargs = {
'host__id': host_id,
}
if module:
kwargs['module'] = module
if ts_from and ts_to and ts_from == ts_to:
kwargs['timestamp'] = ts_from
else:
if ts_from:
kwargs['timestamp__gt'] = ts_from
if ts_to:
kwargs['timestamp__lte'] = ts_to
return Fact.objects.filter(**kwargs).order_by('-timestamp').only('timestamp', 'module').order_by('-timestamp', 'module')
@staticmethod
def add_fact(host_id, module, timestamp, facts):
fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts)
fact_obj.save()
return fact_obj
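
A hedged usage sketch of the new helpers; the host id and timestamps are placeholders and assume a Host with pk=1 exists:

from datetime import datetime
from awx.main.models.fact import Fact

ts = datetime(2016, 3, 1, 12, 0, 0)
fact = Fact.add_fact(host_id=1, module='ansible', timestamp=ts,
                     facts={'ansible_distribution': 'Fedora'})

# Most recent 'ansible' scan at or before a point in time:
latest = Fact.get_host_fact(1, 'ansible', datetime(2016, 3, 2))

# (timestamp, module) timeline within a window, newest first:
timeline = Fact.get_timeline(1, module='ansible',
                             ts_from=datetime(2016, 2, 1), ts_to=ts)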


@ -6,6 +6,7 @@ import datetime
import logging
import re
import copy
from urlparse import urljoin
# Django
from django.conf import settings
@ -24,7 +25,9 @@ from awx.main.models.base import * # noqa
from awx.main.models.jobs import Job
from awx.main.models.unified_jobs import * # noqa
from awx.main.models.mixins import ResourceMixin
from awx.main.models.notifications import Notifier
from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates
from awx.main.conf import tower_settings
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript']
@ -1217,6 +1220,14 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin)
return True
return False
@property
def notifiers(self):
base_notifiers = Notifier.objects.filter(active=True)
error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization))
success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization))
any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization))
return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers)
def clean_source(self):
source = self.source
if source and self.group:
@ -1276,6 +1287,9 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions):
def get_absolute_url(self):
return reverse('api:inventory_update_detail', args=(self.pk,))
def get_ui_url(self):
return urljoin(tower_settings.TOWER_URL_BASE, "/#/inventory_sync/{}".format(self.pk))
def is_blocked_by(self, obj):
if type(obj) == InventoryUpdate:
if self.inventory_source.inventory == obj.inventory_source.inventory:


@ -6,6 +6,7 @@ import hmac
import json
import yaml
import logging
from urlparse import urljoin
# Django
from django.conf import settings
@ -22,6 +23,7 @@ from jsonfield import JSONField
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.models.base import * # noqa
from awx.main.models.unified_jobs import * # noqa
from awx.main.models.notifications import Notifier
from awx.main.utils import decrypt_field, ignore_inventory_computed_fields
from awx.main.utils import emit_websocket_notification
from awx.main.redact import PlainTextCleaner
@ -347,6 +349,20 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
def _can_update(self):
return self.can_start_without_user_input()
@property
def notifiers(self):
# Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type
# TODO: Currently there is no org fk on project so this will need to be added once that is
# available after the rbac pr
base_notifiers = Notifier.objects.filter(active=True)
error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project]))
success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project]))
any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project]))
# Get Organization Notifiers
error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.project.organizations.all())))
success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.project.organizations.all())))
any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.project.organizations.all())))
return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers))
class Job(UnifiedJob, JobOptions):
'''
@ -386,6 +402,9 @@ class Job(UnifiedJob, JobOptions):
def get_absolute_url(self):
return reverse('api:job_detail', args=(self.pk,))
def get_ui_url(self):
return urljoin(tower_settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk))
@property
def task_auth_token(self):
'''Return temporary auth token used for task requests via API.'''
@ -502,6 +521,26 @@ class Job(UnifiedJob, JobOptions):
dependencies.append(source.create_inventory_update(launch_type='dependency'))
return dependencies
def notification_data(self):
data = super(Job, self).notification_data()
all_hosts = {}
for h in self.job_host_summaries.all():
all_hosts[h.host.name] = dict(failed=h.failed,
changed=h.changed,
dark=h.dark,
failures=h.failures,
ok=h.ok,
processed=h.processed,
skipped=h.skipped)
data.update(dict(inventory=self.inventory.name,
project=self.project.name,
playbook=self.playbook,
credential=self.credential.name,
limit=self.limit,
extra_vars=self.extra_vars,
hosts=all_hosts))
return data
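For reference, the payload assembled above comes out roughly as follows, on top of the base fields from UnifiedJob.notification_data() (created_by, started, finished, traceback); every value here is illustrative:
example = {'id': 42, 'name': 'deploy-app', 'status': 'successful',
'url': 'https://tower.example.com/#/jobs/42', 'inventory': 'prod', 'project': 'deploys',
'playbook': 'site.yml', 'credential': 'machine', 'limit': '', 'extra_vars': '{}',
'hosts': {'web01': {'failed': False, 'changed': 3, 'dark': 0, 'failures': 0,
'ok': 10, 'processed': 1, 'skipped': 0}}}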
def handle_extra_data(self, extra_data):
extra_vars = {}
if isinstance(extra_data, dict):
@ -1082,6 +1121,9 @@ class SystemJob(UnifiedJob, SystemJobOptions):
def get_absolute_url(self):
return reverse('api:system_job_detail', args=(self.pk,))
def get_ui_url(self):
return urljoin(tower_settings.TOWER_URL_BASE, "/#/management_jobs/{}".format(self.pk))
def is_blocked_by(self, obj):
return True

View File

@ -0,0 +1,172 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
from django.db import models
from django.core.urlresolvers import reverse
from django.core.mail.message import EmailMessage
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_str
from awx.main.models.base import * # noqa
from awx.main.utils import encrypt_field, decrypt_field
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
from awx.main.notifications.twilio_backend import TwilioBackend
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.irc_backend import IrcBackend
# Django-JSONField
from jsonfield import JSONField
logger = logging.getLogger('awx.main.models.notifications')
__all__ = ['Notifier', 'Notification']
class Notifier(CommonModel):
NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
('slack', _('Slack'), SlackBackend),
('twilio', _('Twilio'), TwilioBackend),
('pagerduty', _('Pagerduty'), PagerDutyBackend),
('hipchat', _('HipChat'), HipChatBackend),
('webhook', _('Webhook'), WebhookBackend),
('irc', _('IRC'), IrcBackend)]
NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES]
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])
class Meta:
app_label = 'main'
organization = models.ForeignKey(
'Organization',
blank=False,
null=True,
on_delete=models.SET_NULL,
related_name='notifiers',
)
notification_type = models.CharField(
max_length=32,
choices=NOTIFICATION_TYPE_CHOICES,
)
notification_configuration = JSONField(blank=False)
def get_absolute_url(self):
return reverse('api:notifier_detail', args=(self.pk,))
@property
def notification_class(self):
return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]
def save(self, *args, **kwargs):
new_instance = not bool(self.pk)
update_fields = kwargs.get('update_fields', [])
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if new_instance:
value = self.notification_configuration[field]
setattr(self, '_saved_{}_{}'.format("config", field), value)
self.notification_configuration[field] = ''
else:
encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
self.notification_configuration[field] = encrypted
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')
super(Notifier, self).save(*args, **kwargs)
if new_instance:
update_fields = []
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
self.notification_configuration[field] = saved_value
#setattr(self.notification_configuration, field, saved_value)
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')
self.save(update_fields=update_fields)
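The two-pass save above exists because encrypt_field needs a primary key: password-typed fields are stashed on the instance and blanked for the first save so plaintext never reaches the database, then restored and re-saved encrypted once the row has a pk. A minimal sketch, assuming a hypothetical Slack notifier:
n = Notifier(name='ops-slack', notification_type='slack',
notification_configuration={'token': 's3cret', 'channels': ['#ops']})
n.save()  # pass 1 writes a blanked token; pass 2 re-saves it encrypted using the new pk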
@property
def recipients(self):
return self.notification_configuration[self.notification_class.recipient_parameter]
def generate_notification(self, subject, message):
notification = Notification(notifier=self,
notification_type=self.notification_type,
recipients=smart_str(self.recipients),
subject=subject,
body=message)
notification.save()
return notification
def send(self, subject, body):
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
self.notification_configuration[field] = decrypt_field(self,
'notification_configuration',
subfield=field)
recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
if not isinstance(recipients, list):
recipients = [recipients]
sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
backend_obj = self.notification_class(**self.notification_configuration)
notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
return backend_obj.send_messages([notification_obj])
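A hedged end-to-end sketch of the send path (the record and values are assumed for illustration): generate_notification persists the event, while send decrypts credentials and hands an EmailMessage to the type-specific backend:
notifier = Notifier.objects.get(name='ops-slack')  # hypothetical record
body = {'friendly_name': 'Job', 'id': 42, 'status': 'successful', 'url': 'https://tower.example.com/#/jobs/42'}
notification = notifier.generate_notification('Job #42 succeeded', body)
sent = notifier.send(notification.subject, notification.body)  # returns the backend's sent count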
class Notification(CreatedModifiedModel):
'''
A notification event emitted when a Notifier is run
'''
NOTIFICATION_STATE_CHOICES = [
('pending', _('Pending')),
('successful', _('Successful')),
('failed', _('Failed')),
]
class Meta:
app_label = 'main'
ordering = ('pk',)
notifier = models.ForeignKey(
'Notifier',
related_name='notifications',
on_delete=models.CASCADE,
editable=False
)
status = models.CharField(
max_length=20,
choices=NOTIFICATION_STATE_CHOICES,
default='pending',
editable=False,
)
error = models.TextField(
blank=True,
default='',
editable=False,
)
notifications_sent = models.IntegerField(
default=0,
editable=False,
)
notification_type = models.CharField(
max_length=32,
choices=Notifier.NOTIFICATION_TYPE_CHOICES,
)
recipients = models.TextField(
blank=True,
default='',
editable=False,
)
subject = models.TextField(
blank=True,
default='',
editable=False,
)
body = JSONField(blank=True)
def get_absolute_url(self):
return reverse('api:notification_detail', args=(self.pk,))

View File

@ -25,7 +25,7 @@ from awx.main.conf import tower_settings
__all__ = ['Organization', 'Team', 'Permission', 'Profile', 'AuthToken']
class Organization(CommonModel, ResourceMixin):
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin):
'''
An organization is the basic unit of multi-tenancy divisions
'''

View File

@ -20,10 +20,12 @@ from django.utils.timezone import now, make_aware, get_default_timezone
from awx.lib.compat import slugify
from awx.main.models.base import * # noqa
from awx.main.models.jobs import Job
from awx.main.models.notifications import Notifier
from awx.main.models.unified_jobs import * # noqa
from awx.main.models.mixins import ResourceMixin
from awx.main.utils import update_scm_url
from awx.main.fields import ImplicitRoleField
from awx.main.conf import tower_settings
__all__ = ['Project', 'ProjectUpdate']
@ -330,6 +332,18 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
return True
return False
@property
def notifiers(self):
base_notifiers = Notifier.objects.filter(active=True)
error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self))
success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self))
any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self))
# Get Organization Notifiers
error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.organizations.all())))
success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.organizations.all())))
any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.organizations.all())))
return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers))
def get_absolute_url(self):
return reverse('api:project_detail', args=(self.pk,))
@ -391,6 +405,9 @@ class ProjectUpdate(UnifiedJob, ProjectOptions):
def get_absolute_url(self):
return reverse('api:project_update_detail', args=(self.pk,))
def get_ui_url(self):
return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/scm_update/{}".format(self.pk))
def _update_parent_instance(self):
parent_instance = self._get_parent_instance()
if parent_instance:

View File

@ -17,6 +17,7 @@ from django.db import models
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
from django.utils.encoding import smart_text
# Django-JSONField
from jsonfield import JSONField
@ -40,7 +41,7 @@ logger = logging.getLogger('awx.main.models.unified_jobs')
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique):
class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel):
'''
Concrete base class for unified job templates.
'''
@ -297,6 +298,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique):
'''
return kwargs # Override if needed in subclass.
@property
def notifiers(self):
'''
Return notifiers relevant to this Unified Job Template
'''
# NOTE: Derived classes should implement this property.
return Notifier.objects.none()
def create_unified_job(self, **kwargs):
'''
Create a new unified job based on this unified job template.
@ -385,6 +394,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
editable=False,
related_name='%(class)s_blocked_jobs+',
)
notifications = models.ManyToManyField(
'Notification',
editable=False,
related_name='%(class)s_notifications',
)
cancel_flag = models.BooleanField(
blank=True,
default=False,
@ -470,6 +484,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
else:
return ''
def get_ui_url(self):
real_instance = self.get_real_instance()
if real_instance != self:
return real_instance.get_ui_url()
else:
return ''
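Since UnifiedJob is polymorphic, the base implementation above defers to the concrete subclass; a minimal sketch with an illustrative pk:
uj = UnifiedJob.objects.get(pk=42)  # concretely a Job, ProjectUpdate, InventoryUpdate, ...
uj.get_ui_url()  # e.g. 'https://tower.example.com/#/jobs/42' via Job.get_ui_url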
@classmethod
def _get_task_class(cls):
raise NotImplementedError # Implement in subclasses.
@ -717,7 +738,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
tasks that might preclude creating one'''
return []
def start(self, error_callback, **kwargs):
def notification_data(self):
return dict(id=self.id,
name=self.name,
url=self.get_ui_url(),
created_by=smart_text(self.created_by),
started=self.started.isoformat(),
finished=self.finished.isoformat(),
status=self.status,
traceback=self.result_traceback)
def start(self, error_callback, success_callback, **kwargs):
'''
Start the task running via Celery.
'''
@ -743,7 +774,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
# if field not in needed])
if 'extra_vars' in kwargs:
self.handle_extra_data(kwargs['extra_vars'])
task_class().apply_async((self.pk,), opts, link_error=error_callback)
task_class().apply_async((self.pk,), opts, link_error=error_callback, link=success_callback)
return True
def signal_start(self, **kwargs):
@ -765,7 +796,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
# Sanity check: If we are running unit tests, then run synchronously.
if getattr(settings, 'CELERY_UNIT_TEST', False):
return self.start(None, **kwargs)
return self.start(None, None, **kwargs)
# Save the pending status, and inform the SocketIO listener.
self.update_fields(start_args=json.dumps(kwargs), status='pending')

View File

View File

@ -0,0 +1,20 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import pprint
from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
class TowerBaseEmailBackend(BaseEmailBackend):
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual += pprint.pformat(body, indent=4)
return body_actual
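By way of illustration, format_body renders a one-line summary followed by a pretty-printed dump of the whole body dict; the values below are made up:
backend = TowerBaseEmailBackend()
body = {'friendly_name': 'Job', 'id': 42, 'status': 'failed', 'url': 'https://tower.example.com/#/jobs/42'}
print(backend.format_body(body))
# Job #42 had status failed on Ansible Tower, view details at https://tower.example.com/#/jobs/42
# ...followed by pprint.pformat(body, indent=4)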

View File

@ -0,0 +1,28 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import pprint
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual += pprint.pformat(body, indent=4)
return body_actual

View File

@ -0,0 +1,49 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.hipchat_backend')
class HipChatBackend(TowerBaseEmailBackend):
init_parameters = {"token": {"label": "Token", "type": "password"},
"channels": {"label": "Destination Channels", "type": "list"},
"color": {"label": "Notification Color", "type": "string"},
"api_url": {"label": "API Url (e.g: https://mycompany.hipchat.com)", "type": "string"},
"notify": {"label": "Notify channel", "type": "bool"},
"message_from": {"label": "Label to be shown with notification", "type": "string"}}
recipient_parameter = "channels"
sender_parameter = "message_from"
def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs):
super(HipChatBackend, self).__init__(fail_silently=fail_silently)
self.token = token
self.color = color
self.api_url = api_url
self.notify = notify
def send_messages(self, messages):
sent_messages = 0
for m in messages:
for rcp in m.recipients():
r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp),
params={"auth_token": self.token},
json={"color": self.color,
"message": m.subject,
"notify": self.notify,
"from": m.from_email,
"message_format": "text"})
if r.status_code != 204:
logger.error(smart_text("Error sending messages: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text)))
sent_messages += 1
return sent_messages

View File

@ -0,0 +1,95 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import time
import ssl
import logging
import irc.client
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.irc_backend')
class IrcBackend(TowerBaseEmailBackend):
init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
"port": {"label": "IRC Server Port", "type": "int"},
"nickname": {"label": "IRC Nick", "type": "string"},
"password": {"label": "IRC Server Password", "type": "password"},
"use_ssl": {"label": "SSL Connection", "type": "bool"},
"targets": {"label": "Destination Channels or Users", "type": "list"}}
recipient_parameter = "targets"
sender_parameter = None
def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs):
super(IrcBackend, self).__init__(fail_silently=fail_silently)
self.server = server
self.port = port
self.nickname = nickname
self.password = password if password != "" else None
self.use_ssl = use_ssl
self.connection = None
def open(self):
if self.connection is not None:
return False
if self.use_ssl:
connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
else:
connection_factory = irc.connection.Factory()
try:
self.reactor = irc.client.Reactor()
self.connection = self.reactor.server().connect(
self.server,
self.port,
self.nickname,
password=self.password,
connect_factory=connection_factory,
)
except irc.client.ServerConnectionError as e:
logger.error(smart_text("Exception connecting to irc server: {}".format(e)))
if not self.fail_silently:
raise
return True
def close(self):
if self.connection is None:
return
self.connection = None
def on_connect(self, connection, event):
for c in self.channels:
if irc.client.is_channel(c):
connection.join(c)
else:
for m in self.channels[c]:
connection.privmsg(c, m.subject)
self.channels_sent += 1
def on_join(self, connection, event):
for m in self.channels[event.target]:
connection.privmsg(event.target, m.subject)
self.channels_sent += 1
def send_messages(self, messages):
if self.connection is None:
self.open()
self.channels = {}
self.channels_sent = 0
for m in messages:
for r in m.recipients():
if r not in self.channels:
self.channels[r] = []
self.channels[r].append(m)
self.connection.add_global_handler("welcome", self.on_connect)
self.connection.add_global_handler("join", self.on_join)
start_time = time.time()
process_time = time.time()
while self.channels_sent < len(self.channels) and (process_time - start_time) < 60:
self.reactor.process_once(0.1)
process_time = time.time()
self.reactor.disconnect_all()
return self.channels_sent

View File

@ -0,0 +1,49 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import pygerduty
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
class PagerDutyBackend(TowerBaseEmailBackend):
init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
"token": {"label": "API Token", "type": "password"},
"service_key": {"label": "API Service/Integration Key", "type": "string"},
"client_name": {"label": "Client Identifier", "type": "string"}}
recipient_parameter = "service_key"
sender_parameter = "client_name"
def __init__(self, subdomain, token, fail_silently=False, **kwargs):
super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
self.subdomain = subdomain
self.token = token
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
try:
pager = pygerduty.PagerDuty(self.subdomain, self.token)
except Exception as e:
if not self.fail_silently:
raise
logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e)))
for m in messages:
try:
pager.trigger_incident(m.recipients()[0],
description=m.subject,
details=m.body,
client=m.from_email)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
if not self.fail_silently:
raise
return sent_messages

View File

@ -0,0 +1,52 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
from slackclient import SlackClient
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.slack_backend')
class SlackBackend(TowerBaseEmailBackend):
init_parameters = {"token": {"label": "Token", "type": "password"},
"channels": {"label": "Destination Channels", "type": "list"}}
recipient_parameter = "channels"
sender_parameter = None
def __init__(self, token, fail_silently=False, **kwargs):
super(SlackBackend, self).__init__(fail_silently=fail_silently)
self.token = token
self.connection = None
def open(self):
if self.connection is not None:
return False
self.connection = SlackClient(self.token)
if not self.connection.rtm_connect():
if not self.fail_silently:
raise Exception("Slack Notification Token is invalid")
return True
def close(self):
if self.connection is None:
return
self.connection = None
def send_messages(self, messages):
if self.connection is None:
self.open()
sent_messages = 0
for m in messages:
try:
for r in m.recipients():
self.connection.rtm_send_message(r, m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
if not self.fail_silently:
raise
return sent_messages

View File

@ -0,0 +1,48 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
from twilio.rest import TwilioRestClient
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.twilio_backend')
class TwilioBackend(TowerBaseEmailBackend):
init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
"account_token": {"label": "Account Token", "type": "password"},
"from_number": {"label": "Source Phone Number", "type": "string"},
"to_numbers": {"label": "Destination SMS Numbers", "type": "list"}}
recipient_parameter = "to_numbers"
sender_parameter = "from_number"
def __init__(self, account_sid, account_token, fail_silently=False, **kwargs):
super(TwilioBackend, self).__init__(fail_silently=fail_silently)
self.account_sid = account_sid
self.account_token = account_token
def send_messages(self, messages):
sent_messages = 0
try:
connection = TwilioRestClient(self.account_sid, self.account_token)
except Exception as e:
if not self.fail_silently:
raise
logger.error(smart_text("Exception connecting to Twilio: {}".format(e)))
for m in messages:
try:
connection.messages.create(
to=m.to,
from_=m.from_email,
body=m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
if not self.fail_silently:
raise
return sent_messages

View File

@ -0,0 +1,39 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
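A minimal sketch of driving this backend directly; the target URL and headers are assumptions, and the EmailMessage recipient slot carries the webhook URL:
from django.core.mail.message import EmailMessage
backend = WebhookBackend(headers={'Content-Type': 'application/json'}, fail_silently=True)
msg = EmailMessage('Job #42 failed', {'status': 'failed', 'id': 42}, None, ['https://hooks.example.com/tower'])
backend.send_messages([msg])  # POSTs the JSON-encoded body to the first recipient URL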

View File

@ -387,6 +387,8 @@ model_serializer_mapping = {
Job: JobSerializer,
AdHocCommand: AdHocCommandSerializer,
TowerSettings: TowerSettingsSerializer,
Notifier: NotifierSerializer,
Notification: NotificationSerializer,
}
def activity_stream_create(sender, instance, created, **kwargs):

View File

@ -39,6 +39,9 @@ from celery import Task, task
from django.conf import settings
from django.db import transaction, DatabaseError
from django.utils.timezone import now
from django.utils.encoding import smart_text
from django.core.mail import send_mail
from django.contrib.auth.models import User
# AWX
from awx.lib.metrics import task_timer
@ -46,13 +49,15 @@ from awx.main.constants import CLOUD_PROVIDERS
from awx.main.models import * # noqa
from awx.main.queue import FifoQueue
from awx.main.conf import tower_settings
from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
ignore_inventory_computed_fields, emit_websocket_notification,
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot)
from awx.fact.utils.connection import test_mongo_connection
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
'RunAdHocCommand', 'handle_work_error', 'update_inventory_computed_fields']
'RunAdHocCommand', 'handle_work_error', 'handle_work_success',
'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks']
HIDDEN_PASSWORD = '**********'
@ -64,6 +69,48 @@ Try upgrading OpenSSH or providing your private key in an different format. \
logger = logging.getLogger('awx.main.tasks')
@task()
def send_notifications(notification_list, job_id=None):
if not isinstance(notification_list, list):
raise TypeError("notification_list should be of type list")
if job_id is not None:
job_actual = UnifiedJob.objects.get(id=job_id)
for notification_id in notification_list:
notification = Notification.objects.get(id=notification_id)
try:
sent = notification.notifier.send(notification.subject, notification.body)
notification.status = "successful"
notification.notifications_sent = sent
except Exception as e:
logger.error("Send Notification Failed {}".format(e))
notification.status = "failed"
notification.error = smart_text(e)
finally:
notification.save()
if job_id is not None:
job_actual.notifications.add(notification)
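The task takes a list of Notification primary keys plus an optional job to attach them to; a minimal queueing sketch with illustrative ids:
send_notifications.delay([17, 18], job_id=42)  # sends Notification 17 and 18, attaching both to UnifiedJob 42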
@task(bind=True)
def run_administrative_checks(self):
if not tower_settings.TOWER_ADMIN_ALERTS:
return
reader = TaskSerializer()
validation_info = reader.from_database()
if validation_info.get('instance_count', 0) < 1:
return
used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100))
tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
if (used_percentage * 100) > 90:
send_mail("Ansible Tower host usage over 90%",
"Ansible Tower host usage over 90%",
tower_admin_emails,
fail_silently=True)
if validation_info.get('time_remaining', 0) < TASK_TIMEOUT_INTERVAL:
send_mail("Ansible Tower license will expire soon",
"Ansible Tower license will expire soon",
tower_admin_emails,
fail_silently=True)
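The usage alert above is plain threshold arithmetic over the license data; with hypothetical numbers:
info = {'current_instances': 95, 'instance_count': 100}  # illustrative license payload
used_percentage = float(info['current_instances']) / float(info['instance_count'])  # 0.95
(used_percentage * 100) > 90  # True, so the 90% usage mail would be sent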
@task()
def bulk_inventory_element_delete(inventory, hosts=[], groups=[]):
from awx.main.signals import disable_activity_stream
@ -134,7 +181,6 @@ def notify_task_runner(metadata_dict):
queue = FifoQueue('tower_task_manager')
queue.push(metadata_dict)
@task()
def mongodb_control(cmd):
# Sanity check: Do not send arbitrary commands.
@ -159,6 +205,39 @@ def mongodb_control(cmd):
p = subprocess.Popen('sudo mongod --shutdown -f /etc/mongod.conf', shell=True)
p.wait()
@task(bind=True)
def handle_work_success(self, result, task_actual):
if task_actual['type'] == 'project_update':
instance = ProjectUpdate.objects.get(id=task_actual['id'])
instance_name = instance.name
notifiers = instance.project.notifiers
friendly_name = "Project Update"
elif task_actual['type'] == 'inventory_update':
instance = InventoryUpdate.objects.get(id=task_actual['id'])
instance_name = instance.name
notifiers = instance.inventory_source.notifiers
friendly_name = "Inventory Update"
elif task_actual['type'] == 'job':
instance = Job.objects.get(id=task_actual['id'])
instance_name = instance.job_template.name
notifiers = instance.job_template.notifiers
friendly_name = "Job"
elif task_actual['type'] == 'ad_hoc_command':
instance = AdHocCommand.objects.get(id=task_actual['id'])
instance_name = instance.module_name
notifiers = [] # TODO: Ad-hoc commands need to notify someone
friendly_name = "AdHoc Command"
else:
return
notification_body = instance.notification_data()
notification_body['friendly_name'] = friendly_name
notification_subject = "{} #{} '{}' succeeded on Ansible Tower: {}".format(friendly_name,
task_actual['id'],
instance_name,
notification_body['url'])
send_notifications.delay([n.generate_notification(notification_subject, notification_body).id
for n in set(notifiers.get('success', []) + notifiers.get('any', []))],
job_id=task_actual['id'])
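This handler is wired up as the Celery success link from UnifiedJob.start (see the link=success_callback wiring earlier in this change); the dict it receives is assumed to carry just a type tag and a primary key:
success_callback = handle_work_success.s(task_actual={'type': 'job', 'id': 42})  # illustrative wiring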
@task(bind=True)
def handle_work_error(self, task_id, subtasks=None):
print('Executing error task id %s, subtasks: %s' %
@ -173,15 +252,23 @@ def handle_work_error(self, task_id, subtasks=None):
if each_task['type'] == 'project_update':
instance = ProjectUpdate.objects.get(id=each_task['id'])
instance_name = instance.name
notifiers = instance.project.notifiers
friendly_name = "Project Update"
elif each_task['type'] == 'inventory_update':
instance = InventoryUpdate.objects.get(id=each_task['id'])
instance_name = instance.name
notifiers = instance.inventory_source.notifiers
friendly_name = "Inventory Update"
elif each_task['type'] == 'job':
instance = Job.objects.get(id=each_task['id'])
instance_name = instance.job_template.name
notifiers = instance.job_template.notifiers
friendly_name = "Job"
elif each_task['type'] == 'ad_hoc_command':
instance = AdHocCommand.objects.get(id=each_task['id'])
instance_name = instance.module_name
notifiers = []
friendly_name = "AdHoc Command"
else:
# Unknown task type
break
@ -190,6 +277,7 @@ def handle_work_error(self, task_id, subtasks=None):
first_task_id = instance.id
first_task_type = each_task['type']
first_task_name = instance_name
first_task_friendly_name = friendly_name
if instance.celery_task_id != task_id:
instance.status = 'failed'
instance.failed = True
@ -197,6 +285,16 @@ def handle_work_error(self, task_id, subtasks=None):
(first_task_type, first_task_name, first_task_id)
instance.save()
instance.socketio_emit_status("failed")
notification_body = first_task.notification_data()
notification_subject = "{} #{} '{}' failed on Ansible Tower: {}".format(first_task_friendly_name,
first_task_id,
first_task_name,
notification_body['url'])
notification_body['friendly_name'] = first_task_friendly_name
send_notifications.delay([n.generate_notification(notification_subject, notification_body).id
for n in set(notifiers.get('error', []) + notifiers.get('any', []))],
job_id=first_task_id)
@task()
def update_inventory_computed_fields(inventory_id, should_update_hosts=True):

View File

@ -0,0 +1,283 @@
{
"ansible_all_ipv4_addresses": [
"172.17.0.7"
],
"ansible_all_ipv6_addresses": [
"fe80::42:acff:fe11:7"
],
"ansible_architecture": "x86_64",
"ansible_bios_date": "12/01/2006",
"ansible_bios_version": "VirtualBox",
"ansible_cmdline": {
"BOOT_IMAGE": "/boot/vmlinuz64",
"base": true,
"console": "tty0",
"initrd": "/boot/initrd.img",
"loglevel": "3",
"noembed": true,
"nomodeset": true,
"norestore": true,
"user": "docker",
"waitusb": "10:LABEL=boot2docker-data"
},
"ansible_date_time": {
"date": "2016-02-02",
"day": "02",
"epoch": "1454424257",
"hour": "14",
"iso8601": "2016-02-02T14:44:17Z",
"iso8601_basic": "20160202T144417348424",
"iso8601_basic_short": "20160202T144417",
"iso8601_micro": "2016-02-02T14:44:17.348496Z",
"minute": "44",
"month": "02",
"second": "17",
"time": "14:44:17",
"tz": "UTC",
"tz_offset": "+0000",
"weekday": "Tuesday",
"weekday_number": "2",
"weeknumber": "05",
"year": "2016"
},
"ansible_default_ipv4": {
"address": "172.17.0.7",
"alias": "eth0",
"broadcast": "global",
"gateway": "172.17.0.1",
"interface": "eth0",
"macaddress": "02:42:ac:11:00:07",
"mtu": 1500,
"netmask": "255.255.0.0",
"network": "172.17.0.0",
"type": "ether"
},
"ansible_default_ipv6": {},
"ansible_devices": {
"sda": {
"holders": [],
"host": "",
"model": "VBOX HARDDISK",
"partitions": {
"sda1": {
"sectors": "510015555",
"sectorsize": 512,
"size": "243.19 GB",
"start": "1975995"
},
"sda2": {
"sectors": "1975932",
"sectorsize": 512,
"size": "964.81 MB",
"start": "63"
}
},
"removable": "0",
"rotational": "0",
"scheduler_mode": "deadline",
"sectors": "512000000",
"sectorsize": "512",
"size": "244.14 GB",
"support_discard": "0",
"vendor": "ATA"
},
"sr0": {
"holders": [],
"host": "",
"model": "CD-ROM",
"partitions": {},
"removable": "1",
"rotational": "1",
"scheduler_mode": "deadline",
"sectors": "61440",
"sectorsize": "2048",
"size": "120.00 MB",
"support_discard": "0",
"vendor": "VBOX"
}
},
"ansible_distribution": "Ubuntu",
"ansible_distribution_major_version": "14",
"ansible_distribution_release": "trusty",
"ansible_distribution_version": "14.04",
"ansible_dns": {
"nameservers": [
"8.8.8.8"
]
},
"ansible_domain": "",
"ansible_env": {
"HOME": "/root",
"HOSTNAME": "ede894599989",
"LANG": "en_US.UTF-8",
"LC_ALL": "en_US.UTF-8",
"LC_MESSAGES": "en_US.UTF-8",
"LESSCLOSE": "/usr/bin/lesspipe %s %s",
"LESSOPEN": "| /usr/bin/lesspipe %s",
"LS_COLORS": "",
"OLDPWD": "/ansible",
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"PWD": "/ansible/examples",
"SHLVL": "1",
"_": "/usr/local/bin/ansible",
"container": "docker"
},
"ansible_eth0": {
"active": true,
"device": "eth0",
"ipv4": {
"address": "172.17.0.7",
"broadcast": "global",
"netmask": "255.255.0.0",
"network": "172.17.0.0"
},
"ipv6": [
{
"address": "fe80::42:acff:fe11:7",
"prefix": "64",
"scope": "link"
}
],
"macaddress": "02:42:ac:11:00:07",
"mtu": 1500,
"promisc": false,
"type": "ether"
},
"ansible_fips": false,
"ansible_form_factor": "Other",
"ansible_fqdn": "ede894599989",
"ansible_hostname": "ede894599989",
"ansible_interfaces": [
"lo",
"eth0"
],
"ansible_kernel": "4.1.12-boot2docker",
"ansible_lo": {
"active": true,
"device": "lo",
"ipv4": {
"address": "127.0.0.1",
"broadcast": "host",
"netmask": "255.0.0.0",
"network": "127.0.0.0"
},
"ipv6": [
{
"address": "::1",
"prefix": "128",
"scope": "host"
}
],
"mtu": 65536,
"promisc": false,
"type": "loopback"
},
"ansible_lsb": {
"codename": "trusty",
"description": "Ubuntu 14.04.3 LTS",
"id": "Ubuntu",
"major_release": "14",
"release": "14.04"
},
"ansible_machine": "x86_64",
"ansible_memfree_mb": 3746,
"ansible_memory_mb": {
"nocache": {
"free": 8896,
"used": 3638
},
"real": {
"free": 3746,
"total": 12534,
"used": 8788
},
"swap": {
"cached": 0,
"free": 4048,
"total": 4048,
"used": 0
}
},
"ansible_memtotal_mb": 12534,
"ansible_mounts": [
{
"device": "/dev/sda1",
"fstype": "ext4",
"mount": "/etc/resolv.conf",
"options": "rw,relatime,data=ordered",
"size_available": 201281392640,
"size_total": 256895700992,
"uuid": "NA"
},
{
"device": "/dev/sda1",
"fstype": "ext4",
"mount": "/etc/hostname",
"options": "rw,relatime,data=ordered",
"size_available": 201281392640,
"size_total": 256895700992,
"uuid": "NA"
},
{
"device": "/dev/sda1",
"fstype": "ext4",
"mount": "/etc/hosts",
"options": "rw,relatime,data=ordered",
"size_available": 201281392640,
"size_total": 256895700992,
"uuid": "NA"
}
],
"ansible_nodename": "ede894599989",
"ansible_os_family": "Debian",
"ansible_pkg_mgr": "apt",
"ansible_processor": [
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz",
"GenuineIntel",
"Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz"
],
"ansible_processor_cores": 8,
"ansible_processor_count": 1,
"ansible_processor_threads_per_core": 1,
"ansible_processor_vcpus": 8,
"ansible_product_name": "VirtualBox",
"ansible_product_serial": "0",
"ansible_product_uuid": "25C5EA5A-1DF1-48D9-A2C6-81227DA153C0",
"ansible_product_version": "1.2",
"ansible_python_version": "2.7.6",
"ansible_selinux": false,
"ansible_service_mgr": "upstart",
"ansible_ssh_host_key_dsa_public": "AAAAB3NzaC1kc3MAAACBALF0xsM8UMXgSKiWNw4t19wxbxLnxQX742t/dIM0O8YLx+/lIP+Q69Dv5uoVt0zKV39eFziRlCh96qj2KYkGEJ6XfVZFnhpculL2Pv2CPpSwKuQ1vTbDO/xxUrvY+bHpfNJf9Rh69bFEE2pTsjomFPCgp8M0qGaFtwg6czSaeBONAAAAFQCGEfVtj97JiexTVRqgQITYlFp/eQAAAIEAg+S9qWn+AIb3amwVoLL/usQYOPCmZY9RVPzpkjJ6OG+HI4B7cXeauPtNTJwT0f9vGEqzf4mPpmS+aCShj6iwdmJ+cOwR5+SJlNalab3CMBoXKVLbT1J2XWFlK0szKKnoReP96IDbkAkGQ3fkm4jz0z6Wy0u6wOQVNcd4G5cwLZ4AAACAFvBm+H1LwNrwWBjWio+ayhglZ4Y25mLMEn2+dqBz0gLK5szEbft1HMPOWIVHvl6vi3v34pAJHKpxXpkLlNliTn8iw9BzCOrgP4V8sp2/85mxEuCdI1w/QERj9cHu5iS2pZ0cUwDE3pfuuGBB3IEliaJyaapowdrM8lN12jQl11E=",
"ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHiYp4e9RfXpxDcEWpK4EuXPHW9++xcFI9hiB0TYAZgxEF9RIgwfucpPawFk7HIFoNc7EXQMlryilLSbg155KWM=",
"ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAILclD2JaC654azEsAfcHRIOA2Ig9/Qk6MX80i/VCEdSH",
"ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDeSUGxZaZsgBsezld0mj3HcbAwx6aykGnejceBjcs6lVwSGMHevofzSXIQDPYBhZoyWNl0PYAHv6AsQ8+3khd2SitUMJAuHSz1ZjgHCCGQP9ijXTKHn+lWCKA8rhLG/dwYwiouoOPZfn1G+erbKO6XiVbELrrf2RadnMGuMinESIOKVj3IunXsaGRMsDOQferOnUf7MvH7xpQnoySyQ1+p4rGruaohWG+Y2cDo7+B2FylPVbrpRDDJkfbt4J96WHx0KOdD0qzOicQP8JqDflqQPJJCWcgrvjQOSe4gXdPB6GZDtBl2qgQRwt1IgizPMm+b7Bwbd2VDe1TeWV2gT/7H",
"ansible_swapfree_mb": 4048,
"ansible_swaptotal_mb": 4048,
"ansible_system": "Linux",
"ansible_system_vendor": "innotek GmbH",
"ansible_uptime_seconds": 178398,
"ansible_user_dir": "/root",
"ansible_user_gecos": "root",
"ansible_user_gid": 0,
"ansible_user_id": "root",
"ansible_user_shell": "/bin/bash",
"ansible_user_uid": 0,
"ansible_userspace_architecture": "x86_64",
"ansible_userspace_bits": "64",
"ansible_virtualization_role": "guest",
"ansible_virtualization_type": "docker",
"module_setup": true
}

View File

@ -0,0 +1,236 @@
# Python
import mock
import pytest
from datetime import timedelta
import urlparse
import urllib
# AWX
from awx.main.models.fact import Fact
from awx.main.utils import timestamp_apiformat
# Django
from django.core.urlresolvers import reverse
from django.utils import timezone
def mock_feature_enabled(feature, bypass_database=None):
return True
def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1):
hosts = hosts(host_count=host_count)
fact_scans(fact_scans=3, timestamp_epoch=epoch)
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
response = get(url, user('admin', True), data=get_params)
return (hosts[0], response)
def check_url(url1_full, fact_known, module):
url1_split = urlparse.urlsplit(url1_full)
url1 = url1_split.path
url1_params = urlparse.parse_qsl(url1_split.query)
url2 = reverse('api:host_fact_compare_view', args=(fact_known.host.pk,))
url2_params = [('module', module), ('datetime', timestamp_apiformat(fact_known.timestamp))]
assert url1 == url2
assert urllib.urlencode(url1_params) == urllib.urlencode(url2_params)
def check_response_facts(facts_known, response):
for i, fact_known in enumerate(facts_known):
assert fact_known.module == response.data['results'][i]['module']
assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp']
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_no_facts_db(hosts, get, user):
hosts = hosts(host_count=1)
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
response = get(url, user('admin', True))
response_expected = {
'results': []
}
assert response_expected == response.data
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
epoch = timezone.now()
search = {
'from': epoch,
'to': epoch,
}
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
results = response.data['results']
assert 'related' in results[0]
assert 'timestamp' in results[0]
assert 'module' in results[0]
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
@pytest.mark.skipif(True, reason="Options fix landed in devel but not here. Enable this after this PR is merged.")
def test_basic_options_fields(hosts, fact_scans, options, user):
hosts = hosts(host_count=1)
fact_scans(fact_scans=1)
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
response = options(url, user('admin', True), pk=hosts[0].id)
#import json
#print(json.dumps(response.data))
assert 'related' in response.data
assert 'id' in response.data
assert 'facts' in response.data
assert 'module' in response.data
assert 'host' in response.data
assert isinstance(response.data['host'], int)
assert 'summary_fields' in response.data
assert 'host' in response.data['summary_fields']
assert 'name' in response.data['summary_fields']['host']
assert 'description' in response.data['summary_fields']['host']
assert 'host' in response.data['related']
assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host']
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_related_fact_view(hosts, fact_scans, get, user):
epoch = timezone.now()
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch)
facts_known = Fact.get_timeline(host.id)
assert 9 == len(facts_known)
assert 9 == len(response.data['results'])
for i, fact_known in enumerate(facts_known):
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_multiple_hosts(hosts, fact_scans, get, user):
epoch = timezone.now()
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, host_count=3)
facts_known = Fact.get_timeline(host.id)
assert 9 == len(facts_known)
assert 9 == len(response.data['results'])
for i, fact_known in enumerate(facts_known):
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_to_from(hosts, fact_scans, get, user):
epoch = timezone.now()
search = {
'from': epoch - timedelta(days=10),
'to': epoch + timedelta(days=10),
}
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
facts_known = Fact.get_timeline(host.id, ts_from=search['from'], ts_to=search['to'])
assert 9 == len(facts_known)
assert 9 == len(response.data['results'])
check_response_facts(facts_known, response)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_module(hosts, fact_scans, get, user):
epoch = timezone.now()
search = {
'module': 'packages',
}
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
facts_known = Fact.get_timeline(host.id, module=search['module'])
assert 3 == len(facts_known)
assert 3 == len(response.data['results'])
check_response_facts(facts_known, response)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_from(hosts, fact_scans, get, user):
epoch = timezone.now()
search = {
'from': epoch + timedelta(days=1),
}
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
facts_known = Fact.get_timeline(host.id, ts_from=search['from'])
assert 3 == len(facts_known)
assert 3 == len(response.data['results'])
check_response_facts(facts_known, response)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_to(hosts, fact_scans, get, user):
epoch = timezone.now()
search = {
'to': epoch + timedelta(days=1),
}
(host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search)
facts_known = Fact.get_timeline(host.id, ts_to=search['to'])
assert 6 == len(facts_known)
assert 6 == len(response.data['results'])
check_response_facts(facts_known, response)
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
hosts = hosts(host_count=1)
fact_scans(fact_scans=1)
team_obj.users.add(user_obj)
url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
response = get(url, user_obj)
return response
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
user_bob = user('bob', False)
response = _test_user_access_control(hosts, fact_scans, get, user_bob, team)
assert 403 == response.status_code
assert "You do not have permission to perform this action." == response.data['detail']
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
user_super = user('bob', True)
response = _test_user_access_control(hosts, fact_scans, get, user_super, team)
assert 200 == response.status_code
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
user_admin = user('johnson', False)
organization.admins.add(user_admin)
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
assert 200 == response.status_code
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
user_admin = user('johnson', False)
org2 = organizations(1)
org2[0].admins.add(user_admin)
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
assert 403 == response.status_code

View File

@ -0,0 +1,156 @@
import mock
import pytest
import json
import urllib
from awx.main.utils import timestamp_apiformat
from django.core.urlresolvers import reverse
from django.utils import timezone
def mock_feature_enabled(feature, bypass_database=None):
return True
# TODO: Consider making the fact_scans() fixture a class, instead of a function, and move this method into it
def find_fact(facts, host_id, module_name, timestamp):
for f in facts:
if f.host_id == host_id and f.module == module_name and f.timestamp == timestamp:
return f
raise RuntimeError('fact <%s, %s, %s> not found in %s' % (host_id, module_name, timestamp, facts))
def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name='ansible', get_params={}):
hosts = hosts(host_count=1)
facts = fact_scans(fact_scans=1, timestamp_epoch=epoch)
url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
response = get(url, user('admin', True), data=get_params)
fact_known = find_fact(facts, hosts[0].id, module_name, epoch)
return (fact_known, response)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_no_fact_found(hosts, get, user):
hosts = hosts(host_count=1)
url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
response = get(url, user('admin', True))
expected_response = {
"detail": "Fact not found"
}
assert 404 == response.status_code
assert expected_response == response.data
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
hosts = hosts(host_count=1)
fact_scans(fact_scans=1)
url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
response = get(url, user('admin', True))
assert 'related' in response.data
assert 'id' in response.data
assert 'facts' in response.data
assert 'module' in response.data
assert 'host' in response.data
assert isinstance(response.data['host'], int)
assert 'summary_fields' in response.data
assert 'host' in response.data['summary_fields']
assert 'name' in response.data['summary_fields']['host']
assert 'description' in response.data['summary_fields']['host']
assert 'host' in response.data['related']
assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host']
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_content(hosts, fact_scans, get, user, fact_ansible_json):
(fact_known, response) = setup_common(hosts, fact_scans, get, user)
assert fact_known.host_id == response.data['host']
assert fact_ansible_json == json.loads(response.data['facts'])
assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
assert fact_known.module == response.data['module']
def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name):
params = {
'module': module_name
}
(fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params)
assert fact_json == json.loads(response.data['facts'])
assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
assert module_name == response.data['module']
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json):
_test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages')
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json):
_test_search_by_module(hosts, fact_scans, get, user, fact_services_json, 'services')
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json):
epoch = timezone.now()
module_name = 'packages'
(fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, epoch=epoch, get_params=dict(module=module_name, datetime=epoch))
assert fact_known.id == response.data['id']
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
hosts = hosts(host_count=1)
fact_scans(fact_scans=1)
team_obj.users.add(user_obj)
url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
response = get(url, user_obj)
return response
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
user_bob = user('bob', False)
response = _test_user_access_control(hosts, fact_scans, get, user_bob, team)
assert 403 == response.status_code
assert "You do not have permission to perform this action." == response.data['detail']
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
user_super = user('bob', True)
response = _test_user_access_control(hosts, fact_scans, get, user_super, team)
assert 200 == response.status_code
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
user_admin = user('johnson', False)
organization.admins.add(user_admin)
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
assert 200 == response.status_code
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
user_admin = user('johnson', False)
org2 = organizations(1)
org2[0].admins.add(user_admin)
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
assert 403 == response.status_code

View File

@ -0,0 +1,17 @@
# TODO: As of this writing, our only concern is ensuring that the fact feature is reflected in the Host endpoint.
# Other host tests should live here to make this test suite more complete.
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
hosts = hosts(host_count=1)
url = reverse('api:host_detail', args=(hosts[0].pk,))
response = get(url, user('admin', True))
assert 'related' in response.data
assert 'fact_versions' in response.data['related']
assert reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) == response.data['related']['fact_versions']

View File

@ -0,0 +1,109 @@
import pytest
import time
from datetime import datetime
@pytest.fixture
def fact_msg_base(inventory, hosts):
host_objs = hosts(1)
return {
'host': host_objs[0].name,
'date_key': time.mktime(datetime.utcnow().timetuple()),
'facts' : { },
'inventory_id': inventory.id
}
@pytest.fixture
def fact_msg_small(fact_msg_base):
fact_msg_base['facts'] = {
'packages': {
"accountsservice": [
{
"architecture": "amd64",
"name": "accountsservice",
"source": "apt",
"version": "0.6.35-0ubuntu7.1"
}
],
"acpid": [
{
"architecture": "amd64",
"name": "acpid",
"source": "apt",
"version": "1:2.0.21-1ubuntu2"
}
],
"adduser": [
{
"architecture": "all",
"name": "adduser",
"source": "apt",
"version": "3.113+nmu3ubuntu3"
}
],
},
'services': [
{
"name": "acpid",
"source": "sysv",
"state": "running"
},
{
"name": "apparmor",
"source": "sysv",
"state": "stopped"
},
{
"name": "atd",
"source": "sysv",
"state": "running"
},
{
"name": "cron",
"source": "sysv",
"state": "running"
}
],
'ansible': {
'ansible_fact_simple': 'hello world',
'ansible_fact_complex': {
'foo': 'bar',
'hello': [
'scooby',
'dooby',
'doo'
]
},
}
}
return fact_msg_base
'''
Facts sent from Ansible to our fact cache receiver.
The fact module type is implicit, i.e. the first-level key names the module:
{
'fact_module_name': { ... },
}
Note: The 'ansible' module is an exception to this rule.
Its facts are NOT nested in a dict, and thus do NOT sit under a first-level
key of 'ansible'.
'''
@pytest.fixture
def fact_msg_ansible(fact_msg_base, fact_ansible_json):
fact_msg_base['facts'] = fact_ansible_json
return fact_msg_base
@pytest.fixture
def fact_msg_packages(fact_msg_base, fact_packages_json):
fact_msg_base['facts']['packages'] = fact_packages_json
return fact_msg_base
@pytest.fixture
def fact_msg_services(fact_msg_base, fact_services_json):
fact_msg_base['facts']['services'] = fact_services_json
return fact_msg_base

View File

@ -0,0 +1,95 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
# Python
import pytest
from datetime import datetime
import json
# Django
# AWX
from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver
from awx.main.models.fact import Fact
from awx.main.models.inventory import Host
# TODO: Check that timestamp and other attributes are as expected
def check_process_fact_message_module(fact_returned, data, module_name):
date_key = data['date_key']
# Ensure 1, and only 1, fact created
timestamp = datetime.fromtimestamp(date_key, None)
assert 1 == Fact.objects.all().count()
host_obj = Host.objects.get(name=data['host'], inventory__id=data['inventory_id'])
assert host_obj is not None
fact_known = Fact.get_host_fact(host_obj.id, module_name, timestamp)
assert fact_known is not None
assert fact_known == fact_returned
assert host_obj == fact_returned.host
if module_name == 'ansible':
assert data['facts'] == fact_returned.facts
else:
assert data['facts'][module_name] == fact_returned.facts
assert timestamp == fact_returned.timestamp
assert module_name == fact_returned.module
@pytest.mark.django_db
def test_process_fact_message_ansible(fact_msg_ansible):
receiver = FactCacheReceiver()
fact_returned = receiver.process_fact_message(fact_msg_ansible)
check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')
@pytest.mark.django_db
def test_process_fact_message_packages(fact_msg_packages):
receiver = FactCacheReceiver()
fact_returned = receiver.process_fact_message(fact_msg_packages)
check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')
@pytest.mark.django_db
def test_process_fact_message_services(fact_msg_services):
receiver = FactCacheReceiver()
fact_returned = receiver.process_fact_message(fact_msg_services)
check_process_fact_message_module(fact_returned, fact_msg_services, 'services')
'''
We piggyback our fact sending onto the Ansible fact interface.
The interface is <hostname, facts>, where facts is a JSON blob of all the facts.
This makes it hard to decipher which facts are new or changed.
Because of this, we handle the same fact module data being sent multiple times
and just keep the newest version.
'''
@pytest.mark.django_db
def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
#epoch = timezone.now()
epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
fact_scans(fact_scans=1, timestamp_epoch=epoch)
key = 'ansible.overwrite'
value = 'hello world'
receiver = FactCacheReceiver()
receiver.process_fact_message(fact_msg_ansible)
fact_msg_ansible['facts'][key] = value
fact_returned = receiver.process_fact_message(fact_msg_ansible)
fact_obj = Fact.objects.get(id=fact_returned.id)
assert key in fact_obj.facts
assert json.loads(fact_obj.facts) == fact_msg_ansible['facts']
assert value == json.loads(fact_obj.facts)[key]
# Ensure that the message flows from the socket through to process_fact_message()
@pytest.mark.django_db
def test_run_receiver(mocker, fact_msg_ansible):
mocker.patch("awx.main.socket.Socket.listen", return_value=[fact_msg_ansible])
receiver = FactCacheReceiver()
mocker.patch.object(receiver, 'process_fact_message', return_value=None)
receiver.run_receiver(use_processing_threads=False)
receiver.process_fact_message.assert_called_once_with(fact_msg_ansible)

View File

@ -1,8 +1,23 @@
import pytest
# Python
import pytest
import mock
import json
import os
from datetime import timedelta
# Django
from django.core.urlresolvers import resolve
from django.utils.six.moves.urllib.parse import urlparse
from django.utils import timezone
from django.contrib.auth.models import User
from django.conf import settings
# AWX
from awx.main.models.projects import Project
from awx.main.models.base import PERM_INVENTORY_READ
from awx.main.models.ha import Instance
from awx.main.models.fact import Fact
from rest_framework.test import (
APIRequestFactory,
@ -10,20 +25,34 @@ from rest_framework.test import (
)
from awx.main.models.credential import Credential
from awx.main.models.projects import Project
from awx.main.models.jobs import JobTemplate
from awx.main.models.ha import Instance
from awx.main.models.inventory import (
Inventory,
Group,
)
from awx.main.models.organization import (
Organization,
Team,
Permission,
)
from awx.main.models.rbac import Role
'''
Disable all django model signals.
'''
@pytest.fixture(scope="session", autouse=False)
def disable_signals():
mocked = mock.patch('django.dispatch.Signal.send', autospec=True)
# Note: the patch is never stopped, so it stays active for the session.
mocked.start()
'''
Allows Django signal code to execute without the need for Redis.
FIXME: Not sure how "far" just setting the BROKER_URL will get us.
We may need to influence Celery's configuration like we do in the old unit tests (see base.py).
'''
@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
settings.BROKER_URL='memory://localhost/'
@pytest.fixture
def user():
@ -60,11 +89,15 @@ def deploy_jobtemplate(project, inventory, credential):
@pytest.fixture
def team(organization):
return Team.objects.create(organization=organization, name='test-team')
return organization.teams.create(name='test-team')
@pytest.fixture
def project(organization):
prj = Project.objects.create(name="test-project", description="test-project-desc")
@mock.patch.object(Project, "update", lambda self, **kwargs: None)
def project(instance, organization):
prj = Project.objects.create(name="test-proj",
description="test-proj-desc",
scm_type="git",
scm_url="https://github.com/jlaska/ansible-playbooks")
prj.organizations.add(organization)
return prj
@ -87,7 +120,7 @@ def credential():
@pytest.fixture
def inventory(organization):
return Inventory.objects.create(name="test-inventory", organization=organization)
return organization.inventories.create(name="test-inv")
@pytest.fixture
def role():
@ -105,12 +138,38 @@ def alice(user):
def bob(user):
return user('bob', False)
@pytest.fixture
def organizations(instance):
def rf(organization_count=1):
orgs = []
for i in xrange(0, organization_count):
o = Organization.objects.create(name="test-org-%d" % i, description="test-org-desc")
orgs.append(o)
return orgs
return rf
@pytest.fixture
def group(inventory):
def g(name):
return Group.objects.create(inventory=inventory, name=name)
return g
@pytest.fixture
def hosts(group):
def rf(host_count=1):
hosts = []
for i in xrange(0, host_count):
name = '%s-host-%s' % (group.name, i)
(host, created) = group.inventory.hosts.get_or_create(name=name)
if created:
group.hosts.add(host)
hosts.append(host)
return hosts
return rf
@pytest.fixture
def permissions():
return {
@ -244,7 +303,46 @@ def options():
return response
return rf
@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
from django.conf import settings
settings.BROKER_URL='memory://localhost/'
@pytest.fixture
def fact_scans(group, fact_ansible_json, fact_packages_json, fact_services_json):
def rf(fact_scans=1, timestamp_epoch=None):
# Resolve the default at call time; timezone.now() in the signature
# would be evaluated only once, when the fixture is defined.
if timestamp_epoch is None:
timestamp_epoch = timezone.now()
facts_json = {}
facts = []
module_names = ['ansible', 'services', 'packages']
timestamp_current = timestamp_epoch
facts_json['ansible'] = fact_ansible_json
facts_json['packages'] = fact_packages_json
facts_json['services'] = fact_services_json
for i in xrange(0, fact_scans):
for host in group.hosts.all():
for module_name in module_names:
facts.append(Fact.objects.create(host=host, timestamp=timestamp_current, module=module_name, facts=facts_json[module_name]))
timestamp_current += timedelta(days=1)
return facts
return rf
def _fact_json(module_name):
current_dir = os.path.dirname(os.path.realpath(__file__))
with open('%s/%s.json' % (current_dir, module_name)) as f:
return json.load(f)
@pytest.fixture
def fact_ansible_json():
return _fact_json('ansible')
@pytest.fixture
def fact_packages_json():
return _fact_json('packages')
@pytest.fixture
def fact_services_json():
return _fact_json('services')
@pytest.fixture
def permission_inv_read(organization, inventory, team):
return Permission.objects.create(inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ)

View File

@ -0,0 +1,111 @@
import pytest
from datetime import timedelta
from django.utils import timezone
from awx.main.models import Fact
@pytest.mark.django_db
def test_newest_scan_exact(hosts, fact_scans):
epoch = timezone.now()
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
fact_known = None
for f in facts:
if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch:
fact_known = f
break
fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', epoch)
assert fact_found == fact_known
'''
Show me the most recent state of the system at any point in time.
Or, said differently:
for any timestamp, get the newest scan whose timestamp is <= that timestamp.
'''
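# In ORM terms, the lookup exercised here amounts to the following sketch
# (expected semantics only; not necessarily get_host_fact's implementation):
def _newest_fact_at(host_id, module, ts):
    return (Fact.objects
            .filter(host__id=host_id, module=module, timestamp__lte=ts)
            .order_by('-timestamp')
            .first())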
'''
Ensure most recent scan run is the scan returned.
Query by future date.
'''
@pytest.mark.django_db
def test_newest_scan_less_than(hosts, fact_scans):
epoch = timezone.now()
timestamp_future = epoch + timedelta(days=10)
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
fact_known = None
for f in facts:
if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=2):
fact_known = f
break
assert fact_known is not None
fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_future)
assert fact_found == fact_known
'''
Query for a Fact in the middle of the fact scan timeline, but not at an exact scan timestamp.
'''
@pytest.mark.django_db
def test_query_middle_of_timeline(hosts, fact_scans):
epoch = timezone.now()
timestamp_middle = epoch + timedelta(days=1, hours=3)
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
fact_known = None
for f in facts:
if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=1):
fact_known = f
break
assert fact_known is not None
fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_middle)
assert fact_found == fact_known
'''
Query with a timestamp earlier than any fact scan. Should return None.
'''
@pytest.mark.django_db
def test_query_result_empty(hosts, fact_scans):
epoch = timezone.now()
timestamp_less = epoch - timedelta(days=1)
hosts = hosts(host_count=2)
fact_scans(fact_scans=3, timestamp_epoch=epoch)
fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_less)
assert fact_found is None
'''
Query by fact module other than 'ansible'
'''
@pytest.mark.django_db
def test_by_module(hosts, fact_scans):
epoch = timezone.now()
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
fact_known_services = None
fact_known_packages = None
for f in facts:
if f.host_id == hosts[0].id:
if f.module == 'services' and f.timestamp == epoch:
fact_known_services = f
elif f.module == 'packages' and f.timestamp == epoch:
fact_known_packages = f
assert fact_known_services is not None
assert fact_known_packages is not None
fact_found_services = Fact.get_host_fact(hosts[0].id, 'services', epoch)
fact_found_packages = Fact.get_host_fact(hosts[0].id, 'packages', epoch)
assert fact_found_services == fact_known_services
assert fact_found_packages == fact_known_packages

View File

@ -0,0 +1,129 @@
import pytest
from datetime import timedelta
from django.utils import timezone
from awx.main.models import Fact
def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=None, module_name='ansible', ts_known=None):
# Resolve the default at call time rather than at import time.
if epoch is None:
epoch = timezone.now()
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
facts_known = []
for f in facts:
if f.host.id == hosts[0].id:
if module_name and f.module != module_name:
continue
if ts_known and f.timestamp != ts_known:
continue
facts_known.append(f)
fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
return (facts_known, fact_objs)
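# Hypothetical ORM equivalent of Fact.get_timeline, matching the boundary
# semantics the tests below assert: ts_from is exclusive (gt), ts_to is
# inclusive (lte), and results come back newest first. A sketch only.
def _timeline_sketch(host_id, module=None, ts_from=None, ts_to=None):
    qs = Fact.objects.filter(host__id=host_id)
    if module:
        qs = qs.filter(module=module)
    if ts_from is not None:
        qs = qs.filter(timestamp__gt=ts_from)
    if ts_to is not None:
        qs = qs.filter(timestamp__lte=ts_to)
    return qs.order_by('-timestamp')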
@pytest.mark.django_db
def test_all(hosts, fact_scans):
epoch = timezone.now()
ts_from = epoch - timedelta(days=1)
ts_to = epoch + timedelta(days=10)
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch)
assert 9 == len(facts_known)
assert 9 == len(fact_objs)
@pytest.mark.django_db
def test_all_ansible(hosts, fact_scans):
epoch = timezone.now()
ts_from = epoch - timedelta(days=1)
ts_to = epoch + timedelta(days=10)
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
assert 3 == len(facts_known)
assert 3 == len(fact_objs)
# The original xrange(len(facts_known) - 1, 0) was an empty range, so
# these assertions never ran; get_timeline returns newest first, so
# compare against facts_known in reverse.
for i in xrange(len(fact_objs)):
assert facts_known[len(facts_known) - i - 1].id == fact_objs[i].id
@pytest.mark.django_db
def test_empty_db(hosts, fact_scans):
hosts = hosts(host_count=2)
epoch = timezone.now()
ts_from = epoch - timedelta(days=1)
ts_to = epoch + timedelta(days=10)
fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to)
assert 0 == len(fact_objs)
@pytest.mark.django_db
def test_no_results(hosts, fact_scans):
epoch = timezone.now()
ts_from = epoch - timedelta(days=100)
ts_to = epoch - timedelta(days=50)
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
assert 0 == len(fact_objs)
@pytest.mark.django_db
def test_exact_same_equal(hosts, fact_scans):
epoch = timezone.now()
ts_to = ts_from = epoch + timedelta(days=1)
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
assert 1 == len(facts_known)
assert 1 == len(fact_objs)
assert facts_known[0].id == fact_objs[0].id
@pytest.mark.django_db
def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):
epoch = timezone.now()
ts_from = epoch + timedelta(days=1)
ts_to = epoch + timedelta(days=2)
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
assert 1 == len(facts_known)
assert 1 == len(fact_objs)
assert facts_known[0].id == fact_objs[0].id
@pytest.mark.django_db
def test_to_lte(hosts, fact_scans):
epoch = timezone.now()
ts_to = epoch + timedelta(days=1)
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch)
facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known)
assert 2 == len(facts_known_subset)
assert 2 == len(fact_objs)
for i in xrange(0, len(fact_objs)):
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
@pytest.mark.django_db
def test_from_gt(hosts, fact_scans):
epoch = timezone.now()
ts_from = epoch
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch)
facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known)
assert 2 == len(facts_known_subset)
assert 2 == len(fact_objs)
for i in xrange(0, len(fact_objs)):
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
@pytest.mark.django_db
def test_no_ts(hosts, fact_scans):
epoch = timezone.now()
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=epoch)
assert 3 == len(facts_known)
assert 3 == len(fact_objs)
# As above, the original xrange(len(facts_known) - 1, 0) was an empty
# range; compare against facts_known in reverse (newest first).
for i in xrange(len(fact_objs)):
assert facts_known[len(facts_known) - i - 1].id == fact_objs[i].id

File diff suppressed because it is too large

View File

@ -0,0 +1,697 @@
[
{
"source": "sysv",
"state": "running",
"name": "iprdump"
},
{
"source": "sysv",
"state": "running",
"name": "iprinit"
},
{
"source": "sysv",
"state": "running",
"name": "iprupdate"
},
{
"source": "sysv",
"state": "stopped",
"name": "netconsole"
},
{
"source": "sysv",
"state": "running",
"name": "network"
},
{
"source": "systemd",
"state": "stopped",
"name": "arp-ethers.service"
},
{
"source": "systemd",
"state": "running",
"name": "auditd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "autovt@.service"
},
{
"source": "systemd",
"state": "running",
"name": "avahi-daemon.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "blk-availability.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "brandbot.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "console-getty.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "console-shell.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "cpupower.service"
},
{
"source": "systemd",
"state": "running",
"name": "crond.service"
},
{
"source": "systemd",
"state": "running",
"name": "dbus-org.fedoraproject.FirewallD1.service"
},
{
"source": "systemd",
"state": "running",
"name": "dbus-org.freedesktop.Avahi.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dbus-org.freedesktop.hostname1.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dbus-org.freedesktop.locale1.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dbus-org.freedesktop.login1.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dbus-org.freedesktop.machine1.service"
},
{
"source": "systemd",
"state": "running",
"name": "dbus-org.freedesktop.NetworkManager.service"
},
{
"source": "systemd",
"state": "running",
"name": "dbus-org.freedesktop.nm-dispatcher.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dbus-org.freedesktop.timedate1.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dbus.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "debug-shell.service"
},
{
"source": "systemd",
"state": "running",
"name": "dhcpd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dhcpd6.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dhcrelay.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dm-event.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dnsmasq.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-cmdline.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-initqueue.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-mount.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-pre-mount.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-pre-pivot.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-pre-trigger.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-pre-udev.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "dracut-shutdown.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "ebtables.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "emergency.service"
},
{
"source": "systemd",
"state": "running",
"name": "firewalld.service"
},
{
"source": "systemd",
"state": "running",
"name": "getty@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "halt-local.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "initrd-cleanup.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "initrd-parse-etc.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "initrd-switch-root.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "initrd-udevadm-cleanup-db.service"
},
{
"source": "systemd",
"state": "running",
"name": "irqbalance.service"
},
{
"source": "systemd",
"state": "running",
"name": "kdump.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "kmod-static-nodes.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "lvm2-lvmetad.service"
},
{
"source": "systemd",
"state": "running",
"name": "lvm2-monitor.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "lvm2-pvscan@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "messagebus.service"
},
{
"source": "systemd",
"state": "running",
"name": "microcode.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "named-setup-rndc.service"
},
{
"source": "systemd",
"state": "running",
"name": "named.service"
},
{
"source": "systemd",
"state": "running",
"name": "NetworkManager-dispatcher.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "NetworkManager-wait-online.service"
},
{
"source": "systemd",
"state": "running",
"name": "NetworkManager.service"
},
{
"source": "systemd",
"state": "running",
"name": "ntpd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "ntpdate.service"
},
{
"source": "systemd",
"state": "running",
"name": "openvpn@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-halt.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-kexec.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-poweroff.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-quit-wait.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-quit.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-read-write.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-reboot.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-start.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "plymouth-switch-root.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "polkit.service"
},
{
"source": "systemd",
"state": "running",
"name": "postfix.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "quotaon.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rc-local.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rdisc.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rescue.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-autorelabel-mark.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-autorelabel.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-configure.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-dmesg.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-domainname.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-import-state.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-loadmodules.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "rhel-readonly.service"
},
{
"source": "systemd",
"state": "running",
"name": "rsyslog.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "serial-getty@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "sshd-keygen.service"
},
{
"source": "systemd",
"state": "running",
"name": "sshd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "sshd@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-ask-password-console.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-ask-password-plymouth.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-ask-password-wall.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-backlight@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-binfmt.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-fsck-root.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-fsck@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-halt.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-hibernate.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-hostnamed.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-hybrid-sleep.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-initctl.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-journal-flush.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-journald.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-kexec.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-localed.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-logind.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-machined.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-modules-load.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-nspawn@.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-poweroff.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-quotacheck.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-random-seed.service"
},
{
"source": "systemd",
"state": "running",
"name": "systemd-readahead-collect.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-readahead-done.service"
},
{
"source": "systemd",
"state": "running",
"name": "systemd-readahead-drop.service"
},
{
"source": "systemd",
"state": "running",
"name": "systemd-readahead-replay.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-reboot.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-remount-fs.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-shutdownd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-suspend.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-sysctl.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-timedated.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-tmpfiles-clean.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-tmpfiles-setup-dev.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-tmpfiles-setup.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-udev-settle.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-udev-trigger.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-udevd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-update-utmp-runlevel.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-update-utmp.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-user-sessions.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "systemd-vconsole-setup.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "teamd@.service"
},
{
"source": "systemd",
"state": "running",
"name": "tuned.service"
},
{
"source": "systemd",
"state": "running",
"name": "vmtoolsd.service"
},
{
"source": "systemd",
"state": "stopped",
"name": "wpa_supplicant.service"
}
]

View File

@ -0,0 +1,115 @@
import mock
import pytest
from awx.main.models.notifications import Notifier
from awx.main.models.inventory import Inventory, Group
from awx.main.models.jobs import JobTemplate
from django.core.urlresolvers import reverse
@pytest.fixture
def notifier():
return Notifier.objects.create(name="test-notification",
notification_type="webhook",
notification_configuration=dict(url="http://localhost",
headers={"Test": "Header"}))
@pytest.mark.django_db
def test_get_notifier_list(get, user, notifier):
url = reverse('api:notifier_list')
response = get(url, user('admin', True))
assert response.status_code == 200
assert len(response.data['results']) == 1
@pytest.mark.django_db
def test_basic_parameterization(get, post, user, organization):
u = user('admin-poster', True)
url = reverse('api:notifier_list')
response = post(url,
dict(name="test-webhook",
description="test webhook",
organization=1,
notification_type="webhook",
notification_configuration=dict(url="http://localhost",
headers={"Test": "Header"})),
u)
assert response.status_code == 201
url = reverse('api:notifier_detail', args=(response.data['id'],))
response = get(url, u)
assert 'related' in response.data
assert 'organization' in response.data['related']
assert 'summary_fields' in response.data
assert 'organization' in response.data['summary_fields']
assert 'notifications' in response.data['related']
assert 'notification_configuration' in response.data
assert 'url' in response.data['notification_configuration']
assert 'headers' in response.data['notification_configuration']
@pytest.mark.django_db
def test_encrypted_subfields(get, post, user, organization):
def assert_send(self, messages):
assert self.account_token == "shouldhide"
return 1
u = user('admin-poster', True)
url = reverse('api:notifier_list')
response = post(url,
dict(name="test-twilio",
description="test twilio",
organization=organization.id,
notification_type="twilio",
notification_configuration=dict(account_sid="dummy",
account_token="shouldhide",
from_number="+19999999999",
to_numbers=["9998887777"])),
u)
assert response.status_code == 201
notifier_actual = Notifier.objects.get(id=response.data['id'])
url = reverse('api:notifier_detail', args=(response.data['id'],))
response = get(url, u)
assert response.data['notification_configuration']['account_token'] == "$encrypted$"
with mock.patch.object(notifier_actual.notification_class, "send_messages", assert_send):
notifier_actual.send("Test", {'body': "Test"})
@pytest.mark.django_db
def test_inherited_notifiers(get, post, user, organization, project):
u = user('admin-poster', True)
url = reverse('api:notifier_list')
notifiers = []
for nfiers in xrange(3):
response = post(url,
dict(name="test-webhook-{}".format(nfiers),
description="test webhook {}".format(nfiers),
organization=1,
notification_type="webhook",
notification_configuration=dict(url="http://localhost",
headers={"Test": "Header"})),
u)
assert response.status_code == 201
notifiers.append(response.data['id'])
organization.projects.add(project)
i = Inventory.objects.create(name='test', organization=organization)
i.save()
g = Group.objects.create(name='test', inventory=i)
g.save()
jt = JobTemplate.objects.create(name='test', inventory=i, project=project, playbook='debug.yml')
jt.save()
url = reverse('api:organization_notifiers_any_list', args=(organization.id,))
response = post(url, dict(id=notifiers[0]), u)
assert response.status_code == 204
url = reverse('api:project_notifiers_any_list', args=(project.id,))
response = post(url, dict(id=notifiers[1]), u)
assert response.status_code == 204
url = reverse('api:job_template_notifiers_any_list', args=(jt.id,))
response = post(url, dict(id=notifiers[2]), u)
assert response.status_code == 204
assert len(jt.notifiers['any']) == 3
assert len(project.notifiers['any']) == 2
assert len(g.inventory_source.notifiers['any']) == 1
@pytest.mark.django_db
def test_notifier_merging(get, post, user, organization, project, notifier):
user('admin-poster', True)
organization.projects.add(project)
organization.notifiers_any.add(notifier)
project.notifiers_any.add(notifier)
assert len(project.notifiers['any']) == 1

View File

@ -128,8 +128,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
self.assertFalse(ad_hoc_command.passwords_needed_to_start)
self.assertTrue(ad_hoc_command.signal_start())
ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
self.check_job_result(ad_hoc_command, 'failed')
self.check_ad_hoc_command_events(ad_hoc_command, 'unreachable')
self.check_job_result(ad_hoc_command, 'successful')
self.check_ad_hoc_command_events(ad_hoc_command, 'skipped')
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('canceled', 0))
def test_cancel_ad_hoc_command(self, ignore):

File diff suppressed because one or more lines are too long

View File

@ -1,242 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
# Python
import unittest2 as unittest
# Django
from django.core.urlresolvers import reverse
# AWX
from awx.main.utils import timestamp_apiformat
from awx.main.models import * # noqa
from awx.main.tests.base import BaseLiveServerTest
from awx.fact.models import * # noqa
from awx.fact.tests.base import BaseFactTestMixin, FactScanBuilder, TEST_FACT_ANSIBLE, TEST_FACT_PACKAGES, TEST_FACT_SERVICES
from awx.main.utils import build_url
__all__ = ['FactVersionApiTest', 'FactViewApiTest', 'SingleFactApiTest',]
class FactApiBaseTest(BaseLiveServerTest, BaseFactTestMixin):
def setUp(self):
super(FactApiBaseTest, self).setUp()
self.create_test_license_file()
self.setup_instances()
self.setup_users()
self.organization = self.make_organization(self.super_django_user)
self.organization.admins.add(self.normal_django_user)
self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory')
self.host = self.inventory.hosts.create(name='host.example.com')
self.host2 = self.inventory.hosts.create(name='host2.example.com')
self.host3 = self.inventory.hosts.create(name='host3.example.com')
def setup_facts(self, scan_count):
self.builder = FactScanBuilder()
self.builder.set_inventory_id(self.inventory.pk)
self.builder.add_fact('ansible', TEST_FACT_ANSIBLE)
self.builder.add_fact('packages', TEST_FACT_PACKAGES)
self.builder.add_fact('services', TEST_FACT_SERVICES)
self.builder.add_hostname('host.example.com')
self.builder.add_hostname('host2.example.com')
self.builder.add_hostname('host3.example.com')
self.builder.build(scan_count=scan_count, host_count=3)
self.fact_host = FactHost.objects.get(hostname=self.host.name)
class FactVersionApiTest(FactApiBaseTest):
def check_equal(self, fact_versions, results):
def find(element, set1):
for e in set1:
if all([ e.get(field) == element.get(field) for field in element.keys()]):
return e
return None
self.assertEqual(len(results), len(fact_versions))
for v in fact_versions:
v_dict = {
'timestamp': timestamp_apiformat(v.timestamp),
'module': v.module
}
e = find(v_dict, results)
self.assertIsNotNone(e, "%s not found in %s" % (v_dict, results))
def get_list(self, fact_versions, params=None):
url = build_url('api:host_fact_versions_list', args=(self.host.pk,), get=params)
with self.current_user(self.super_django_user):
response = self.get(url, expect=200)
self.check_equal(fact_versions, response['results'])
return response
def test_permission_list(self):
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
with self.current_user('admin'):
self.get(url, expect=200)
with self.current_user('normal'):
self.get(url, expect=200)
with self.current_user('other'):
self.get(url, expect=403)
with self.current_user('nobody'):
self.get(url, expect=403)
with self.current_user(None):
self.get(url, expect=401)
def test_list_empty(self):
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
with self.current_user(self.super_django_user):
response = self.get(url, expect=200)
self.assertIn('results', response)
self.assertIsInstance(response['results'], list)
self.assertEqual(len(response['results']), 0)
def test_list_related_fact_view(self):
self.setup_facts(2)
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
with self.current_user(self.super_django_user):
response = self.get(url, expect=200)
for entry in response['results']:
self.assertIn('fact_view', entry['related'])
self.get(entry['related']['fact_view'], expect=200)
def test_list(self):
self.setup_facts(2)
self.get_list(FactVersion.objects.filter(host=self.fact_host))
def test_list_module(self):
self.setup_facts(10)
self.get_list(FactVersion.objects.filter(host=self.fact_host, module='packages'), dict(module='packages'))
def test_list_time_from(self):
self.setup_facts(10)
params = {
'from': timestamp_apiformat(self.builder.get_timestamp(1)),
}
# 'to': timestamp_apiformat(self.builder.get_timestamp(3))
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'])
self.get_list(fact_versions, params)
def test_list_time_to(self):
self.setup_facts(10)
params = {
'to': timestamp_apiformat(self.builder.get_timestamp(3))
}
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__lte=params['to'])
self.get_list(fact_versions, params)
def test_list_time_from_to(self):
self.setup_facts(10)
params = {
'from': timestamp_apiformat(self.builder.get_timestamp(1)),
'to': timestamp_apiformat(self.builder.get_timestamp(3))
}
fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'], timestamp__lte=params['to'])
self.get_list(fact_versions, params)
class FactViewApiTest(FactApiBaseTest):
def check_equal(self, fact_obj, results):
fact_dict = {
'timestamp': timestamp_apiformat(fact_obj.timestamp),
'module': fact_obj.module,
'host': {
'hostname': fact_obj.host.hostname,
'inventory_id': fact_obj.host.inventory_id,
'id': str(fact_obj.host.id)
},
'fact': fact_obj.fact
}
self.assertEqual(fact_dict, results)
def test_permission_view(self):
url = reverse('api:host_fact_compare_view', args=(self.host.pk,))
with self.current_user('admin'):
self.get(url, expect=200)
with self.current_user('normal'):
self.get(url, expect=200)
with self.current_user('other'):
self.get(url, expect=403)
with self.current_user('nobody'):
self.get(url, expect=403)
with self.current_user(None):
self.get(url, expect=401)
def get_fact(self, fact_obj, params=None):
url = build_url('api:host_fact_compare_view', args=(self.host.pk,), get=params)
with self.current_user(self.super_django_user):
response = self.get(url, expect=200)
self.check_equal(fact_obj, response)
def test_view(self):
self.setup_facts(2)
self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible').order_by('-timestamp')[0])
def test_view_module_filter(self):
self.setup_facts(2)
self.get_fact(Fact.objects.filter(host=self.fact_host, module='services').order_by('-timestamp')[0], dict(module='services'))
def test_view_time_filter(self):
self.setup_facts(6)
ts = self.builder.get_timestamp(3)
self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible', timestamp__lte=ts).order_by('-timestamp')[0],
dict(datetime=ts))
@unittest.skip("single fact query needs to be updated to use inventory_id attribute on host document")
class SingleFactApiTest(FactApiBaseTest):
def setUp(self):
super(SingleFactApiTest, self).setUp()
self.group = self.inventory.groups.create(name='test-group')
self.group.hosts.add(self.host, self.host2, self.host3)
def test_permission_list(self):
url = reverse('api:host_fact_versions_list', args=(self.host.pk,))
with self.current_user('admin'):
self.get(url, expect=200)
with self.current_user('normal'):
self.get(url, expect=200)
with self.current_user('other'):
self.get(url, expect=403)
with self.current_user('nobody'):
self.get(url, expect=403)
with self.current_user(None):
self.get(url, expect=401)
def _test_related(self, url):
with self.current_user(self.super_django_user):
response = self.get(url, expect=200)
self.assertTrue(len(response['results']) > 0)
for entry in response['results']:
self.assertIn('single_fact', entry['related'])
# Requires fields
self.get(entry['related']['single_fact'], expect=400)
def test_related_host_list(self):
self.setup_facts(2)
self._test_related(reverse('api:host_list'))
def test_related_group_list(self):
self.setup_facts(2)
self._test_related(reverse('api:group_list'))
def test_related_inventory_list(self):
self.setup_facts(2)
self._test_related(reverse('api:inventory_list'))
def test_params(self):
self.setup_facts(2)
params = {
'module': 'packages',
'fact_key': 'name',
'fact_value': 'acpid',
}
url = build_url('api:inventory_single_fact_view', args=(self.inventory.pk,), get=params)
with self.current_user(self.super_django_user):
response = self.get(url, expect=200)
self.assertEqual(len(response['results']), 3)
for entry in response['results']:
self.assertEqual(entry['fact'][0]['name'], 'acpid')

View File

@ -139,12 +139,13 @@ def get_encryption_key(instance, field_name):
h.update(field_name)
return h.digest()[:16]
def encrypt_field(instance, field_name, ask=False):
def encrypt_field(instance, field_name, ask=False, subfield=None):
'''
Return content of the given instance and field name encrypted.
'''
value = getattr(instance, field_name)
if isinstance(value, dict) and subfield is not None:
value = value[subfield]
if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
return value
value = smart_str(value)
@ -157,11 +158,13 @@ def encrypt_field(instance, field_name, ask=False):
return '$encrypted$%s$%s' % ('AES', b64data)
def decrypt_field(instance, field_name):
def decrypt_field(instance, field_name, subfield=None):
'''
Return content of the given instance and field name decrypted.
'''
value = getattr(instance, field_name)
if isinstance(value, dict) and subfield is not None:
value = value[subfield]
if not value or not value.startswith('$encrypted$'):
return value
algo, b64data = value[len('$encrypted$'):].split('$', 1)
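# Hedged usage sketch of the new subfield parameter. The field and key names
# mirror the Twilio notifier test earlier in this diff; the notifier object
# itself is assumed, and this is not code from the diff.
config = notifier.notification_configuration  # e.g. {'account_token': 'shouldhide', ...}
config['account_token'] = encrypt_field(
    notifier, 'notification_configuration', subfield='account_token')
# Later, recover the cleartext (e.g. when actually sending a message):
token = decrypt_field(notifier, 'notification_configuration',
                      subfield='account_token')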

View File

@ -2,10 +2,10 @@
# This file is a utility Ansible plugin that is not part of the AWX or Ansible
# packages. It does not import any code from either package, nor does its
# license apply to Ansible or AWX.
#
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
@ -90,8 +90,12 @@ CENSOR_FIELD_WHITELIST=[
'skip_reason',
]
def censor(obj):
if obj.get('_ansible_no_log', False):
def censor(obj, no_log=False):
if not isinstance(obj, dict):
if no_log:
return "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
return obj
if obj.get('_ansible_no_log', no_log):
new_obj = {}
for k in CENSOR_FIELD_WHITELIST:
if k in obj:
@ -104,8 +108,12 @@ def censor(obj):
new_obj['censored'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
obj = new_obj
if 'results' in obj:
for i in xrange(len(obj['results'])):
obj['results'][i] = censor(obj['results'][i])
if isinstance(obj['results'], list):
for i in xrange(len(obj['results'])):
obj['results'][i] = censor(obj['results'][i], obj.get('_ansible_no_log', no_log))
elif obj.get('_ansible_no_log', False):
obj['results'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
return obj
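# Two illustrative calls against the updated censor() above (sketch only).
# Non-dict result with no_log propagated from a parent task:
censor('secret stdout', no_log=True)
# -> "the output has been hidden due to the fact that 'no_log: true' was
#    specified for this result"
# A loop result: each entry in 'results' is censored recursively, with the
# parent's _ansible_no_log flag passed down; here the item's own flag
# triggers the censoring.
event = {'_ansible_no_log': False,
         'results': [{'_ansible_no_log': True, 'cmd': 'secret'}]}
censored = censor(event)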
class TokenAuth(requests.auth.AuthBase):
@ -460,7 +468,7 @@ class JobCallbackModule(BaseCallbackModule):
# this from a normal task
self._log_event('playbook_on_task_start', task=task,
name=task.get_name())
def playbook_on_vars_prompt(self, varname, private=True, prompt=None,
encrypt=None, confirm=False, salt_size=None,
salt=None, default=None):
@ -529,6 +537,7 @@ class AdHocCommandCallbackModule(BaseCallbackModule):
def __init__(self):
self.ad_hoc_command_id = int(os.getenv('AD_HOC_COMMAND_ID', '0'))
self.rest_api_path = '/api/v1/ad_hoc_commands/%d/events/' % self.ad_hoc_command_id
self.skipped_hosts = set()
super(AdHocCommandCallbackModule, self).__init__()
def _log_event(self, event, **event_data):
@ -539,6 +548,18 @@ class AdHocCommandCallbackModule(BaseCallbackModule):
def runner_on_file_diff(self, host, diff):
pass # Ignore file diff for ad hoc commands.
def runner_on_ok(self, host, res):
# When running in check mode using a module that does not support check
# mode, Ansible v1.9 will call runner_on_skipped followed by
# runner_on_ok for the same host; only capture the skipped event and
# ignore the ok event.
if host not in self.skipped_hosts:
super(AdHocCommandCallbackModule, self).runner_on_ok(host, res)
def runner_on_skipped(self, host, item=None):
super(AdHocCommandCallbackModule, self).runner_on_skipped(host, item)
self.skipped_hosts.add(host)
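# Minimal standalone sketch of the skip-then-ok dedup above (assumption:
# this mirrors the guard added to AdHocCommandCallbackModule).
class _DedupSketch(object):
    def __init__(self):
        self.skipped_hosts = set()
        self.events = []
    def runner_on_skipped(self, host, item=None):
        self.skipped_hosts.add(host)
        self.events.append(('skipped', host))
    def runner_on_ok(self, host, res):
        if host not in self.skipped_hosts:
            self.events.append(('ok', host))

cb = _DedupSketch()
cb.runner_on_skipped('host1')
cb.runner_on_ok('host1', {})  # ignored; host1 already recorded as skipped
assert cb.events == [('skipped', 'host1')]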
if os.getenv('JOB_ID', ''):
CallbackModule = JobCallbackModule

View File

@ -342,6 +342,10 @@ CELERYBEAT_SCHEDULE = {
'task': 'awx.main.tasks.tower_periodic_scheduler',
'schedule': timedelta(seconds=30)
},
'admin_checks': {
'task': 'awx.main.tasks.run_administrative_checks',
'schedule': timedelta(days=30)
},
}
# Social Auth configuration.
@ -677,6 +681,10 @@ FACT_CACHE_PORT = 6564
ORG_ADMINS_CAN_SEE_ALL_USERS = True
TOWER_ADMIN_ALERTS = True
TOWER_URL_BASE = "https://towerhost"
TOWER_SETTINGS_MANIFEST = {
"SCHEDULE_MAX_JOBS": {
"name": "Maximum Scheduled Jobs",
@ -804,6 +812,20 @@ TOWER_SETTINGS_MANIFEST = {
"type": "bool",
"category": "system",
},
"TOWER_ADMIN_ALERTS": {
"name": "Enable Tower Administrator Alerts",
"description": "Allow Tower to email Admin users for system events that may require attention",
"default": TOWER_ADMIN_ALERTS,
"type": "bool",
"category": "system",
},
"TOWER_URL_BASE": {
"name": "Base URL of the Tower host",
"description": "This is used by services like Notifications to render a valid url to the Tower host",
"default": TOWER_URL_BASE,
"type": "string",
"category": "system",
},
"LICENSE": {
"name": "Tower License",
"description": "Controls what features and functionality is enabled in Tower.",

View File

@ -13,7 +13,6 @@ from split_settings.tools import optional, include
# Load default settings.
from defaults import * # NOQA
MONGO_HOST = '127.0.0.1'
MONGO_PORT = 27017
MONGO_USERNAME = None
@ -66,10 +65,13 @@ PASSWORD_HASHERS = (
# Configure a default UUID for development only.
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
STATSD_CLIENT = 'django_statsd.clients.normal'
STATSD_HOST = 'graphite'
STATSD_CLIENT = 'django_statsd.clients.null'
STATSD_HOST = None
STATSD_PREFIX = None
#STATSD_CLIENT = 'django_statsd.clients.normal'
#STATSD_HOST = 'graphite'
STATSD_PORT = 8125
STATSD_PREFIX = 'tower'
#STATSD_PREFIX = 'tower'
STATSD_MAXUDPSIZE = 512
# If there is an `/etc/tower/settings.py`, include it.

View File

@ -172,7 +172,7 @@ var tower = angular.module('Tower', [
'SchedulesHelper',
'JobsListDefinition',
'LogViewerStatusDefinition',
'LogViewerHelper',
'StandardOutHelper',
'LogViewerOptionsDefinition',
'EventViewerHelper',
'HostEventsViewerHelper',
@ -200,9 +200,9 @@ var tower = angular.module('Tower', [
.config(['$pendolyticsProvider', function($pendolyticsProvider) {
$pendolyticsProvider.doNotAutoStart();
}])
.config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider',
function ($stateProvider, $urlRouterProvider, $breadcrumbProvider) {
.config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider', '$urlMatcherFactoryProvider',
function ($stateProvider, $urlRouterProvider, $breadcrumbProvider, $urlMatcherFactoryProvider) {
$urlMatcherFactoryProvider.strictMode(false)
$breadcrumbProvider.setOptions({
templateUrl: urlPrefix + 'partials/breadcrumb.html'
});

View File

@ -149,7 +149,7 @@ Home.$inject = ['$scope', '$compile', '$stateParams', '$rootScope', '$location',
* @description This controls the 'home/groups' page that is loaded from the dashboard
*
*/
export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $stateParams, LogViewer, HomeGroupList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope,
export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $stateParams, HomeGroupList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope,
GetBasePath, SearchInit, PaginateInit, FormatDate, GetHostsStatusMsg, GetSyncStatusMsg, ViewUpdateStatus, GroupsEdit, Wait,
Alert, Rest, Empty, InventoryUpdate, Find, GroupsCancelUpdate, Store) {
@ -461,58 +461,6 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio
attachElem(event, html, title);
});
if (scope.removeGroupSummaryReady) {
scope.removeGroupSummaryReady();
}
scope.removeGroupSummaryReady = scope.$on('GroupSummaryReady', function(e, event, inventory, data) {
var html, title;
Wait('stop');
// Build the html for our popover
html = "<table class=\"table table-condensed flyout\" style=\"width: 100%\">\n";
html += "<thead>\n";
html += "<tr>";
html += "<th>Status</th>";
html += "<th>Last Sync</th>";
html += "<th>Group</th>";
html += "</tr>";
html += "</thead>\n";
html += "<tbody>\n";
data.results.forEach( function(row) {
html += "<tr>";
html += "<td><a href=\"\" ng-click=\"viewJob('" + row.related.last_update + "')\" aw-tool-tip=\"" + row.status.charAt(0).toUpperCase() + row.status.slice(1) + ". Click for details\" aw-tip-placement=\"top\"><i class=\"fa icon-job-" + row.status + "\"></i></a></td>";
html += "<td>" + ($filter('longDate')(row.last_updated)).replace(/ /,'<br />') + "</td>";
html += "<td><a href=\"\" ng-click=\"viewJob('" + row.related.last_update + "')\">" + ellipsis(row.summary_fields.group.name) + "</a></td>";
html += "</tr>\n";
});
html += "</tbody>\n";
html += "</table>\n";
title = "Sync Status";
attachElem(event, html, title);
});
scope.showGroupSummary = function(event, id) {
var group, status;
if (!Empty(id)) {
group = Find({ list: scope.home_groups, key: 'id', val: id });
status = group.summary_fields.inventory_source.status;
if (status === 'running' || status === 'failed' || status === 'error' || status === 'successful') {
Wait('start');
Rest.setUrl(group.related.inventory_sources + '?or__source=ec2&or__source=rax&order_by=-last_job_run&page_size=5');
Rest.get()
.success(function(data) {
scope.$emit('GroupSummaryReady', event, group, data);
})
.error(function(data, status) {
ProcessErrors( scope, data, status, null, { hdr: 'Error!',
msg: 'Call to ' + group.related.inventory_sources + ' failed. GET returned status: ' + status
});
});
}
}
};
scope.showHostSummary = function(event, id) {
var url, jobs = [];
if (!Empty(id)) {
@ -549,13 +497,6 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio
}
};
scope.viewJob = function(url) {
LogViewer({
scope: modal_scope,
url: url
});
};
scope.cancelUpdate = function(id) {
var group = Find({ list: scope.home_groups, key: 'id', val: id });
GroupsCancelUpdate({ scope: scope, group: group });
@ -564,7 +505,7 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio
}
HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$stateParams', 'LogViewer', 'HomeGroupList', 'generateList', 'ProcessErrors', 'ReturnToCaller',
HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$stateParams', 'HomeGroupList', 'generateList', 'ProcessErrors', 'ReturnToCaller',
'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'GetHostsStatusMsg', 'GetSyncStatusMsg', 'ViewUpdateStatus',
'GroupsEdit', 'Wait', 'Alert', 'Rest', 'Empty', 'InventoryUpdate', 'Find', 'GroupsCancelUpdate', 'Store', 'Socket'
];
@ -578,7 +519,7 @@ HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$l
*/
export function HomeHosts($scope, $location, $stateParams, HomeHostList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope,
GetBasePath, SearchInit, PaginateInit, FormatDate, SetStatus, ToggleHostEnabled, HostsEdit, Find, ShowJobSummary, ViewJob) {
GetBasePath, SearchInit, PaginateInit, FormatDate, SetStatus, ToggleHostEnabled, HostsEdit, Find, ShowJobSummary) {
ClearScope('htmlTemplate'); //Garbage collection. Don't leave behind any listeners/watchers from the prior
//scope.
@ -647,10 +588,6 @@ export function HomeHosts($scope, $location, $stateParams, HomeHostList, Generat
$scope.search(list.iterator);
};
$scope.viewJob = function(id) {
ViewJob({ scope: $scope, id: id });
};
$scope.toggleHostEnabled = function (id, sources) {
ToggleHostEnabled({
host_id: id,
@ -687,5 +624,5 @@ export function HomeHosts($scope, $location, $stateParams, HomeHostList, Generat
HomeHosts.$inject = ['$scope', '$location', '$stateParams', 'HomeHostList', 'generateList', 'ProcessErrors', 'ReturnToCaller',
'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'SetStatus', 'ToggleHostEnabled', 'HostsEdit',
'Find', 'ShowJobSummary', 'ViewJob'
'Find', 'ShowJobSummary'
];

View File

@ -16,7 +16,7 @@ export function InventoriesList($scope, $rootScope, $location, $log,
$stateParams, $compile, $filter, sanitizeFilter, Rest, Alert, InventoryList,
generateList, Prompt, SearchInit, PaginateInit, ReturnToCaller,
ClearScope, ProcessErrors, GetBasePath, Wait,
EditInventoryProperties, Find, Empty, LogViewer, $state) {
EditInventoryProperties, Find, Empty, $state) {
var list = InventoryList,
defaultUrl = GetBasePath('inventory'),
@ -295,10 +295,12 @@ export function InventoriesList($scope, $rootScope, $location, $log,
};
$scope.viewJob = function(url) {
LogViewer({
scope: $scope,
url: url
});
// Pull the id out of the URL
var id = url.replace(/^\//, '').split('/')[3];
$state.go('inventorySyncStdout', {id: id});
};
$scope.editInventoryProperties = function (inventory_id) {
@ -364,7 +366,7 @@ export function InventoriesList($scope, $rootScope, $location, $log,
InventoriesList.$inject = ['$scope', '$rootScope', '$location', '$log', '$stateParams', '$compile', '$filter', 'sanitizeFilter', 'Rest', 'Alert', 'InventoryList', 'generateList',
'Prompt', 'SearchInit', 'PaginateInit', 'ReturnToCaller', 'ClearScope', 'ProcessErrors',
'GetBasePath', 'Wait', 'EditInventoryProperties', 'Find', 'Empty', 'LogViewer', '$state'
'GetBasePath', 'Wait', 'EditInventoryProperties', 'Find', 'Empty', '$state'
];
@ -781,7 +783,7 @@ export function InventoriesManage ($log, $scope, $rootScope, $location,
GetHostsStatusMsg, GroupsEdit, InventoryUpdate, GroupsCancelUpdate,
ViewUpdateStatus, GroupsDelete, Store, HostsEdit, HostsDelete,
EditInventoryProperties, ToggleHostEnabled, ShowJobSummary,
InventoryGroupsHelp, HelpDialog, ViewJob,
InventoryGroupsHelp, HelpDialog,
GroupsCopy, HostsCopy, $stateParams) {
var PreviousSearchParams,
@ -1254,12 +1256,8 @@ export function InventoriesManage ($log, $scope, $rootScope, $location,
opts.autoShow = params.autoShow || false;
}
HelpDialog(opts);
};
$scope.viewJob = function(id) {
ViewJob({ scope: $scope, id: id });
};
}
;
$scope.showHosts = function (group_id, show_failures) {
// Clicked on group
if (group_id !== null) {
@ -1293,6 +1291,6 @@ InventoriesManage.$inject = ['$log', '$scope', '$rootScope', '$location',
'GroupsEdit', 'InventoryUpdate', 'GroupsCancelUpdate', 'ViewUpdateStatus',
'GroupsDelete', 'Store', 'HostsEdit', 'HostsDelete',
'EditInventoryProperties', 'ToggleHostEnabled', 'ShowJobSummary',
'InventoryGroupsHelp', 'HelpDialog', 'ViewJob', 'GroupsCopy',
'InventoryGroupsHelp', 'HelpDialog', 'GroupsCopy',
'HostsCopy', '$stateParams'
];

View File

@ -15,7 +15,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams,
Rest, Alert, ProjectList, GenerateList, Prompt, SearchInit,
PaginateInit, ReturnToCaller, ClearScope, ProcessErrors, GetBasePath,
SelectionInit, ProjectUpdate, Refresh, Wait, GetChoices, Empty,
Find, LogViewer, GetProjectIcon, GetProjectToolTip, $filter, $state) {
Find, GetProjectIcon, GetProjectToolTip, $filter, $state) {
ClearScope();
@ -200,24 +200,19 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams,
$state.transitionTo('projects.edit', {id: id});
};
if ($scope.removeShowLogViewer) {
$scope.removeShowLogViewer();
if ($scope.removeGoToJobDetails) {
$scope.removeGoToJobDetails();
}
$scope.removeShowLogViewer = $scope.$on('ShowLogViewer', function(e, data) {
if (data.related.current_update) {
$scope.removeGoToJobDetails = $scope.$on('GoToJobDetails', function(e, data) {
if (data.summary_fields.current_update || data.summary_fields.last_update) {
Wait('start');
LogViewer({
scope: $scope,
url: data.related.current_update,
getIcon: GetProjectIcon
});
} else if (data.related.last_update) {
Wait('start');
LogViewer({
scope: $scope,
url: data.related.last_update,
getIcon: GetProjectIcon
});
// Grab the id from summary_fields
var id = (data.summary_fields.current_update) ? data.summary_fields.current_update.id : data.summary_fields.last_update.id;
$state.go('scmUpdateStdout', {id: id});
} else {
Alert('No Updates Available', 'There is no SCM update information available for this project. An update has not yet been ' +
' completed. If you have not already done so, start an update for this project.', 'alert-info');
@ -235,7 +230,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams,
Rest.setUrl(project.url);
Rest.get()
.success(function(data) {
$scope.$emit('ShowLogViewer', data);
$scope.$emit('GoToJobDetails', data);
})
.error(function(data, status) {
ProcessErrors($scope, data, status, null, { hdr: 'Error!',
@ -374,7 +369,7 @@ ProjectsList.$inject = ['$scope', '$rootScope', '$location', '$log',
'SearchInit', 'PaginateInit', 'ReturnToCaller', 'ClearScope',
'ProcessErrors', 'GetBasePath', 'SelectionInit', 'ProjectUpdate',
'Refresh', 'Wait', 'GetChoices', 'Empty', 'Find',
'LogViewer', 'GetProjectIcon', 'GetProjectToolTip', '$filter', '$state'
'GetProjectIcon', 'GetProjectToolTip', '$filter', '$state'
];

View File

@ -21,7 +21,6 @@ import JobTemplates from "./helpers/JobTemplates";
import Jobs from "./helpers/Jobs";
import License from "./helpers/License";
import LoadConfig from "./helpers/LoadConfig";
import LogViewer from "./helpers/LogViewer";
import PaginationHelpers from "./helpers/PaginationHelpers";
import Parse from "./helpers/Parse";
import ProjectPath from "./helpers/ProjectPath";
@ -58,7 +57,6 @@ export
Jobs,
License,
LoadConfig,
LogViewer,
PaginationHelpers,
Parse,
ProjectPath,

View File

@ -13,8 +13,8 @@
export default
angular.module('EventViewerHelper', ['ModalDialog', 'Utilities', 'EventsViewerFormDefinition', 'HostsHelper'])
.factory('EventViewer', ['$compile', 'CreateDialog', 'GetEvent', 'Wait', 'EventAddTable', 'GetBasePath', 'LookUpName', 'Empty', 'EventAddPreFormattedText',
function($compile, CreateDialog, GetEvent, Wait, EventAddTable, GetBasePath, LookUpName, Empty, EventAddPreFormattedText) {
.factory('EventViewer', ['$compile', 'CreateDialog', 'GetEvent', 'Wait', 'EventAddTable', 'GetBasePath', 'Empty', 'EventAddPreFormattedText',
function($compile, CreateDialog, GetEvent, Wait, EventAddTable, GetBasePath, Empty, EventAddPreFormattedText) {
return function(params) {
var parent_scope = params.scope,
url = params.url,

View File

@ -18,7 +18,7 @@ export default
angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name, 'GroupListDefinition', 'SearchHelper',
'PaginationHelpers', listGenerator.name, 'GroupsHelper', 'InventoryHelper', 'SelectionHelper',
'JobSubmissionHelper', 'RefreshHelper', 'PromptDialog', 'CredentialsListDefinition', 'InventoryTree',
'InventoryStatusDefinition', 'VariablesHelper', 'SchedulesListDefinition', 'SourceFormDefinition', 'LogViewerHelper',
'InventoryStatusDefinition', 'VariablesHelper', 'SchedulesListDefinition', 'SourceFormDefinition', 'StandardOutHelper',
'SchedulesHelper'
])
@ -65,8 +65,8 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name
* TODO: Document
*
*/
.factory('ViewUpdateStatus', ['Rest', 'ProcessErrors', 'GetBasePath', 'Alert', 'Wait', 'Empty', 'Find', 'LogViewer',
function (Rest, ProcessErrors, GetBasePath, Alert, Wait, Empty, Find, LogViewer) {
.factory('ViewUpdateStatus', ['$state', 'Rest', 'ProcessErrors', 'GetBasePath', 'Alert', 'Wait', 'Empty', 'Find',
function ($state, Rest, ProcessErrors, GetBasePath, Alert, Wait, Empty, Find) {
return function (params) {
var scope = params.scope,
@ -76,11 +76,13 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name
if (scope.removeSourceReady) {
scope.removeSourceReady();
}
scope.removeSourceReady = scope.$on('SourceReady', function(e, url) {
LogViewer({
scope: scope,
url: url
});
scope.removeSourceReady = scope.$on('SourceReady', function(e, source) {
// Get the ID from the correct summary field
var update_id = (source.current_update) ? source.summary_fields.current_update.id : source.summary_fields.last_update.id;
$state.go('inventorySyncStdout', {id: update_id});
});
if (group) {
@ -94,8 +96,7 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name
Rest.setUrl(group.related.inventory_source);
Rest.get()
.success(function (data) {
var url = (data.related.current_update) ? data.related.current_update : data.related.last_update;
scope.$emit('SourceReady', url);
scope.$emit('SourceReady', data);
})
.error(function (data, status) {
ProcessErrors(scope, data, status, null, { hdr: 'Error!',

View File

@ -20,7 +20,7 @@ export default
angular.module('HostsHelper', [ 'RestServices', 'Utilities', listGenerator.name, 'HostListDefinition',
'SearchHelper', 'PaginationHelpers', listGenerator.name, 'HostsHelper',
'InventoryHelper', 'RelatedSearchHelper', 'InventoryFormDefinition', 'SelectionHelper',
'HostGroupsFormDefinition', 'VariablesHelper', 'ModalDialog', 'LogViewerHelper',
'HostGroupsFormDefinition', 'VariablesHelper', 'ModalDialog', 'StandardOutHelper',
'GroupListDefinition'
])
@ -159,17 +159,6 @@ angular.module('HostsHelper', [ 'RestServices', 'Utilities', listGenerator.name,
};
}])
.factory('ViewJob', ['LogViewer', 'GetBasePath', function(LogViewer, GetBasePath) {
return function(params) {
var scope = params.scope,
id = params.id;
LogViewer({
scope: scope,
url: GetBasePath('jobs') + id + '/'
});
};
}])
.factory('HostsReload', [ '$stateParams', 'Empty', 'InventoryHosts', 'GetBasePath', 'SearchInit', 'PaginateInit', 'Wait',
'SetHostStatus', 'SetStatus', 'ApplyEllipsis',
function($stateParams, Empty, InventoryHosts, GetBasePath, SearchInit, PaginateInit, Wait, SetHostStatus, SetStatus,

View File

@ -14,7 +14,7 @@ import listGenerator from '../shared/list-generator/main';
export default
angular.module('JobsHelper', ['Utilities', 'RestServices', 'FormGenerator', 'JobSummaryDefinition', 'InventoryHelper', 'GeneratorHelpers',
'JobSubmissionHelper', 'LogViewerHelper', 'SearchHelper', 'PaginationHelpers', 'AdhocHelper', listGenerator.name])
'JobSubmissionHelper', 'StandardOutHelper', 'SearchHelper', 'PaginationHelpers', 'AdhocHelper', listGenerator.name])
/**
* JobsControllerInit({ scope: $scope });
@ -22,8 +22,8 @@ export default
* Initialize calling scope with all the bits required to support a jobs list
*
*/
.factory('JobsControllerInit', ['$state', 'Find', 'DeleteJob', 'RelaunchJob', 'LogViewer', '$window',
function($state, Find, DeleteJob, RelaunchJob, LogViewer, $window) {
.factory('JobsControllerInit', ['$state', 'Find', 'DeleteJob', 'RelaunchJob', '$window',
function($state, Find, DeleteJob, RelaunchJob, $window) {
return function(params) {
var scope = params.scope,
iterator = (params.iterator) ? params.iterator : scope.iterator;

View File

@ -1,390 +0,0 @@
/*************************************************
* Copyright (c) 2015 Ansible, Inc.
*
* All Rights Reserved
*************************************************/
/**
* @ngdoc function
* @name helpers.function:LogViewer
* @description logviewer
*/
export default
angular.module('LogViewerHelper', ['ModalDialog', 'Utilities', 'FormGenerator', 'VariablesHelper'])
.factory('LogViewer', ['$location', '$compile', 'CreateDialog', 'GetJob', 'Wait', 'GenerateForm', 'LogViewerStatusForm', 'AddTable', 'AddTextarea',
'LogViewerOptionsForm', 'EnvTable', 'GetBasePath', 'LookUpName', 'Empty', 'AddPreFormattedText', 'ParseVariableString', 'GetChoices',
function($location, $compile, CreateDialog, GetJob, Wait, GenerateForm, LogViewerStatusForm, AddTable, AddTextarea, LogViewerOptionsForm, EnvTable,
GetBasePath, LookUpName, Empty, AddPreFormattedText, ParseVariableString, GetChoices) {
return function(params) {
var parent_scope = params.scope,
url = params.url,
getIcon = params.getIcon,
scope = parent_scope.$new(true),
base = $location.path().replace(/^\//, '').split('/')[0],
pieces;
if (scope.removeModalReady) {
scope.removeModalReady();
}
scope.removeModalReady = scope.$on('ModalReady', function() {
Wait('stop');
$('#logviewer-modal-dialog').dialog('open');
});
if (scope.removeJobReady) {
scope.removeJobReady();
}
scope.removeJobReady = scope.$on('JobReady', function(e, data) {
var key, resizeText, elem;
$('#status-form-container').empty();
$('#options-form-container').empty();
$('#stdout-form-container').empty();
$('#traceback-form-container').empty();
$('#variables-container').empty();
$('#source-container').empty();
$('#logview-tabs li:eq(1)').hide();
$('#logview-tabs li:eq(2)').hide();
$('#logview-tabs li:eq(4)').hide();
$('#logview-tabs li:eq(5)').hide();
// Make sure subsequent scope references don't bubble up to the parent
for (key in LogViewerStatusForm.fields) {
scope[key] = '';
}
for (key in LogViewerOptionsForm.fields) {
scope[key] = '';
}
for (key in data) {
scope[key] = data[key];
}
scope.created_by = '';
scope.job_template = '';
if (data.related.created_by) {
pieces = data.related.created_by.replace(/^\//,'').replace(/\/$/,'').split('/');
scope.created_by = parseInt(pieces[pieces.length - 1],10);
LookUpName({
scope: scope,
scope_var: 'created_by',
url: GetBasePath('users') + scope.created_by + '/'
});
}
// For jobs link the name to the job parent
if (base === 'jobs') {
if (data.type === 'job') {
scope.name_link = "job_template";
scope.job_template = data.unified_job_template;
scope.job_template_name = (data.summary_fields.job_template) ? data.summary_fields.job_template.name : data.name;
scope.name_id = data.unified_job_template;
}
if (data.type === 'project_update') {
scope.name_link = "project";
scope.name_id = data.unified_job_template;
}
if (data.type === 'inventory_update') {
scope.name_link = "inventory_source";
scope.name_id = scope.group;
}
}
AddTable({ scope: scope, form: LogViewerStatusForm, id: 'status-form-container', getIcon: getIcon });
AddTable({ scope: scope, form: LogViewerOptionsForm, id: 'options-form-container', getIcon: getIcon });
if (data.result_stdout) {
$('#logview-tabs li:eq(1)').show();
var showStandardOut = (data.type !== "system_job") ? true : false;
AddPreFormattedText({
id: 'stdout-form-container',
val: data.result_stdout,
standardOut: showStandardOut,
jobUrl: data.url
});
}
if (data.result_traceback) {
$('#logview-tabs li:eq(2)').show();
AddPreFormattedText({
id: 'traceback-form-container',
val: data.result_traceback
});
}
/*if (data.job_env) {
EnvTable({
id: 'env-form-container',
vars: data.job_env
});
}*/
if (data.extra_vars) {
$('#logview-tabs li:eq(4)').show();
AddTextarea({
container_id: 'variables-container',
fld_id: 'variables',
val: ParseVariableString(data.extra_vars)
});
}
if (data.source_vars) {
$('#logview-tabs li:eq(5)').show();
AddTextarea({
container_id: 'source-container',
fld_id: 'source-variables',
val: ParseVariableString(data.source_vars)
});
}
if (!Empty(scope.source)) {
if (scope.removeChoicesReady) {
scope.removeChoicesReady();
}
scope.removeChoicesReady = scope.$on('ChoicesReady', function() {
scope.source_choices.every(function(e) {
if (e.value === scope.source) {
scope.source = e.label;
return false;
}
return true;
});
});
GetChoices({
scope: scope,
url: GetBasePath('inventory_sources'),
field: 'source',
variable: 'source_choices',
choice_name: 'choices',
callback: 'ChoicesReady'
});
}
if (!Empty(scope.credential)) {
LookUpName({
scope: scope,
scope_var: 'credential',
url: GetBasePath('credentials') + scope.credential + '/'
});
}
if (!Empty(scope.inventory)) {
LookUpName({
scope: scope,
scope_var: 'inventory',
url: GetBasePath('inventory') + scope.inventory + '/'
});
}
if (!Empty(scope.project)) {
LookUpName({
scope: scope,
scope_var: 'project',
url: GetBasePath('projects') + scope.project + '/'
});
}
if (!Empty(scope.cloud_credential)) {
LookUpName({
scope: scope,
scope_var: 'cloud_credential',
url: GetBasePath('credentials') + scope.cloud_credential + '/'
});
}
if (!Empty(scope.inventory_source)) {
LookUpName({
scope: scope,
scope_var: 'inventory_source',
url: GetBasePath('inventory_sources') + scope.inventory_source + '/'
});
}
resizeText = function() {
var u = $('#logview-tabs').outerHeight() + 25,
h = $('#logviewer-modal-dialog').innerHeight(),
rows = Math.floor((h - u) / 20);
rows -= 3;
rows = (rows < 6) ? 6 : rows;
$('#logviewer-modal-dialog #variables').attr({ rows: rows });
$('#logviewer-modal-dialog #source-variables').attr({ rows: rows });
};
elem = angular.element(document.getElementById('logviewer-modal-dialog'));
$compile(elem)(scope);
CreateDialog({
scope: scope,
width: 600,
height: 550,
minWidth: 450,
callback: 'ModalReady',
id: 'logviewer-modal-dialog',
onResizeStop: resizeText,
title: 'Job Results',
onOpen: function() {
$('#logview-tabs a:first').tab('show');
$('#dialog-ok-button').focus();
resizeText();
}
});
});
GetJob({
url: url,
scope: scope
});
scope.modalOK = function() {
$('#logviewer-modal-dialog').dialog('close');
scope.$destroy();
};
};
}])
.factory('GetJob', ['Rest', 'ProcessErrors', function(Rest, ProcessErrors) {
return function(params) {
var url = params.url,
scope = params.scope;
Rest.setUrl(url);
Rest.get()
.success(function(data){
scope.$emit('JobReady', data);
})
.error(function(data, status) {
ProcessErrors(scope, data, status, null, { hdr: 'Error!',
msg: 'Failed to retrieve ' + url + '. GET returned: ' + status });
});
};
}])
.factory('LookUpName', ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) {
return function(params) {
var url = params.url,
scope_var = params.scope_var,
scope = params.scope;
Rest.setUrl(url);
Rest.get()
.success(function(data) {
if (scope_var === 'inventory_source') {
scope[scope_var + '_name'] = data.summary_fields.group.name;
}
else if (!Empty(data.name)) {
scope[scope_var + '_name'] = data.name;
}
if (!Empty(data.group)) {
// Used for inventory_source
scope.group = data.group;
}
})
.error(function(data, status) {
ProcessErrors(scope, data, status, null, { hdr: 'Error!',
msg: 'Failed to retrieve ' + url + '. GET returned: ' + status });
});
};
}])
.factory('AddTable', ['$compile', 'Empty', 'Find', function($compile, Empty, Find) {
return function(params) {
var form = params.form,
id = params.id,
scope = params.scope,
getIcon = params.getIcon,
fld, html, url, e,
urls = [
{ "variable": "credential", "url": "/#/credentials/" },
{ "variable": "project", "url": "/#/projects/" },
{ "variable": "inventory", "url": "/#/inventories/" },
{ "variable": "cloud_credential", "url": "/#/credentials/" },
{ "variable": "inventory_source", "url": "/#/home/groups/?id={{ group }}" },
{ "variable": "job_template", "url": "/#/job_templates/" },
{ "variable": "created_by", "url": "/#/users/" }
];
html = "<table class=\"table logviewer-status\">\n";
for (fld in form.fields) {
if (!Empty(scope[fld])) {
html += "<tr><td class=\"fld-label col-md-3 col-sm-3 col-xs-3\">" + form.fields[fld].label + "</td>" +
"<td>";
url = Find({ list: urls, key: "variable", val: fld });
if (url) {
html += "<a href=\"" + url.url;
html += (fld === "inventory_source") ? "" : scope[fld];
html += "\" ng-click=\"modalOK()\">{{ " + fld + '_name' + " }}</a>";
}
else if (fld === 'name' && scope.name_link) {
url = Find({ list: urls, key: "variable", val: scope.name_link });
html += "<a href=\"" + url.url + ( (scope.name_link === 'inventory_source') ? '' : scope.name_id ) + "\" ng-click=\"modalOK()\">{{ " +
( (scope.name_link === 'inventory_source') ? 'inventory_source_name' : fld ) + " }}</a>";
}
else if (fld === 'elapsed') {
html += scope[fld] + " <span class=\"small-text\">seconds</span>";
}
else if (fld === 'status') {
if (getIcon) {
html += "<i class=\"fa icon-job-" + getIcon(scope[fld]) + "\"></i> " + scope[fld];
}
else {
html += "<i class=\"fa icon-job-" + scope[fld] + "\"></i> " + scope[fld];
}
if (scope.job_explanation) {
html += "<p style=\"padding-top: 12px\">" + scope.job_explanation + "</p>";
}
}
else {
html += "{{ " + fld ;
html += (form.fields[fld].filter) ? " | " + form.fields[fld].filter : "" ;
html += " }}";
}
html += "</td></tr>\n";
}
}
html += "</table>\n";
e = angular.element(document.getElementById(id));
e.empty().html(html);
$compile(e)(scope);
};
}])
.factory('AddTextarea', [ function() {
return function(params) {
var container_id = params.container_id,
val = params.val,
fld_id = params.fld_id,
html;
html = "<div class=\"form-group\">\n" +
"<textarea id=\"" + fld_id + "\" ng-non-bindable class=\"form-control mono-space\" rows=\"12\" readonly>" + val + "</textarea>" +
"</div>\n";
$('#' + container_id).empty().html(html);
};
}])
.factory('AddPreFormattedText', ['$rootScope', function($rootScope) {
return function(params) {
var id = params.id,
val = params.val,
html = "";
if (params.standardOut) {
html += '<a ng-href="' + params.jobUrl + 'stdout?format=txt_download&token=' + $rootScope.token + '" class="btn btn-primary btn-xs DownloadStandardOut DownloadStandardOut--onModal" id="download-stdout-button" type="button" aw-tool-tip="Download standard out as a .txt file" data-placement="top" ng-show="status === \'cancelled\' || status === \'failed\' || status === \'error\' || status === \'successful\'"><i class="fa fa-download DownloadStandardOut-icon DownloadStandardOut-icon--withText"></i>Download</a>';
html += "<pre class='DownloadStandardOut-pre' ng-non-bindable>" + val + "</pre>\n";
} else {
html += "<pre ng-non-bindable>" + val + "</pre>\n";
}
$('#' + id).empty().html(html);
};
}])
.factory('EnvTable', [ function() {
return function(params) {
var id = params.id,
vars = params.vars,
key, html;
html = "<table class=\"table logviewer-status\">\n";
for (key in vars) {
html += "<tr><td class=\"fld-label col-md-4 col-sm-3 col-xs-3 break\">" + key + "</td>" +
"<td ng-non-bindable class=\"break\">" + vars[key] + "</td></tr>\n";
}
html += "</table>\n";
$('#' + id).empty().html(html);
};
}]);

View File

@ -9,8 +9,9 @@ import stdoutManagementJobsRoute from './management-jobs/standard-out-management
import stdoutInventorySyncRoute from './inventory-sync/standard-out-inventory-sync.route';
import stdoutScmUpdateRoute from './scm-update/standard-out-scm-update.route';
import {JobStdoutController} from './standard-out.controller';
import StandardOutHelper from './standard-out-factories/main';
export default angular.module('standardOut', [])
export default angular.module('standardOut', [StandardOutHelper.name])
.controller('JobStdoutController', JobStdoutController)
.run(['$stateExtender', function($stateExtender) {
$stateExtender.addState(stdoutAdhocRoute);

View File

@ -0,0 +1,32 @@
/*************************************************
* Copyright (c) 2016 Ansible, Inc.
*
* All Rights Reserved
*************************************************/
export default
['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) {
return function(params) {
var url = params.url,
scope_var = params.scope_var,
scope = params.scope;
Rest.setUrl(url);
Rest.get()
.success(function(data) {
if (scope_var === 'inventory_source') {
scope[scope_var + '_name'] = data.summary_fields.group.name;
}
else if (!Empty(data.name)) {
scope[scope_var + '_name'] = data.name;
}
if (!Empty(data.group)) {
// Used for inventory_source
scope.group = data.group;
}
})
.error(function(data, status) {
ProcessErrors(scope, data, status, null, { hdr: 'Error!',
msg: 'Failed to retrieve ' + url + '. GET returned: ' + status });
});
};
}];

View File

@ -0,0 +1,11 @@
/*************************************************
* Copyright (c) 2016 Ansible, Inc.
*
* All Rights Reserved
*************************************************/
import lookUpName from './lookup-name.factory';
export default
angular.module('StandardOutHelper', [])
.factory('LookUpName', lookUpName);

View File

@ -195,7 +195,7 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi
return true;
});
});
// GetChoices can be found in the helper: LogViewer.js
// GetChoices can be found in the helper: StandardOut.js
// It attaches the source choices to $scope.source_choices.
// Then, when the callback is fired, $scope.source is bound
// to the corresponding label.
@ -209,7 +209,7 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi
});
}
// LookUpName can be found in the helper: LogViewer.js
// LookUpName can be found in the helper: StandardOut.js
// It attaches the name that it gets (based on the url)
// to the $scope variable defined by the attribute scope_var.
if (!Empty(data.credential)) {

View File

@ -0,0 +1,27 @@
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

187
docs/notification_system.md Normal file
View File

@ -0,0 +1,187 @@
Completion is pending unit tests plus acceptance info and instructions. The following documentation will likely be moved to the feature epic card and reproduced in our development documentation.
# Notification System Overview
A Notifier is an instance of a notification type (Email, Slack, Webhook, etc.) with a name, description, and a defined configuration (a few examples: username, password, server, and recipients for the Email type; token and list of channels for Slack; URL and headers for webhooks).
A Notification is a manifestation of the Notifier... for example, when a job fails, a notification is sent using the configuration defined by the Notifier.
This PR implements the Notification system as outlined in the 3.0 Notifications spec. At a high level, the typical flow is:
* User creates a Notifier at `/api/v1/notifiers`
* User assigns the notifier to any of the various objects that support it (all variants of job templates, as well as organizations and projects) at the appropriate trigger level for which they want the notification (error, success, or any). For example, a user may wish to assign a particular Notifier to trigger when `Job Template 1` fails, in which case they will associate the notifier with the job template at `/api/v1/job_templates/n/notifiers_error` (see the sketch after this list).
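As a rough illustration of that flow, here is a minimal sketch using the Python `requests` library; the host, credentials, payload field names (`notification_type`, `notification_configuration`), and the attach-by-id pattern are assumptions for the example, not confirmed API details.
```
import requests

TOWER = "https://tower.example.com"  # assumed host
AUTH = ("admin", "password")         # assumed basic-auth credentials

# Create a Notifier (payload field names are illustrative assumptions).
notifier = requests.post(
    TOWER + "/api/v1/notifiers/",
    auth=AUTH,
    json={
        "name": "Slack on failure",
        "description": "Ping #ops when jobs fail",
        "notification_type": "slack",
        "notification_configuration": {
            "token": "xoxb-...",      # bot token from Slack's integration settings
            "channels": ["#ops"],
        },
    },
).json()

# Associate it with Job Template 1 so it triggers on error.
requests.post(
    TOWER + "/api/v1/job_templates/1/notifiers_error/",
    auth=AUTH,
    json={"id": notifier["id"]},
)
```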
## Notifier hierarchy
Notifiers assigned at certain levels will inherit notifiers defined on parent objects as such:
* Job Templates will use notifiers defined on them, as well as notifiers inherited from the Project used by the Job Template and from the Organization that it is listed under (via the Project).
* Project Updates will use notifiers defined on the project and will inherit notifiers from the Organization associated with it.
* Inventory Updates will use notifiers defined on the Organization that it is listed under
* Ad-hoc commands will use notifiers defined on the Organization that the inventory is associated with
## Workflow
When a job succeeds or fails, the error or success handler will pull a list of relevant notifiers using the procedure defined above. It will then create a Notification object for each one, containing relevant details about the job, and **send** it to the destination (email addresses, Slack channel(s), SMS numbers, etc.). These Notification objects are available as related resources on job types (jobs, inventory updates, project updates), and also at `/api/v1/notifications`. You may also see what notifications have been sent from a notifier by examining its related resources.
Notifications can succeed or fail, but that will not cause their associated job to succeed or fail. The status of a notification can be viewed at its detail endpoint, `/api/v1/notifications/<n>`.
## Testing Notifiers before using them
Once a Notifier is created, its configuration can be tested via the endpoint at `/api/v1/notifiers/<n>/test`. This will emit a test notification using the configuration defined by the Notifier. These test notifications also appear in the notifications list at `/api/v1/notifications`.
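Exercising the test endpoint is just an empty POST; a minimal sketch with `requests` (host, notifier id, and credentials are assumed):
```
import requests

# Assumed host, notifier id, and credentials -- substitute your own.
r = requests.post(
    "https://tower.example.com/api/v1/notifiers/42/test/",
    auth=("admin", "password"),
)
print(r.status_code)  # the resulting test notification shows up at /api/v1/notifications
```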
# Notification Types
The currently defined Notification Types are:
* Email
* Slack
* Hipchat
* Pagerduty
* Twilio
* IRC
* Webhook
Each of these has its own configuration and behavioral semantics, and testing them may need to be approached in different ways. The following sections give as much detail as possible.
## Email
The email notification type supports a wide variety of SMTP servers and supports SSL/TLS connections.
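For orientation, an email Notifier configuration might look roughly like the sketch below; the field names (`host`, `port`, `use_tls`, and so on) are assumptions extrapolated from the description above, not the actual API schema.
```
# Illustrative only -- field names are assumed, not taken from the API schema.
email_configuration = {
    "username": "tower@example.com",
    "password": "secret",
    "host": "smtp.example.com",   # the "server" mentioned above
    "port": 587,
    "use_tls": True,
    "recipients": ["ops@example.com", "oncall@example.com"],
}
```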
### Testing considerations
The following should be performed for good acceptance:
* Test plain authentication
* Test SSL and TLS authentication
* Verify single and multiple recipients
* Verify message subject and contents are formatted sanely. They should be plaintext but readable.
### Test Service
Either set up a local SMTP mail service or use a hosted one; here are some options:
* postfix service on galaxy: https://galaxy.ansible.com/debops/postfix/
* Mailtrap has a good free plan and should provide all of the features we need under that plan: https://mailtrap.io/
## Slack
Slack is pretty easy to configure: it just needs a token, which you can get by creating a bot in the integrations settings for the Slack team.
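In configuration terms that amounts to little more than the token and the target channels (field names assumed for illustration):
```
# Illustrative only -- field names are assumed.
slack_configuration = {
    "token": "xoxb-...",              # bot token from the team's integration settings
    "channels": ["#builds", "#ops"],  # single or multiple channels
}
```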
### Testing considerations
The following should be performed for good acceptance:
* Test single and multiple channels and good formatting of the message. Note that Slack notifications only contain the minimal information.
### Test Service
Any user of the Ansible Slack service can create a bot integration (which is how this notification is implemented). Remember to invite the bot to the channel first.
## Hipchat
There are several ways to integrate with Hipchat. The Tower implementation uses Hipchat "Integrations", currently found at the bottom right of the main Hipchat webview. From there, select "Build your own Integration"; after creating it, Hipchat will list the `auth_token` that needs to be supplied to Tower. Some other relevant details on the fields accepted by Tower for the Hipchat notification type (an example configuration follows the list):
* `color`: This will highlight the message as the given color. If set to something Hipchat doesn't expect, the notification will generate an error, but it's pretty rad. I like green personally.
* `notify`: Selecting this will cause the bot to "notify" channel members. Normally the message just lands in the chat channel without triggering anyone's notifications; this option notifies users of the channel while respecting their existing notification settings (browser notification, email fallback, etc.)
* `message_from`: Along with the integration name itself, this puts another label on the notification. I reckon this would be helpful for distinguishing multiple services that use the same integration.
* `api_url`: The URL of the Hipchat API service. If you create a team hosted by them, it'll be something like `https://team.hipchat.com`. For a self-hosted service, it'll be the HTTP URL that is accessible by Tower.
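Putting those fields together, a Hipchat configuration might look like the following; the room-list field name (`rooms`) is an assumption for the example, while the rest follow the options above.
```
# Illustrative only -- "rooms" is an assumed name for the channel list.
hipchat_configuration = {
    "auth_token": "abc123",
    "rooms": ["engineering"],
    "color": "green",                 # invalid colors make the notification error out
    "notify": True,                   # actually notify channel members
    "message_from": "Tower",          # extra label on the notification
    "api_url": "https://team.hipchat.com",
}
```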
### Testing considerations
* Make sure all options behave as expected
* Test single and multiple channels
* Test that notification preferences are obeyed.
* Test formatting and appearance. Note that, like Slack, Hipchat will use the minimal version of the notification.
* Test the standalone Hipchat service for parity with the hosted solution
### Test Service
Hipchat allows you to create a team with limited users and message history for free, which is easy to set up and get started with. Hipchat also offers a self-hosted server, which we should test for parity; it has a 30-day trial, but there might be some other way to negotiate with them, Red Hat, or Ansible itself:
https://www.hipchat.com/server
## Pagerduty
Pagerduty is a fairly straightforward integration. The user creates an API Key in the Pagerduty system (this is the token that is given to Tower) and then creates a "Service", which provides an "Integration Key" that is also given to Tower. The other options of note are (an example configuration follows the list):
* `subdomain`: When you sign up for the Pagerduty account, you will get a unique subdomain to communicate with. For instance, if you signed up as "towertest", the web dashboard will be at towertest.pagerduty.com and you will give the Tower API "towertest" as the subdomain (not the full domain).
* `client_name`: This is sent along with the alert content to the Pagerduty service to help identify the service that is using the API key/service. This is helpful if multiple integrations are using the same API key and service.
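A Pagerduty configuration therefore carries four pieces; the field names below (`token`, `service_key`, etc.) are assumptions for illustration.
```
# Illustrative only -- field names are assumed.
pagerduty_configuration = {
    "token": "API_KEY_FROM_PAGERDUTY",
    "subdomain": "towertest",           # just the subdomain, not the full domain
    "service_key": "INTEGRATION_KEY",   # from the Pagerduty "Service"
    "client_name": "Tower staging",     # identifies this integration on alerts
}
```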
### Testing considerations
* Make sure the alert lands on the pagerduty service
* Verify that the minimal information is displayed for the notification, but also that the notification detail contains all fields. Pagerduty itself should understand the format in which we send the detail information.
### Test Service
Pagerduty allows you to sign up for a free trial of the service. We may also have an Ansible-wide Pagerduty service that we could tie into for other things.
## Twilio
Twilio is a voice and SMS automation service. Once you are signed in, you'll need to create a phone number from which the message will be sent. You'll then define a "Messaging Service" under Programmable SMS and associate the number you created with it. Note that you may need to verify this number or some other information before you are allowed to use it to send to any numbers. The Messaging Service needs neither a status callback URL nor the ability to process inbound messages.
Under your individual (or sub) account settings, you will have API credentials. The Account SID and AuthToken are what will be given to Tower. There are a couple of other important fields (see the example after this list):
* `from_number`: This is the number associated with the messaging service above and must be given in the form "+15556667777"
* `to_numbers`: This will be the list of numbers to receive the SMS; each should be a 10-digit phone number
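In configuration form that looks roughly like this (field names assumed, following Twilio's terminology above):
```
# Illustrative only -- field names are assumed.
twilio_configuration = {
    "account_sid": "ACxxxxxxxxxxxxxxxx",
    "account_token": "your_auth_token",
    "from_number": "+15556667777",               # the messaging-service number
    "to_numbers": ["5551112222", "5553334444"],  # 10-digit recipient numbers
}
```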
### Testing considerations
* Test notifications with single and multiple recipients
* Verify that the minimal information is displayed for the notification. Note that this notification type does not display the full detailed notification.
### Test Service
Twilio is fairly straightforward to sign up for, but I don't believe it has a free plan; a credit card will be needed to sign up, though the charges are fairly minimal per message.
## IRC
The Tower IRC notification takes the form of an IRC bot that connects, delivers its messages to channel(s) or individual user(s), and then disconnects. The bot supports SSL authentication but does not currently support Nickserv identification. If a channel or user does not exist or is not online, the Notification will not fail; the failure scenario is reserved specifically for connectivity.
Connectivity information is straightforward (an example configuration follows the list):
* `server`: The host name or address of the irc server
* `port`: The irc server port
* `nickname`: The bot's nickname once it connects to the server
* `password`: IRC servers can require a password to connect. If the server doesn't require one, this should be an empty string
* `use_ssl`: Should the bot use SSL when connecting
* `targets`: A list of users and/or channels to send the notification to.
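Since every field is spelled out above, a configuration is easy to sketch; only the values are illustrative.
```
# Field names follow the list above; the values are illustrative.
irc_configuration = {
    "server": "irc.example.com",
    "port": 6697,
    "nickname": "tower-bot",
    "password": "",                    # empty string if the server needs no password
    "use_ssl": True,
    "targets": ["#ops", "some_user"],  # channels and/or individual users
}
```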
### Test Considerations
* Test both plain and SSL connectivity
* Test single and multiples of both users and channels.
### Test Service
There are a few modern IRC servers to choose from, but we should use a fairly full-featured service to get good test coverage. I recommend inspircd because it is actively maintained and pretty straightforward to configure.
## Webhook
The webhook notification type in Ansible Tower provides a simple interface for sending POSTs to a predefined web service. Tower will POST to this address using the `application/json` content type, with the data payload containing all relevant details in JSON format.
The parameters are pretty straightforward:
* `url`: The full URL that will be POSTed to
* `headers`: Headers in JSON form, where the keys and values are strings. For example: `{"Authentication": "988881adc9fc3655077dc2d4d757d480b5ea0e11", "MessageType": "Test"}`
### Test Considerations
* Test both the HTTP and HTTPS services; also specifically test HTTPS with a self-signed cert
* Verify that the headers and payload are present, that the payload is JSON, and that the content type is specifically `application/json`
### Test Service
A very basic test can be performed by using `netcat`:
```
netcat -l 8099
```
and then sending the request to `http://<host>:8099`
Note that netcat won't respond correctly to the notification, so it will yield an error. I recommend using a very basic Flask application for verifying the POST request; you can see an example of mine here:
https://gist.github.com/matburt/73bfbf85c2443f39d272
This demonstrates how to define an endpoint and parse headers and JSON content. It doesn't show configuring Flask for HTTPS, but that is also pretty straightforward: http://flask.pocoo.org/snippets/111/
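If you'd rather not pull down the gist, a minimal receiver in the same spirit looks like the sketch below; it is not the gist's exact contents, and the route name is arbitrary.
```
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route("/notify", methods=["POST"])
def notify():
    # Tower POSTs application/json; dump the headers and payload for inspection.
    print(dict(request.headers))
    print(request.get_json())
    return jsonify({"ok": True})

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8099)  # same port as the netcat example above
```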

View File

@ -4,3 +4,5 @@ python_paths = awx/lib/site-packages
site_dirs = awx/lib/site-packages
python_files = *.py
addopts = --reuse-db
markers =
ac: access control test

View File

@ -21,6 +21,7 @@ django-auth-ldap==1.2.6
django-celery==3.1.17
django-crum==0.6.1
django-extensions==1.5.9
git+https://github.com/chrismeyersfsu/django-jsonbfield@fix-sqlite_serialization#egg=django-jsonbfield
django-polymorphic==0.7.2
django-radius==1.0.0
djangorestframework==3.3.2
@ -48,6 +49,7 @@ idna==2.0
importlib==1.0.3
ip-associations-python-novaclient-ext==0.1
ipaddress==1.0.16
irc==13.3.1
iso8601==0.1.11
isodate==0.5.1
jsonpatch==1.12
@ -84,6 +86,7 @@ psycopg2
pyasn1==0.1.9
pycrypto==2.6.1
pycparser==2.14
pygerduty==0.32.1
PyJWT==1.4.0
pymongo==2.8
pyOpenSSL==0.15.1
@ -121,11 +124,13 @@ requestsexceptions==1.1.1
shade==1.4.0
simplejson==3.8.1
six==1.9.0
slackclient==0.16
statsd==3.2.1
stevedore==1.10.0
suds==0.4
unicodecsv==0.14.1
warlock==1.2.0
twilio==4.9.1
wheel==0.24.0
wrapt==1.10.6
wsgiref==0.1.2

View File

@ -7,3 +7,4 @@ pytest
pytest-cov
pytest-django
pytest-pythonpath
pytest-mock

View File

@ -13,3 +13,4 @@ pytest
pytest-cov
pytest-django
pytest-pythonpath
pytest-mock

View File

@ -1,4 +1,5 @@
#!/bin/bash
set +x
# Wait for the databases to come up
ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all