diff --git a/.gitignore b/.gitignore index b452f7ee24..c385ad667d 100644 --- a/.gitignore +++ b/.gitignore @@ -34,7 +34,7 @@ __pycache__ /tar-build /setup-bundle-build /dist -*.egg-info +/*.egg-info *.py[c,o] # JavaScript diff --git a/Makefile b/Makefile index 74376e5e84..00d8f14ad3 100644 --- a/Makefile +++ b/Makefile @@ -273,9 +273,9 @@ version_file: # Do any one-time init tasks. init: @if [ "$(VIRTUAL_ENV)" ]; then \ - awx-manage register_instance --primary --hostname=127.0.0.1; \ + tower-manage register_instance --primary --hostname=127.0.0.1; \ else \ - sudo awx-manage register_instance --primary --hostname=127.0.0.1; \ + sudo tower-manage register_instance --primary --hostname=127.0.0.1; \ fi # Refresh development environment after pulling new code. @@ -291,7 +291,7 @@ migrate: # Run after making changes to the models to create a new migration. dbchange: - $(PYTHON) manage.py schemamigration main v14_changes --auto + $(PYTHON) manage.py makemigrations # access database shell, asks for password dbshell: @@ -358,7 +358,7 @@ pylint: reports @(set -o pipefail && $@ | reports/$@.report) check: flake8 pep8 # pyflakes pylint - + # Run all API unit tests. test: py.test awx/main/tests awx/api/tests awx/fact/tests diff --git a/awx/api/generics.py b/awx/api/generics.py index a4a1112d08..93dd3ba444 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -32,7 +32,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView', 'SubListCreateAttachDetachAPIView', 'RetrieveAPIView', 'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView', 'RetrieveUpdateDestroyAPIView', 'DestroyAPIView', - 'MongoAPIView', 'MongoListAPIView'] + 'SubDetailAPIView', + 'ParentMixin',] logger = logging.getLogger('awx.api.generics') @@ -219,28 +220,6 @@ class GenericAPIView(generics.GenericAPIView, APIView): d['settings'] = settings return d - -class MongoAPIView(GenericAPIView): - - def get_parent_object(self): - parent_filter = { - self.lookup_field: self.kwargs.get(self.lookup_field, None), - } - return get_object_or_404(self.parent_model, **parent_filter) - - def check_parent_access(self, parent=None): - parent = parent or self.get_parent_object() - parent_access = getattr(self, 'parent_access', 'read') - if parent_access in ('read', 'delete'): - args = (self.parent_model, parent_access, parent) - else: - args = (self.parent_model, parent_access, parent, None) - if not self.request.user.can_access(*args): - raise PermissionDenied() - -class MongoListAPIView(generics.ListAPIView, MongoAPIView): - pass - class SimpleListAPIView(generics.ListAPIView, GenericAPIView): def get_queryset(self): @@ -277,7 +256,25 @@ class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView): # Base class for a list view that allows creating new objects. pass -class SubListAPIView(ListAPIView): +class ParentMixin(object): + + def get_parent_object(self): + parent_filter = { + self.lookup_field: self.kwargs.get(self.lookup_field, None), + } + return get_object_or_404(self.parent_model, **parent_filter) + + def check_parent_access(self, parent=None): + parent = parent or self.get_parent_object() + parent_access = getattr(self, 'parent_access', 'read') + if parent_access in ('read', 'delete'): + args = (self.parent_model, parent_access, parent) + else: + args = (self.parent_model, parent_access, parent, None) + if not self.request.user.can_access(*args): + raise PermissionDenied() + +class SubListAPIView(ListAPIView, ParentMixin): # Base class for a read-only sublist view. 
# Subclasses should define at least: @@ -297,22 +294,6 @@ class SubListAPIView(ListAPIView): }) return d - def get_parent_object(self): - parent_filter = { - self.lookup_field: self.kwargs.get(self.lookup_field, None), - } - return get_object_or_404(self.parent_model, **parent_filter) - - def check_parent_access(self, parent=None): - parent = parent or self.get_parent_object() - parent_access = getattr(self, 'parent_access', 'read') - if parent_access in ('read', 'delete'): - args = (self.parent_model, parent_access, parent) - else: - args = (self.parent_model, parent_access, parent, None) - if not self.request.user.can_access(*args): - raise PermissionDenied() - def get_queryset(self): parent = self.get_parent_object() self.check_parent_access(parent) @@ -449,6 +430,9 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView): else: return self.attach(request, *args, **kwargs) +class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin): + pass + class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView): pass diff --git a/awx/api/metadata.py b/awx/api/metadata.py index a11df8d1ce..01f8fe306e 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -13,7 +13,7 @@ from rest_framework import serializers from rest_framework.request import clone_request # Ansible Tower -from awx.main.models import InventorySource +from awx.main.models import InventorySource, Notifier class Metadata(metadata.SimpleMetadata): @@ -76,6 +76,12 @@ class Metadata(metadata.SimpleMetadata): get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp) field_info['%s_group_by_choices' % cp] = get_group_by_choices() + # Special handling of notification configuration where the required properties + # are conditional on the type selected. + if field.field_name == 'notification_configuration': + for (notification_type_name, notification_tr_name, notification_type_class) in Notifier.NOTIFICATION_TYPES: + field_info[notification_type_name] = notification_type_class.init_parameters + # Update type of fields returned... if field.field_name == 'type': field_info['type'] = 'multiple choice' diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 25e438858c..71a103a1a6 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -9,8 +9,6 @@ import logging from collections import OrderedDict from dateutil import rrule -from rest_framework_mongoengine.serializers import DocumentSerializer - # PyYAML import yaml @@ -46,12 +44,10 @@ from awx.main.conf import tower_settings from awx.api.license import feature_enabled from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, EncryptedPasswordField, VerbatimField -from awx.fact.models import * # noqa - logger = logging.getLogger('awx.api.serializers') # Fields that should be summarized regardless of object type. -DEFAULT_SUMMARY_FIELDS = ('name', 'description')# , 'created_by', 'modified_by')#, 'type') +DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type') # Keys are fields (foreign keys) where, if found on an instance, summary info # should be added to the serialized data. 
Values are a tuple of field names on @@ -362,6 +358,7 @@ class BaseSerializer(serializers.ModelSerializer): roles[field.name] = { 'id': role.id, 'name': role.name, + 'description': role.description, 'url': role.get_absolute_url(), } if len(roles) > 0: @@ -555,19 +552,19 @@ class BaseSerializer(serializers.ModelSerializer): class EmptySerializer(serializers.Serializer): pass - -class BaseFactSerializer(DocumentSerializer): +class BaseFactSerializer(BaseSerializer): __metaclass__ = BaseSerializerMetaclass def get_fields(self): ret = super(BaseFactSerializer, self).get_fields() - if 'module' in ret and feature_enabled('system_tracking'): - choices = [(o, o.title()) for o in FactVersion.objects.all().only('module').distinct('module')] - ret['module'] = serializers.ChoiceField(source='module', choices=choices, read_only=True, required=False) + if 'module' in ret: + # TODO: the values_list may pull in a LOT of entries before the distinct is called + modules = Fact.objects.all().values_list('module', flat=True).distinct() + choices = [(o, o.title()) for o in modules] + ret['module'] = serializers.ChoiceField(choices=choices, read_only=True, required=False) return ret - class UnifiedJobTemplateSerializer(BaseSerializer): class Meta: @@ -868,7 +865,11 @@ class OrganizationSerializer(BaseSerializer): users = reverse('api:organization_users_list', args=(obj.pk,)), admins = reverse('api:organization_admins_list', args=(obj.pk,)), teams = reverse('api:organization_teams_list', args=(obj.pk,)), - activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)) + activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)), + notifiers = reverse('api:organization_notifiers_list', args=(obj.pk,)), + notifiers_any = reverse('api:organization_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:organization_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:organization_notifiers_error_list', args=(obj.pk,)), )) return res @@ -938,6 +939,9 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): project_updates = reverse('api:project_updates_list', args=(obj.pk,)), schedules = reverse('api:project_schedules_list', args=(obj.pk,)), activity_stream = reverse('api:project_activity_stream_list', args=(obj.pk,)), + notifiers_any = reverse('api:project_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:project_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:project_notifiers_error_list', args=(obj.pk,)), )) # Backwards compatibility. 
if obj.current_update: @@ -983,6 +987,7 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer): res.update(dict( project = reverse('api:project_detail', args=(obj.project.pk,)), cancel = reverse('api:project_update_cancel', args=(obj.pk,)), + notifications = reverse('api:project_update_notifications_list', args=(obj.pk,)), )) return res @@ -1390,6 +1395,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt activity_stream = reverse('api:inventory_activity_stream_list', args=(obj.pk,)), hosts = reverse('api:inventory_source_hosts_list', args=(obj.pk,)), groups = reverse('api:inventory_source_groups_list', args=(obj.pk,)), + notifiers_any = reverse('api:inventory_source_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:inventory_source_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:inventory_source_notifiers_error_list', args=(obj.pk,)), )) if obj.inventory and obj.inventory.active: res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) @@ -1434,6 +1442,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri res.update(dict( inventory_source = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,)), cancel = reverse('api:inventory_update_cancel', args=(obj.pk,)), + notifications = reverse('api:inventory_update_notifications_list', args=(obj.pk,)), )) return res @@ -1532,7 +1541,7 @@ class ResourceAccessListElementSerializer(UserSerializer): ret['summary_fields']['permissions'] = resource.get_permissions(user) def format_role_perm(role): - role_dict = { 'id': role.id, 'name': role.name} + role_dict = { 'id': role.id, 'name': role.name, 'description': role.description} try: role_dict['resource_name'] = role.content_object.name role_dict['resource_type'] = role.content_type.name @@ -1672,6 +1681,9 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): schedules = reverse('api:job_template_schedules_list', args=(obj.pk,)), activity_stream = reverse('api:job_template_activity_stream_list', args=(obj.pk,)), launch = reverse('api:job_template_launch', args=(obj.pk,)), + notifiers_any = reverse('api:job_template_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:job_template_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:job_template_notifiers_error_list', args=(obj.pk,)), )) if obj.host_config_key: res['callback'] = reverse('api:job_template_callback', args=(obj.pk,)) @@ -1726,6 +1738,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): job_tasks = reverse('api:job_job_tasks_list', args=(obj.pk,)), job_host_summaries = reverse('api:job_job_host_summaries_list', args=(obj.pk,)), activity_stream = reverse('api:job_activity_stream_list', args=(obj.pk,)), + notifications = reverse('api:job_notifications_list', args=(obj.pk,)), )) if obj.job_template and obj.job_template.active: res['job_template'] = reverse('api:job_template_detail', @@ -2141,6 +2154,79 @@ class JobLaunchSerializer(BaseSerializer): attrs = super(JobLaunchSerializer, self).validate(attrs) return attrs +class NotifierSerializer(BaseSerializer): + + class Meta: + model = Notifier + fields = ('*', 'organization', 'notification_type', 'notification_configuration') + + type_map = {"string": (str, unicode), + "int": (int,), + "bool": (bool,), + "list": (list,), + "password": (str, unicode), + "object": (dict, OrderedDict)} + + def to_representation(self, obj): + ret = 
super(NotifierSerializer, self).to_representation(obj) + for field in obj.notification_class.init_parameters: + if field in ret['notification_configuration'] and \ + force_text(ret['notification_configuration'][field]).startswith('$encrypted$'): + ret['notification_configuration'][field] = '$encrypted$' + return ret + + def get_related(self, obj): + res = super(NotifierSerializer, self).get_related(obj) + res.update(dict( + test = reverse('api:notifier_test', args=(obj.pk,)), + notifications = reverse('api:notifier_notification_list', args=(obj.pk,)), + )) + if obj.organization and obj.organization.active: + res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,)) + return res + + def validate(self, attrs): + notification_class = Notifier.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] + missing_fields = [] + incorrect_type_fields = [] + if 'notification_configuration' not in attrs: + return attrs + for field in notification_class.init_parameters: + if field not in attrs['notification_configuration']: + missing_fields.append(field) + continue + field_val = attrs['notification_configuration'][field] + field_type = notification_class.init_parameters[field]['type'] + expected_types = self.type_map[field_type] + if not type(field_val) in expected_types: + incorrect_type_fields.append((field, field_type)) + continue + if field_type == "password" and field_val.startswith('$encrypted$'): + missing_fields.append(field) + error_list = [] + if missing_fields: + error_list.append("Missing required fields for Notification Configuration: {}".format(missing_fields)) + if incorrect_type_fields: + for type_field_error in incorrect_type_fields: + error_list.append("Configuration field '{}' incorrect type, expected {}".format(type_field_error[0], + type_field_error[1])) + if error_list: + raise serializers.ValidationError(error_list) + return attrs + +class NotificationSerializer(BaseSerializer): + + class Meta: + model = Notification + fields = ('*', '-name', '-description', 'notifier', 'error', 'status', 'notifications_sent', + 'notification_type', 'recipients', 'subject') + + def get_related(self, obj): + res = super(NotificationSerializer, self).get_related(obj) + res.update(dict( + notifier = reverse('api:notifier_detail', args=(obj.notifier.pk,)), + )) + return res class ScheduleSerializer(BaseSerializer): @@ -2391,28 +2477,31 @@ class AuthTokenSerializer(serializers.Serializer): class FactVersionSerializer(BaseFactSerializer): - related = serializers.SerializerMethodField('get_related') class Meta: - model = FactVersion - fields = ('related', 'module', 'timestamp',) + model = Fact + fields = ('related', 'module', 'timestamp') + read_only_fields = ('*',) def get_related(self, obj): - host_obj = self.context.get('host_obj') - res = {} + res = super(FactVersionSerializer, self).get_related(obj) params = { 'datetime': timestamp_apiformat(obj.timestamp), 'module': obj.module, } - res.update(dict( - fact_view = build_url('api:host_fact_compare_view', args=(host_obj.pk,), get=params), - )) + res['fact_view'] = build_url('api:host_fact_compare_view', args=(obj.host.pk,), get=params) return res - class FactSerializer(BaseFactSerializer): class Meta: model = Fact - depth = 2 - fields = ('timestamp', 'host', 'module', 'fact') + # TODO: Consider adding in host to the fields list ? 
+        fields = ('related', 'timestamp', 'module', 'facts', 'id', 'summary_fields', 'host')
+        read_only_fields = ('*',)
+
+    def get_related(self, obj):
+        res = super(FactSerializer, self).get_related(obj)
+        res['host'] = obj.host.get_absolute_url()
+        return res
+
diff --git a/awx/api/urls.py b/awx/api/urls.py
index a249077c8e..d3cde02401 100644
--- a/awx/api/urls.py
+++ b/awx/api/urls.py
@@ -20,6 +20,10 @@ organization_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/projects/$', 'organization_projects_list'),
     url(r'^(?P<pk>[0-9]+)/teams/$', 'organization_teams_list'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'organization_activity_stream_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers/$', 'organization_notifiers_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'organization_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'organization_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'organization_notifiers_success_list'),
 )

 user_urls = patterns('awx.api.views',
@@ -44,12 +48,16 @@ project_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
     url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'project_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'project_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'project_notifiers_success_list'),
 )

 project_update_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/$', 'project_update_detail'),
     url(r'^(?P<pk>[0-9]+)/cancel/$', 'project_update_cancel'),
     url(r'^(?P<pk>[0-9]+)/stdout/$', 'project_update_stdout'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'project_update_notifications_list'),
 )

 team_urls = patterns('awx.api.views',
@@ -92,8 +100,8 @@ host_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
     url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
     #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
-    url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
-    url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
+    url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
+    url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
 )

 group_urls = patterns('awx.api.views',
@@ -121,12 +129,16 @@ inventory_source_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
     url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
     url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'inventory_source_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'inventory_source_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'inventory_source_notifiers_success_list'),
 )

 inventory_update_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/$', 'inventory_update_detail'),
     url(r'^(?P<pk>[0-9]+)/cancel/$', 'inventory_update_cancel'),
     url(r'^(?P<pk>[0-9]+)/stdout/$', 'inventory_update_stdout'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'inventory_update_notifications_list'),
 )

 inventory_script_urls = patterns('awx.api.views',
@@ -168,6 +180,9 @@ job_template_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
     url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'job_template_survey_spec'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_any/$', 'job_template_notifiers_any_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_error/$', 'job_template_notifiers_error_list'),
+    url(r'^(?P<pk>[0-9]+)/notifiers_success/$', 'job_template_notifiers_success_list'),
 )

 job_urls = patterns('awx.api.views',
@@ -182,6 +197,7 @@ job_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/job_tasks/$', 'job_job_tasks_list'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
     url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
 )

 job_host_summary_urls = patterns('awx.api.views',
@@ -224,6 +240,18 @@ system_job_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/cancel/$', 'system_job_cancel'),
 )

+notifier_urls = patterns('awx.api.views',
+    url(r'^$', 'notifier_list'),
+    url(r'^(?P<pk>[0-9]+)/$', 'notifier_detail'),
+    url(r'^(?P<pk>[0-9]+)/test/$', 'notifier_test'),
+    url(r'^(?P<pk>[0-9]+)/notifications/$', 'notifier_notification_list'),
+)
+
+notification_urls = patterns('awx.api.views',
+    url(r'^$', 'notification_list'),
+    url(r'^(?P<pk>[0-9]+)/$', 'notification_detail'),
+)
+
 schedule_urls = patterns('awx.api.views',
     url(r'^$', 'schedule_list'),
     url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
@@ -273,6 +301,8 @@ v1_urls = patterns('awx.api.views',
     url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
     url(r'^system_job_templates/', include(system_job_template_urls)),
     url(r'^system_jobs/', include(system_job_urls)),
+    url(r'^notifiers/', include(notifier_urls)),
+    url(r'^notifications/', include(notification_urls)),
     url(r'^unified_job_templates/$', 'unified_job_template_list'),
     url(r'^unified_jobs/$', 'unified_job_list'),
     url(r'^activity_stream/', include(activity_stream_urls)),
diff --git a/awx/api/views.py b/awx/api/views.py
index b08bbf7e58..a30ea1870b 100644
--- a/awx/api/views.py
+++ b/awx/api/views.py
@@ -42,9 +42,6 @@ from rest_framework import status
 from rest_framework_yaml.parsers import YAMLParser
 from rest_framework_yaml.renderers import YAMLRenderer

-# MongoEngine
-import mongoengine
-
 # QSStats
 import qsstats

@@ -56,12 +53,11 @@ from social.backends.utils import load_backends

 # AWX
 from awx.main.task_engine import TaskSerializer, TASK_FILE, TEMPORARY_TASK_FILE
-from awx.main.tasks import mongodb_control
+from awx.main.tasks import mongodb_control, send_notifications
 from awx.main.access import get_user_queryset
 from awx.main.ha import is_ha_environment
 from awx.api.authentication import TaskAuthentication, TokenGetAuthentication
 from awx.api.utils.decorators import paginated
-from awx.api.filters import MongoFilterBackend
 from awx.api.generics import get_view_name
 from awx.api.generics import * # noqa
 from awx.api.license import feature_enabled, feature_exists, LicenseForbids
@@ -70,7 +66,6 @@ from awx.main.utils import * # noqa
 from awx.api.permissions import * # noqa
 from awx.api.renderers import * # noqa
 from awx.api.serializers import * # noqa
-from awx.fact.models import * # noqa
 from awx.main.utils import emit_websocket_notification
 from awx.main.conf import tower_settings

@@ -137,6 +132,8 @@ class ApiV1RootView(APIView):
         data['schedules'] = reverse('api:schedule_list')
         data['roles'] = reverse('api:role_list')
         data['resources'] = reverse('api:resource_list')
+        data['notifiers'] = reverse('api:notifier_list')
+        data['notifications'] = reverse('api:notification_list')
         data['unified_job_templates'] = reverse('api:unified_job_template_list')
         data['unified_jobs'] = reverse('api:unified_job_list')
         data['activity_stream'] = reverse('api:activity_stream_list')
@@ -252,32 +249,12 @@ class ApiV1ConfigView(APIView):
            # FIX: Log return
Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST) - # Sanity check: If this license includes system tracking, make - # sure that we have a valid MongoDB to point to, and complain if - # we do not. - if ('features' in license_data and 'system_tracking' in license_data['features'] and - license_data['features']['system_tracking'] and settings.MONGO_HOST == NotImplemented): - return Response({ - 'error': 'This license supports system tracking, which ' - 'requires MongoDB to be installed. Since you are ' - 'running in an HA environment, you will need to ' - 'provide a MongoDB instance. Please re-run the ' - 'installer prior to installing this license.' - }, status=status.HTTP_400_BAD_REQUEST) - # If the license is valid, write it to disk. if license_data['valid_key']: tower_settings.LICENSE = data_actual - - # Spawn a task to ensure that MongoDB is started (or stopped) - # as appropriate, based on whether the license uses it. - if license_data['features']['system_tracking']: - mongodb_control.delay('start') - else: - mongodb_control.delay('stop') - - # Done; return the response. + tower_settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data) + return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): @@ -698,6 +675,35 @@ class OrganizationActivityStreamList(SubListAPIView): # Okay, let it through. return super(type(self), self).get(request, *args, **kwargs) +class OrganizationNotifiersList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers' + parent_key = 'organization' + +class OrganizationNotifiersAnyList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers_any' + +class OrganizationNotifiersErrorList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers_error' + +class OrganizationNotifiersSuccessList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Organization + relationship = 'notifiers_success' + class TeamList(ListCreateAPIView): model = Team @@ -724,11 +730,8 @@ class TeamRolesList(SubListCreateAttachDetachAPIView): relationship='member_role.children' def get_queryset(self): - # XXX: This needs to be the intersection between - # what roles the user has and what roles the viewer - # has access to see. 
team = Team.objects.get(pk=self.kwargs['pk']) - return team.member_role.children + return team.member_role.children.filter(id__in=Role.visible_roles(self.request.user)) # XXX: Need to enforce permissions def post(self, request, *args, **kwargs): @@ -868,6 +871,26 @@ class ProjectActivityStreamList(SubListAPIView): return qs.filter(project=parent) return qs.filter(Q(project=parent) | Q(credential__in=parent.credential)) +class ProjectNotifiersAnyList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Project + relationship = 'notifiers_any' + +class ProjectNotifiersErrorList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Project + relationship = 'notifiers_error' + +class ProjectNotifiersSuccessList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = Project + relationship = 'notifiers_success' class ProjectUpdatesList(SubListAPIView): @@ -918,6 +941,12 @@ class ProjectUpdateCancel(RetrieveAPIView): else: return self.http_method_not_allowed(request, *args, **kwargs) +class ProjectUpdateNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = Project + relationship = 'notifications' class UserList(ListCreateAPIView): @@ -947,13 +976,11 @@ class UserRolesList(SubListCreateAttachDetachAPIView): serializer_class = RoleSerializer parent_model = User relationship='roles' + permission_classes = (IsAuthenticated,) def get_queryset(self): - # XXX: This needs to be the intersection between - # what roles the user has and what roles the viewer - # has access to see. - u = User.objects.get(pk=self.kwargs['pk']) - return u.roles + #u = User.objects.get(pk=self.kwargs['pk']) + return Role.visible_roles(self.request.user).filter(members__in=[int(self.kwargs['pk']), ]) def post(self, request, *args, **kwargs): # Forbid implicit role creation here @@ -963,6 +990,10 @@ class UserRolesList(SubListCreateAttachDetachAPIView): return Response(data, status=status.HTTP_400_BAD_REQUEST) return super(type(self), self).post(request, *args, **kwargs) + def check_parent_access(self, parent=None): + # We hide roles that shouldn't be seen in our queryset + return True + class UserProjectsList(SubListAPIView): @@ -1172,33 +1203,6 @@ class InventoryScanJobTemplateList(SubListAPIView): qs = self.request.user.get_queryset(self.model) return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent) -class InventorySingleFactView(MongoAPIView): - - model = Fact - parent_model = Inventory - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? 
-        if not feature_enabled('system_tracking'):
-            raise LicenseForbids('Your license does not permit use '
-                                 'of system tracking.')
-
-        fact_key = request.query_params.get("fact_key", None)
-        fact_value = request.query_params.get("fact_value", None)
-        datetime_spec = request.query_params.get("timestamp", None)
-        module_spec = request.query_params.get("module", None)
-
-        if fact_key is None or fact_value is None or module_spec is None:
-            return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
-        datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
-        inventory_obj = self.get_parent_object()
-        fact_data = Fact.get_single_facts([h.name for h in inventory_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec)
-        return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else []))
-
-
 class HostList(ListCreateAPIView):

     model = Host
@@ -1285,102 +1289,59 @@ class HostActivityStreamList(SubListAPIView):
         qs = self.request.user.get_queryset(self.model)
         return qs.filter(Q(host=parent) | Q(inventory=parent.inventory))

-class HostFactVersionsList(MongoListAPIView):
+class SystemTrackingEnforcementMixin(APIView):
+    '''
+    Use check_permissions instead of initial() because it is also in the code path for OPTIONS requests
+    '''
+    def check_permissions(self, request):
+        if not feature_enabled("system_tracking"):
+            raise LicenseForbids("Your license does not permit use "
+                                 "of system tracking.")
+        return super(SystemTrackingEnforcementMixin, self).check_permissions(request)

+class HostFactVersionsList(ListAPIView, ParentMixin, SystemTrackingEnforcementMixin):
+
+    model = Fact
     serializer_class = FactVersionSerializer
     parent_model = Host
     new_in_220 = True
-    filter_backends = (MongoFilterBackend,)

     def get_queryset(self):
         from_spec = self.request.query_params.get('from', None)
         to_spec = self.request.query_params.get('to', None)
         module_spec = self.request.query_params.get('module', None)

-        if not feature_enabled("system_tracking"):
-            raise LicenseForbids("Your license does not permit use "
-                                 "of system tracking.")
+        if from_spec:
+            from_spec = dateutil.parser.parse(from_spec)
+        if to_spec:
+            to_spec = dateutil.parser.parse(to_spec)

-        host = self.get_parent_object()
-        self.check_parent_access(host)
+        host_obj = self.get_parent_object()

-        try:
-            fact_host = FactHost.objects.get(hostname=host.name, inventory_id=host.inventory.pk)
-        except FactHost.DoesNotExist:
-            return None
-        except mongoengine.ConnectionError:
-            return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
-
-        kv = {
-            'host': fact_host.id,
-        }
-        if module_spec is not None:
-            kv['module'] = module_spec
-        if from_spec is not None:
-            from_actual = dateutil.parser.parse(from_spec)
-            kv['timestamp__gt'] = from_actual
-        if to_spec is not None:
-            to_actual = dateutil.parser.parse(to_spec)
-            kv['timestamp__lte'] = to_actual
-
-        return FactVersion.objects.filter(**kv).order_by("-timestamp")
+        return Fact.get_timeline(host_obj.id, module=module_spec, ts_from=from_spec, ts_to=to_spec)

     def list(self, *args, **kwargs):
         queryset = self.get_queryset() or []
-        try:
-            serializer = FactVersionSerializer(queryset, many=True, context=dict(host_obj=self.get_parent_object()))
-        except mongoengine.ConnectionError:
-            return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST)
-        return Response(dict(results=serializer.data))
+        return Response(dict(results=self.serializer_class(queryset,
many=True).data)) -class HostSingleFactView(MongoAPIView): +class HostFactCompareView(SubDetailAPIView, SystemTrackingEnforcementMixin): model = Fact - parent_model = Host - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? - if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') - - fact_key = request.query_params.get("fact_key", None) - fact_value = request.query_params.get("fact_value", None) - datetime_spec = request.query_params.get("timestamp", None) - module_spec = request.query_params.get("module", None) - - if fact_key is None or fact_value is None or module_spec is None: - return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST) - datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() - host_obj = self.get_parent_object() - fact_data = Fact.get_single_facts([host_obj.name], fact_key, fact_value, datetime_actual, module_spec) - return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else [])) - -class HostFactCompareView(MongoAPIView): - new_in_220 = True parent_model = Host serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? - if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') + def retrieve(self, request, *args, **kwargs): datetime_spec = request.query_params.get('datetime', None) module_spec = request.query_params.get('module', "ansible") datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() host_obj = self.get_parent_object() - fact_entry = Fact.get_host_version(host_obj.name, host_obj.inventory.pk, datetime_actual, module_spec) - host_data = FactSerializer(fact_entry).data if fact_entry is not None else {} - return Response(host_data) + fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual) + if not fact_entry: + return Response({'detail': 'Fact not found'}, status=status.HTTP_404_NOT_FOUND) + return Response(self.serializer_class(instance=fact_entry).data) class GroupList(ListCreateAPIView): @@ -1549,33 +1510,6 @@ class GroupDetail(RetrieveUpdateDestroyAPIView): obj.mark_inactive_recursive() return Response(status=status.HTTP_204_NO_CONTENT) - -class GroupSingleFactView(MongoAPIView): - - model = Fact - parent_model = Group - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? 
- if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') - - fact_key = request.query_params.get("fact_key", None) - fact_value = request.query_params.get("fact_value", None) - datetime_spec = request.query_params.get("timestamp", None) - module_spec = request.query_params.get("module", None) - - if fact_key is None or fact_value is None or module_spec is None: - return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST) - datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() - group_obj = self.get_parent_object() - fact_data = Fact.get_single_facts([h.name for h in group_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec) - return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else [])) - class InventoryGroupsList(SubListCreateAttachDetachAPIView): model = Group @@ -1803,6 +1737,27 @@ class InventorySourceActivityStreamList(SubListAPIView): # Okay, let it through. return super(type(self), self).get(request, *args, **kwargs) +class InventorySourceNotifiersAnyList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = InventorySource + relationship = 'notifiers_any' + +class InventorySourceNotifiersErrorList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = InventorySource + relationship = 'notifiers_error' + +class InventorySourceNotifiersSuccessList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = InventorySource + relationship = 'notifiers_success' + class InventorySourceHostsList(SubListAPIView): model = Host @@ -1867,6 +1822,13 @@ class InventoryUpdateCancel(RetrieveAPIView): else: return self.http_method_not_allowed(request, *args, **kwargs) +class InventoryUpdateNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = InventoryUpdate + relationship = 'notifications' + class JobTemplateList(ListCreateAPIView): model = JobTemplate @@ -2036,6 +1998,27 @@ class JobTemplateActivityStreamList(SubListAPIView): # Okay, let it through. 
return super(type(self), self).get(request, *args, **kwargs) +class JobTemplateNotifiersAnyList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = JobTemplate + relationship = 'notifiers_any' + +class JobTemplateNotifiersErrorList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = JobTemplate + relationship = 'notifiers_error' + +class JobTemplateNotifiersSuccessList(SubListCreateAttachDetachAPIView): + + model = Notifier + serializer_class = NotifierSerializer + parent_model = JobTemplate + relationship = 'notifiers_success' + class JobTemplateCallback(GenericAPIView): model = JobTemplate @@ -2369,6 +2352,13 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView): headers = {'Location': new_job.get_absolute_url()} return Response(data, status=status.HTTP_201_CREATED, headers=headers) +class JobNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = Job + relationship = 'notifications' + class BaseJobHostSummariesList(SubListAPIView): model = JobHostSummary @@ -3022,6 +3012,58 @@ class AdHocCommandStdout(UnifiedJobStdout): model = AdHocCommand new_in_220 = True +class NotifierList(ListCreateAPIView): + + model = Notifier + serializer_class = NotifierSerializer + new_in_300 = True + +class NotifierDetail(RetrieveUpdateDestroyAPIView): + + model = Notifier + serializer_class = NotifierSerializer + new_in_300 = True + +class NotifierTest(GenericAPIView): + + view_name = 'Notifier Test' + model = Notifier + serializer_class = EmptySerializer + new_in_300 = True + + def post(self, request, *args, **kwargs): + obj = self.get_object() + notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, tower_settings.TOWER_URL_BASE), + {"body": "Ansible Tower Test Notification {} {}".format(obj.id, tower_settings.TOWER_URL_BASE)}) + if not notification: + return Response({}, status=status.HTTP_400_BAD_REQUEST) + else: + send_notifications.delay([notification.id]) + headers = {'Location': notification.get_absolute_url()} + return Response({"notification": notification.id}, + headers=headers, + status=status.HTTP_202_ACCEPTED) + +class NotifierNotificationList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = Notifier + relationship = 'notifications' + parent_key = 'notifier' + +class NotificationList(ListAPIView): + + model = Notification + serializer_class = NotificationSerializer + new_in_300 = True + +class NotificationDetail(RetrieveAPIView): + + model = Notification + serializer_class = NotificationSerializer + new_in_300 = True + class ActivityStreamList(SimpleListAPIView): model = ActivityStream @@ -3120,29 +3162,27 @@ class SettingsReset(APIView): TowerSettings.objects.filter(key=settings_key).delete() return Response(status=status.HTTP_204_NO_CONTENT) -#class RoleList(ListCreateAPIView): + class RoleList(ListAPIView): model = Role serializer_class = RoleSerializer + permission_classes = (IsAuthenticated,) new_in_300 = True - # XXX: Permissions - only roles the user has access to see should be listed here def get_queryset(self): - return Role.objects + if self.request.user.is_superuser: + return Role.objects + return Role.visible_roles(self.request.user) - # XXX: Need to define who can create custom roles, and then restrict access - # appropriately - # XXX: Need to define how we want to deal with administration of custom roles. 
-class RoleDetail(RetrieveUpdateAPIView): +class RoleDetail(RetrieveAPIView): model = Role serializer_class = RoleSerializer + permission_classes = (IsAuthenticated,) new_in_300 = True - # XXX: Permissions - only appropriate people should be able to change these - class RoleUsersList(SubListCreateAttachDetachAPIView): @@ -3150,6 +3190,8 @@ class RoleUsersList(SubListCreateAttachDetachAPIView): serializer_class = UserSerializer parent_model = Role relationship = 'members' + permission_classes = (IsAuthenticated,) + new_in_300 = True def get_queryset(self): # XXX: Access control @@ -3171,6 +3213,8 @@ class RoleTeamsList(ListAPIView): serializer_class = TeamSerializer parent_model = Role relationship = 'member_role.parents' + permission_classes = (IsAuthenticated,) + new_in_300 = True def get_queryset(self): # TODO: Check @@ -3201,6 +3245,8 @@ class RoleParentsList(SubListAPIView): serializer_class = RoleSerializer parent_model = Role relationship = 'parents' + permission_classes = (IsAuthenticated,) + new_in_300 = True def get_queryset(self): # XXX: This should be the intersection between the roles of the user @@ -3214,6 +3260,8 @@ class RoleChildrenList(SubListAPIView): serializer_class = RoleSerializer parent_model = Role relationship = 'children' + permission_classes = (IsAuthenticated,) + new_in_300 = True def get_queryset(self): # XXX: This should be the intersection between the roles of the user @@ -3225,6 +3273,7 @@ class ResourceDetail(RetrieveAPIView): model = Resource serializer_class = ResourceSerializer + permission_classes = (IsAuthenticated,) new_in_300 = True # XXX: Permissions - only roles the user has access to see should be listed here @@ -3235,6 +3284,7 @@ class ResourceList(ListAPIView): model = Resource serializer_class = ResourceSerializer + permission_classes = (IsAuthenticated,) new_in_300 = True def get_queryset(self): @@ -3244,6 +3294,7 @@ class ResourceAccessList(ListAPIView): model = User serializer_class = ResourceAccessListElementSerializer + permission_classes = (IsAuthenticated,) new_in_300 = True def get_queryset(self): diff --git a/awx/fact/utils/connection.py b/awx/fact/utils/connection.py deleted file mode 100644 index 4c4019e24d..0000000000 --- a/awx/fact/utils/connection.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved. - -from django.conf import settings -from mongoengine import connect -from mongoengine.connection import ConnectionError -from pymongo.errors import AutoReconnect - -def test_mongo_connection(): - # Connect to Mongo - try: - # Sanity check: If we have intentionally invalid settings, then we - # know we cannot connect. - if settings.MONGO_HOST == NotImplemented: - raise ConnectionError - - # Attempt to connect to the MongoDB database. 
-        db = connect(settings.MONGO_DB,
-                     host=settings.MONGO_HOST,
-                     port=int(settings.MONGO_PORT),
-                     username=settings.MONGO_USERNAME,
-                     password=settings.MONGO_PASSWORD,
-                     tz_aware=settings.USE_TZ)
-        db[settings.MONGO_DB].command('ping')
-        return True
-    except (ConnectionError, AutoReconnect):
-        return False
-
diff --git a/awx/main/access.py b/awx/main/access.py
index d3fd865de2..092652acc2 100644
--- a/awx/main/access.py
+++ b/awx/main/access.py
@@ -1291,6 +1291,31 @@ class ScheduleAccess(BaseAccess):
         else:
             return False

+class NotifierAccess(BaseAccess):
+    '''
+    I can see/use a notifier if I have permission to do so
+    '''
+    model = Notifier
+
+    def get_queryset(self):
+        qs = self.model.objects.filter(active=True).distinct()
+        if self.user.is_superuser:
+            return qs
+        return qs
+
+class NotificationAccess(BaseAccess):
+    '''
+    I can see/use a notification if I have permission to do so
+    '''
+    model = Notification
+
+    def get_queryset(self):
+        qs = self.model.objects.distinct()
+        if self.user.is_superuser:
+            return qs
+        return qs
+
+
 class ActivityStreamAccess(BaseAccess):
     '''
     I can see activity stream events only when I have permission on all objects included in the event
@@ -1475,23 +1500,31 @@ class RoleAccess(BaseAccess):

     def get_queryset(self):
         if self.user.is_superuser:
             return self.model.objects.all()
-        return self.model.objects.none()
+        return self.model.visible_roles(self.user)

     def can_change(self, obj, data):
         return self.user.is_superuser

     def can_add(self, obj, data):
-        return self.user.is_superuser
+        # Unsupported for now
+        return False

     def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
-        return self.user.is_superuser
+        return self.can_unattach(obj, sub_obj, relationship)

     def can_unattach(self, obj, sub_obj, relationship):
-        return self.user.is_superuser
+        if self.user.is_superuser:
+            return True
+        if obj.object_id and \
+           isinstance(obj.content_object, ResourceMixin) and \
+           obj.content_object.accessible_by(self.user, {'write': True}):
+            return True
+        return False

     def can_delete(self, obj):
-        return self.user.is_superuser
+        # Unsupported for now
+        return False

 class ResourceAccess(BaseAccess):
@@ -1550,3 +1583,5 @@ register_access(CustomInventoryScript, CustomInventoryScriptAccess)
 register_access(TowerSettings, TowerSettingsAccess)
 register_access(Role, RoleAccess)
 register_access(Resource, ResourceAccess)
+register_access(Notifier, NotifierAccess)
+register_access(Notification, NotificationAccess)
diff --git a/awx/main/fields.py b/awx/main/fields.py
index 1db59e296f..b3efcd20e6 100644
--- a/awx/main/fields.py
+++ b/awx/main/fields.py
@@ -134,8 +134,9 @@ def resolve_role_field(obj, field):

 class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
     """Descriptor for Implicit Role Fields.
Auto-creates the appropriate role entry on first access""" - def __init__(self, role_name, permissions, parent_role, *args, **kwargs): + def __init__(self, role_name, role_description, permissions, parent_role, *args, **kwargs): self.role_name = role_name + self.role_description = role_description if role_description else "" self.permissions = permissions self.parent_role = parent_role @@ -152,7 +153,7 @@ class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor): if connection.needs_rollback: raise TransactionManagementError('Current transaction has failed, cannot create implicit role') - role = Role.objects.create(name=self.role_name, content_object=instance) + role = Role.objects.create(name=self.role_name, description=self.role_description, content_object=instance) if self.parent_role: # Add all non-null parent roles as parents @@ -195,8 +196,9 @@ class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor): class ImplicitRoleField(models.ForeignKey): """Implicitly creates a role entry for a resource""" - def __init__(self, role_name=None, permissions=None, parent_role=None, *args, **kwargs): + def __init__(self, role_name=None, role_description=None, permissions=None, parent_role=None, *args, **kwargs): self.role_name = role_name + self.role_description = role_description self.permissions = permissions self.parent_role = parent_role @@ -211,6 +213,7 @@ class ImplicitRoleField(models.ForeignKey): self.name, ImplicitRoleDescriptor( self.role_name, + self.role_description, self.permissions, self.parent_role, self diff --git a/awx/main/management/commands/cleanup_facts.py b/awx/main/management/commands/cleanup_facts.py index 11d5d88996..578bee3441 100644 --- a/awx/main/management/commands/cleanup_facts.py +++ b/awx/main/management/commands/cleanup_facts.py @@ -12,7 +12,7 @@ from django.db import transaction from django.utils.timezone import now # AWX -from awx.fact.models.fact import * # noqa +from awx.main.models.fact import Fact from awx.api.license import feature_enabled OLDER_THAN = 'older_than' @@ -31,7 +31,7 @@ class CleanupFacts(object): # pivot -= granularity # group by host def cleanup(self, older_than_abs, granularity, module=None): - fact_oldest = FactVersion.objects.all().order_by('timestamp').first() + fact_oldest = Fact.objects.all().order_by('timestamp').first() if not fact_oldest: return 0 @@ -44,7 +44,10 @@ class CleanupFacts(object): # Special case, granularity=0x where x is d, w, or y # The intent is to delete all facts < older_than_abs if granularity == relativedelta(): - return FactVersion.objects.filter(**kv).order_by('-timestamp').delete() + qs = Fact.objects.filter(**kv) + count = qs.count() + qs.delete() + return count total = 0 @@ -61,18 +64,17 @@ class CleanupFacts(object): kv['module'] = module - fact_version_objs = FactVersion.objects.filter(**kv).order_by('-timestamp').limit(1) - if fact_version_objs: - fact_version_obj = fact_version_objs[0] + fact_version_obj = Fact.objects.filter(**kv).order_by('-timestamp').first() + if fact_version_obj: kv = { 'timestamp__lt': fact_version_obj.timestamp, 'timestamp__gt': date_pivot_next } if module: kv['module'] = module - count = FactVersion.objects.filter(**kv).delete() - # FIXME: These two deletes should be a transaction - count = Fact.objects.filter(**kv).delete() + qs = Fact.objects.filter(**kv) + count = qs.count() + qs.delete() total += count date_pivot = date_pivot_next diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py 
index aa3abe1bfd..062cd39693 100644
--- a/awx/main/management/commands/run_fact_cache_receiver.py
+++ b/awx/main/management/commands/run_fact_cache_receiver.py
@@ -9,9 +9,11 @@ from datetime import datetime

 # Django
 from django.core.management.base import NoArgsCommand
 from django.conf import settings
+#from django.core.exceptions import Does

 # AWX
-from awx.fact.models.fact import * # noqa
+from awx.main.models.fact import Fact
+from awx.main.models.inventory import Host
 from awx.main.socket import Socket

 logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')
@@ -47,35 +49,34 @@ class FactCacheReceiver(object):
         # ansible v2 will not emit this message. Thus, this can be removed at that time.
         if 'module_setup' in facts_data and len(facts_data) == 1:
             logger.info('Received module_setup message')
-            return
+            return None

         try:
-            host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id)
-        except FactHost.DoesNotExist:
-            logger.info('Creating new host <%s, %s>' % (hostname, inventory_id))
-            host = FactHost(hostname=hostname, inventory_id=inventory_id)
-            host.save()
-            logger.info('Created new host <%s>' % (host.id))
-        except FactHost.MultipleObjectsReturned:
-            query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id)
-            logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query))
+            host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id)
+        except Host.DoesNotExist:
+            logger.warn('Failed to intake fact. Host does not exist <%s, %s>' % (hostname, inventory_id))
             return
+        except Host.MultipleObjectsReturned:
+            logger.warn('Database inconsistent. Multiple Hosts found for <%s, %s>.' % (hostname, inventory_id))
+            return None
         except Exception, e:
             logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
-            return
+            return None

-        (module, facts) = self.process_facts(facts_data)
+        (module_name, facts) = self.process_facts(facts_data)
         self.timestamp = datetime.fromtimestamp(date_key, None)

-        try:
-            # Update existing Fact entry
-            version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module)
-            Fact.objects(id=version_obj.fact.id).update_one(fact=facts)
-            logger.info('Updated existing fact <%s>' % (version_obj.fact.id))
-        except FactVersion.DoesNotExist:
+        # Update existing Fact entry
+        fact_obj = Fact.objects.filter(host__id=host_obj.id, module=module_name, timestamp=self.timestamp).first()
+        if fact_obj:
+            fact_obj.facts = facts
+            fact_obj.save()
+            logger.info('Updated existing fact <%s>' % (fact_obj.id))
+        else:
             # Create new Fact entry
-            (fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module)
-            logger.info('Created new fact <%s, %s>' % (fact_obj.id, version_obj.id))
+            fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
+            logger.info('Created new fact <%s, %s>' % (fact_obj.id, module_name))
+        return fact_obj

     def run_receiver(self, use_processing_threads=True):
         with Socket('fact_cache', 'r') as facts:
diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py
index d49dbf1669..5b5dd3bff0 100644
--- a/awx/main/management/commands/run_task_system.py
+++ b/awx/main/management/commands/run_task_system.py
@@ -15,7 +15,7 @@ from django.core.management.base import NoArgsCommand
 # AWX
 from awx.main.models import * # noqa
 from awx.main.queue import FifoQueue
-from awx.main.tasks import handle_work_error
+from awx.main.tasks import handle_work_error,
handle_work_success from awx.main.utils import get_system_task_capacity # Celery @@ -265,14 +265,15 @@ def process_graph(graph, task_capacity): [{'type': graph.get_node_type(n['node_object']), 'id': n['node_object'].id} for n in node_dependencies] error_handler = handle_work_error.s(subtasks=dependent_nodes) - start_status = node_obj.start(error_callback=error_handler) + success_handler = handle_work_success.s(task_actual={'type': graph.get_node_type(node_obj), + 'id': node_obj.id}) + start_status = node_obj.start(error_callback=error_handler, success_callback=success_handler) if not start_status: node_obj.status = 'failed' if node_obj.job_explanation: node_obj.job_explanation += ' ' node_obj.job_explanation += 'Task failed pre-start check.' node_obj.save() - # TODO: Run error handler continue remaining_volume -= impact running_impact += impact diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py index 79e8d8d6dd..6d2c78e454 100644 --- a/awx/main/migrations/0001_initial.py +++ b/awx/main/migrations/0001_initial.py @@ -43,7 +43,7 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('host_name', models.CharField(default=b'', max_length=1024, editable=False)), - ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable')])), + ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])), ('event_data', jsonfield.fields.JSONField(default={}, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), diff --git a/awx/main/migrations/0003_v300_changes.py b/awx/main/migrations/0003_v300_changes.py new file mode 100644 index 0000000000..83b8b4b3ab --- /dev/null +++ b/awx/main/migrations/0003_v300_changes.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import jsonfield.fields +import django.db.models.deletion +from django.conf import settings +import taggit.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0002_auto_20150616_2121'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('main', '0002_v300_changes'), + ] + + operations = [ + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])), + ('error', models.TextField(default=b'', editable=False, blank=True)), + ('notifications_sent', models.IntegerField(default=0, editable=False)), + ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])), + ('recipients', models.TextField(default=b'', editable=False, blank=True)), + ('subject', models.TextField(default=b'', 
editable=False, blank=True)), + ('body', jsonfield.fields.JSONField(default=dict, blank=True)), + ], + options={ + 'ordering': ('pk',), + }, + ), + migrations.CreateModel( + name='Notifier', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('description', models.TextField(default=b'', blank=True)), + ('active', models.BooleanField(default=True, editable=False)), + ('name', models.CharField(unique=True, max_length=512)), + ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])), + ('notification_configuration', jsonfield.fields.JSONField(default=dict)), + ('created_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), + ('modified_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), + ('organization', models.ForeignKey(related_name='notifiers', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)), + ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')), + ], + ), + migrations.AddField( + model_name='notification', + name='notifier', + field=models.ForeignKey(related_name='notifications', editable=False, to='main.Notifier'), + ), + migrations.AddField( + model_name='activitystream', + name='notification', + field=models.ManyToManyField(to='main.Notification', blank=True), + ), + migrations.AddField( + model_name='activitystream', + name='notifier', + field=models.ManyToManyField(to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='organization', + name='notifiers_any', + field=models.ManyToManyField(related_name='organization_notifiers_for_any', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='organization', + name='notifiers_error', + field=models.ManyToManyField(related_name='organization_notifiers_for_errors', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='organization', + name='notifiers_success', + field=models.ManyToManyField(related_name='organization_notifiers_for_success', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='unifiedjob', + name='notifications', + field=models.ManyToManyField(related_name='unifiedjob_notifications', editable=False, to='main.Notification'), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + name='notifiers_any', + field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_any', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + name='notifiers_error', + field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_errors', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + name='notifiers_success', + field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_success', 
to='main.Notifier', blank=True), + ), + ] diff --git a/awx/main/migrations/0004_v300_changes.py b/awx/main/migrations/0004_v300_changes.py new file mode 100644 index 0000000000..66e523dc78 --- /dev/null +++ b/awx/main/migrations/0004_v300_changes.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import jsonbfield.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0003_v300_changes'), + ] + + operations = [ + migrations.CreateModel( + name='Fact', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)), + ('module', models.CharField(max_length=128)), + ('facts', jsonbfield.fields.JSONField(default={}, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True)), + ('host', models.ForeignKey(related_name='facts', to='main.Host', help_text='Host for the facts that the fact scan captured.')), + ], + ), + migrations.AlterIndexTogether( + name='fact', + index_together=set([('timestamp', 'module', 'host')]), + ), + ] diff --git a/awx/main/migrations/0005_v300_active_field_changes.py b/awx/main/migrations/0005_v300_active_field_changes.py new file mode 100644 index 0000000000..d7582fc5fb --- /dev/null +++ b/awx/main/migrations/0005_v300_active_field_changes.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from awx.main.migrations import _rbac as rbac +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0004_v300_changes'), + ] + + operations = [ + # This is a placeholder for our future active flag removal work + ] diff --git a/awx/main/migrations/0003_rbac_changes.py b/awx/main/migrations/0006_v300_rbac_changes.py similarity index 99% rename from awx/main/migrations/0003_rbac_changes.py rename to awx/main/migrations/0006_v300_rbac_changes.py index 59468e2325..e85421573f 100644 --- a/awx/main/migrations/0003_rbac_changes.py +++ b/awx/main/migrations/0006_v300_rbac_changes.py @@ -14,7 +14,7 @@ class Migration(migrations.Migration): ('taggit', '0002_auto_20150616_2121'), ('contenttypes', '0002_remove_content_type_name'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('main', '0002_v300_changes'), + ('main', '0005_v300_active_field_changes'), ] operations = [ diff --git a/awx/main/migrations/0004_rbac_migrations.py b/awx/main/migrations/0007_v300_rbac_migrations.py similarity index 92% rename from awx/main/migrations/0004_rbac_migrations.py rename to awx/main/migrations/0007_v300_rbac_migrations.py index 62b90a6783..d50069ab48 100644 --- a/awx/main/migrations/0004_rbac_migrations.py +++ b/awx/main/migrations/0007_v300_rbac_migrations.py @@ -8,7 +8,7 @@ from django.db import migrations class Migration(migrations.Migration): dependencies = [ - ('main', '0003_rbac_changes'), + ('main', '0006_v300_rbac_changes'), ] operations = [ diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 41b866f78c..41131f481e 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -20,6 +20,8 @@ from awx.main.models.configuration import * # noqa from awx.main.models.rbac import * # noqa from awx.main.models.user import * # noqa from awx.main.models.mixins import * # noqa +from awx.main.models.notifications import * # 
noqa +from awx.main.models.fact import * # noqa # Monkeypatch Django serializer to ignore django-taggit fields (which break # the dumpdata command; see https://github.com/alex/django-taggit/issues/155). @@ -62,3 +64,5 @@ activity_stream_registrar.connect(AdHocCommand) activity_stream_registrar.connect(Schedule) activity_stream_registrar.connect(CustomInventoryScript) activity_stream_registrar.connect(TowerSettings) +activity_stream_registrar.connect(Notifier) +activity_stream_registrar.connect(Notification) diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index b695831ada..dfada31484 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -53,6 +53,8 @@ class ActivityStream(models.Model): ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) schedule = models.ManyToManyField("Schedule", blank=True) custom_inventory_script = models.ManyToManyField("CustomInventoryScript", blank=True) + notifier = models.ManyToManyField("Notifier", blank=True) + notification = models.ManyToManyField("Notification", blank=True) def get_absolute_url(self): return reverse('api:activity_stream_detail', args=(self.pk,)) diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 664269a188..c5ab627046 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -5,6 +5,7 @@ import hmac import json import logging +from urlparse import urljoin # Django from django.conf import settings @@ -139,6 +140,9 @@ class AdHocCommand(UnifiedJob): def get_absolute_url(self): return reverse('api:ad_hoc_command_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk)) + @property def task_auth_token(self): '''Return temporary auth token used for task requests via API.''' @@ -221,8 +225,9 @@ class AdHocCommandEvent(CreatedModifiedModel): ('runner_on_unreachable', _('Host Unreachable'), True), # Tower won't see no_hosts (check is done earlier without callback). #('runner_on_no_hosts', _('No Hosts Matched'), False), - # Tower should probably never see skipped (no conditionals). - #('runner_on_skipped', _('Host Skipped'), False), + # Tower will see skipped (when running in check mode for a module that + # does not support check mode). + ('runner_on_skipped', _('Host Skipped'), False), # Tower does not support async for ad hoc commands. 
#('runner_on_async_poll', _('Host Polling'), False), #('runner_on_async_ok', _('Host Async OK'), False), diff --git a/awx/main/models/base.py b/awx/main/models/base.py index 61515d7d18..c4edfbd8ba 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -25,7 +25,7 @@ from awx.main.utils import encrypt_field __all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel', 'PasswordFieldsModel', 'PrimordialModel', 'CommonModel', - 'CommonModelNameNotUnique', + 'CommonModelNameNotUnique', 'NotificationFieldsModel', 'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ', 'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN', 'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES', @@ -337,3 +337,26 @@ class CommonModelNameNotUnique(PrimordialModel): max_length=512, unique=False, ) + +class NotificationFieldsModel(BaseModel): + + class Meta: + abstract = True + + notifiers_error = models.ManyToManyField( + "Notifier", + blank=True, + related_name='%(class)s_notifiers_for_errors' + ) + + notifiers_success = models.ManyToManyField( + "Notifier", + blank=True, + related_name='%(class)s_notifiers_for_success' + ) + + notifiers_any = models.ManyToManyField( + "Notifier", + blank=True, + related_name='%(class)s_notifiers_for_any' + ) diff --git a/awx/main/models/credential.py b/awx/main/models/credential.py index cf2dd262ed..ec47cb1fbb 100644 --- a/awx/main/models/credential.py +++ b/awx/main/models/credential.py @@ -157,11 +157,13 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): ) owner_role = ImplicitRoleField( role_name='Credential Owner', + role_description='Owner of the credential', parent_role='team.admin_role', permissions = {'all': True} ) usage_role = ImplicitRoleField( role_name='Credential User', + role_description='May use this credential, but not read sensitive portions or modify it', parent_role= 'team.member_role', permissions = {'use': True} ) diff --git a/awx/main/models/fact.py b/awx/main/models/fact.py new file mode 100644 index 0000000000..16a67eb45e --- /dev/null +++ b/awx/main/models/fact.py @@ -0,0 +1,64 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from jsonbfield.fields import JSONField + +__all__ = ('Fact', ) + +class Fact(models.Model): + """A model representing a fact returned from Ansible. + Facts are stored as JSON dictionaries. 
+ """ + host = models.ForeignKey( + 'Host', + related_name='facts', + db_index=True, + on_delete=models.CASCADE, + help_text=_('Host for the facts that the fact scan captured.'), + ) + timestamp = models.DateTimeField( + default=None, + editable=False, + help_text=_('Date and time of the corresponding fact scan gathering time.') + ) + module = models.CharField(max_length=128) + facts = JSONField(blank=True, default={}, help_text=_('Arbitrary JSON structure of module facts captured at timestamp for a single host.')) + + class Meta: + app_label = 'main' + index_together = [ + ["timestamp", "module", "host"], + ] + + @staticmethod + def get_host_fact(host_id, module, timestamp): + qs = Fact.objects.filter(host__id=host_id, module=module, timestamp__lte=timestamp).order_by('-timestamp') + if qs: + return qs[0] + else: + return None + + @staticmethod + def get_timeline(host_id, module=None, ts_from=None, ts_to=None): + kwargs = { + 'host__id': host_id, + } + if module: + kwargs['module'] = module + if ts_from and ts_to and ts_from == ts_to: + kwargs['timestamp'] = ts_from + else: + if ts_from: + kwargs['timestamp__gt'] = ts_from + if ts_to: + kwargs['timestamp__lte'] = ts_to + return Fact.objects.filter(**kwargs).order_by('-timestamp').only('timestamp', 'module').order_by('-timestamp', 'module') + + @staticmethod + def add_fact(host_id, module, timestamp, facts): + fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts) + fact_obj.save() + return fact_obj diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 17b51ca923..32175b19d9 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -6,6 +6,7 @@ import datetime import logging import re import copy +from urlparse import urljoin # Django from django.conf import settings @@ -24,7 +25,9 @@ from awx.main.models.base import * # noqa from awx.main.models.jobs import Job from awx.main.models.unified_jobs import * # noqa from awx.main.models.mixins import ResourceMixin +from awx.main.models.notifications import Notifier from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates +from awx.main.conf import tower_settings __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript'] @@ -95,19 +98,23 @@ class Inventory(CommonModel, ResourceMixin): ) admin_role = ImplicitRoleField( role_name='Inventory Administrator', + role_description='May manage this inventory', parent_role='organization.admin_role', permissions = {'all': True} ) auditor_role = ImplicitRoleField( role_name='Inventory Auditor', + role_description='May view but not modify this inventory', parent_role='organization.auditor_role', permissions = {'read': True} ) updater_role = ImplicitRoleField( role_name='Inventory Updater', + role_description='May update the inventory', ) executor_role = ImplicitRoleField( role_name='Inventory Executor', + role_description='May execute jobs against this inventory', ) def get_absolute_url(self): @@ -1217,6 +1224,14 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin) return True return False + @property + def notifiers(self): + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization)) + success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization)) + any_notifiers = 
list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization)) + return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) + def clean_source(self): source = self.source if source and self.group: @@ -1276,6 +1291,9 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions): def get_absolute_url(self): return reverse('api:inventory_update_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/inventory_sync/{}".format(self.pk)) + def is_blocked_by(self, obj): if type(obj) == InventoryUpdate: if self.inventory_source.inventory == obj.inventory_source.inventory: diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 17ab41dbe4..ba0170bf69 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -6,6 +6,7 @@ import hmac import json import yaml import logging +from urlparse import urljoin # Django from django.conf import settings @@ -22,6 +23,7 @@ from jsonfield import JSONField from awx.main.constants import CLOUD_PROVIDERS from awx.main.models.base import * # noqa from awx.main.models.unified_jobs import * # noqa +from awx.main.models.notifications import Notifier from awx.main.utils import decrypt_field, ignore_inventory_computed_fields from awx.main.utils import emit_websocket_notification from awx.main.redact import PlainTextCleaner @@ -183,16 +185,19 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): ) admin_role = ImplicitRoleField( role_name='Job Template Administrator', + role_description='Full access to all settings', parent_role='project.admin_role', permissions = {'all': True} ) auditor_role = ImplicitRoleField( role_name='Job Template Auditor', + role_description='Read-only access to all settings', parent_role='project.auditor_role', permissions = {'read': True} ) executor_role = ImplicitRoleField( - role_name='Job Template Executor', + role_name='Job Template Runner', + role_description='May run the job template', permissions = {'read': True, 'execute': True} ) @@ -347,6 +352,20 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): def _can_update(self): return self.can_start_without_user_input() + @property + def notifiers(self): + # Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type + # TODO: Currently there is no org fk on project so this will need to be added once that is + # available after the rbac pr + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project])) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project])) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project])) + # Get Organization Notifiers + error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.project.organizations.all()))) + success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.project.organizations.all()))) + any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.project.organizations.all()))) + return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers)) class Job(UnifiedJob, JobOptions): ''' @@ -386,6 +405,9 @@ class Job(UnifiedJob, JobOptions): def get_absolute_url(self): 
return reverse('api:job_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk)) + @property def task_auth_token(self): '''Return temporary auth token used for task requests via API.''' @@ -502,6 +524,26 @@ class Job(UnifiedJob, JobOptions): dependencies.append(source.create_inventory_update(launch_type='dependency')) return dependencies + def notification_data(self): + data = super(Job, self).notification_data() + all_hosts = {} + for h in self.job_host_summaries.all(): + all_hosts[h.host.name] = dict(failed=h.failed, + changed=h.changed, + dark=h.dark, + failures=h.failures, + ok=h.ok, + processed=h.processed, + skipped=h.skipped) + data.update(dict(inventory=self.inventory.name, + project=self.project.name, + playbook=self.playbook, + credential=self.credential.name, + limit=self.limit, + extra_vars=self.extra_vars, + hosts=all_hosts)) + return data + def handle_extra_data(self, extra_data): extra_vars = {} if isinstance(extra_data, dict): @@ -1082,6 +1124,9 @@ class SystemJob(UnifiedJob, SystemJobOptions): def get_absolute_url(self): return reverse('api:system_job_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/management_jobs/{}".format(self.pk)) + def is_blocked_by(self, obj): return True diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py new file mode 100644 index 0000000000..29a51cf9ac --- /dev/null +++ b/awx/main/models/notifications.py @@ -0,0 +1,172 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging + +from django.db import models +from django.core.urlresolvers import reverse +from django.core.mail.message import EmailMessage +from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str + +from awx.main.models.base import * # noqa +from awx.main.utils import encrypt_field, decrypt_field +from awx.main.notifications.email_backend import CustomEmailBackend +from awx.main.notifications.slack_backend import SlackBackend +from awx.main.notifications.twilio_backend import TwilioBackend +from awx.main.notifications.pagerduty_backend import PagerDutyBackend +from awx.main.notifications.hipchat_backend import HipChatBackend +from awx.main.notifications.webhook_backend import WebhookBackend +from awx.main.notifications.irc_backend import IrcBackend + +# Django-JSONField +from jsonfield import JSONField + +logger = logging.getLogger('awx.main.models.notifications') + +__all__ = ['Notifier', 'Notification'] + +class Notifier(CommonModel): + + NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend), + ('slack', _('Slack'), SlackBackend), + ('twilio', _('Twilio'), TwilioBackend), + ('pagerduty', _('Pagerduty'), PagerDutyBackend), + ('hipchat', _('HipChat'), HipChatBackend), + ('webhook', _('Webhook'), WebhookBackend), + ('irc', _('IRC'), IrcBackend)] + NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES] + CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES]) + + class Meta: + app_label = 'main' + + organization = models.ForeignKey( + 'Organization', + blank=False, + null=True, + on_delete=models.SET_NULL, + related_name='notifiers', + ) + + notification_type = models.CharField( + max_length = 32, + choices=NOTIFICATION_TYPE_CHOICES, + ) + + notification_configuration = JSONField(blank=False) + + def get_absolute_url(self): + return reverse('api:notifier_detail', args=(self.pk,)) + + @property + def 
notification_class(self): + return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] + + def save(self, *args, **kwargs): + new_instance = not bool(self.pk) + update_fields = kwargs.get('update_fields', []) + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + if new_instance: + value = self.notification_configuration[field] + setattr(self, '_saved_{}_{}'.format("config", field), value) + self.notification_configuration[field] = '' + else: + encrypted = encrypt_field(self, 'notification_configuration', subfield=field) + self.notification_configuration[field] = encrypted + if 'notification_configuration' not in update_fields: + update_fields.append('notification_configuration') + super(Notifier, self).save(*args, **kwargs) + if new_instance: + update_fields = [] + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '') + self.notification_configuration[field] = saved_value + #setattr(self.notification_configuration, field, saved_value) + if 'notification_configuration' not in update_fields: + update_fields.append('notification_configuration') + self.save(update_fields=update_fields) + + @property + def recipients(self): + return self.notification_configuration[self.notification_class.recipient_parameter] + + def generate_notification(self, subject, message): + notification = Notification(notifier=self, + notification_type=self.notification_type, + recipients=smart_str(self.recipients), + subject=subject, + body=message) + notification.save() + return notification + + def send(self, subject, body): + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + self.notification_configuration[field] = decrypt_field(self, + 'notification_configuration', + subfield=field) + recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter) + if not isinstance(recipients, list): + recipients = [recipients] + sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None) + backend_obj = self.notification_class(**self.notification_configuration) + notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients) + return backend_obj.send_messages([notification_obj]) + +class Notification(CreatedModifiedModel): + ''' + A notification event emitted when a Notifier is run + ''' + + NOTIFICATION_STATE_CHOICES = [ + ('pending', _('Pending')), + ('successful', _('Successful')), + ('failed', _('Failed')), + ] + + class Meta: + app_label = 'main' + ordering = ('pk',) + + notifier = models.ForeignKey( + 'Notifier', + related_name='notifications', + on_delete=models.CASCADE, + editable=False + ) + status = models.CharField( + max_length=20, + choices=NOTIFICATION_STATE_CHOICES, + default='pending', + editable=False, + ) + error = models.TextField( + blank=True, + default='', + editable=False, + ) + notifications_sent = models.IntegerField( + default=0, + editable=False, + ) + notification_type = models.CharField( + max_length = 32, + choices=Notifier.NOTIFICATION_TYPE_CHOICES, + ) + recipients = models.TextField( + blank=True, + default='', + editable=False, + ) + subject = models.TextField( + blank=True, + default='', + editable=False, + ) + body = JSONField(blank=True) + + def 
get_absolute_url(self): + return reverse('api:notification_detail', args=(self.pk,)) diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index 9709c2d3d0..d1e7ae3c86 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -29,7 +29,7 @@ from awx.main.conf import tower_settings __all__ = ['Organization', 'Team', 'Permission', 'Profile', 'AuthToken'] -class Organization(CommonModel, ResourceMixin): +class Organization(CommonModel, NotificationFieldsModel, ResourceMixin): ''' An organization is the basic unit of multi-tenancy divisions ''' @@ -55,16 +55,20 @@ class Organization(CommonModel, ResourceMixin): ) admin_role = ImplicitRoleField( role_name='Organization Administrator', + role_description='May manage all aspects of this organization', parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, permissions = ALL_PERMISSIONS, ) auditor_role = ImplicitRoleField( role_name='Organization Auditor', + role_description='May read all settings associated with this organization', parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR, permissions = {'read': True} ) member_role = ImplicitRoleField( role_name='Organization Member', + role_description='A member of this organization', + parent_role='admin_role', permissions = {'read': True} ) @@ -111,16 +115,19 @@ class Team(CommonModelNameNotUnique, ResourceMixin): ) admin_role = ImplicitRoleField( role_name='Team Administrator', + role_description='May manage this team', parent_role='organization.admin_role', permissions = ALL_PERMISSIONS, ) auditor_role = ImplicitRoleField( role_name='Team Auditor', + role_description='May read all settings associated with this team', parent_role='organization.auditor_role', permissions = {'read': True} ) member_role = ImplicitRoleField( role_name='Team Member', + role_description='A member of this team', parent_role='admin_role', permissions = {'read':True}, ) diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index d0fd122584..4bb66c24d6 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -20,10 +20,12 @@ from django.utils.timezone import now, make_aware, get_default_timezone from awx.lib.compat import slugify from awx.main.models.base import * # noqa from awx.main.models.jobs import Job +from awx.main.models.notifications import Notifier from awx.main.models.unified_jobs import * # noqa from awx.main.models.mixins import ResourceMixin from awx.main.utils import update_scm_url from awx.main.fields import ImplicitRoleField +from awx.main.conf import tower_settings __all__ = ['Project', 'ProjectUpdate'] @@ -209,20 +211,24 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin): ) admin_role = ImplicitRoleField( role_name='Project Administrator', + role_description='May manage this project', parent_role='organizations.admin_role', permissions = {'all': True} ) auditor_role = ImplicitRoleField( role_name='Project Auditor', + role_description='May read all settings associated with this project', parent_role='organizations.auditor_role', permissions = {'read': True} ) member_role = ImplicitRoleField( role_name='Project Member', + role_description='Implies membership within this project', permissions = {'read': True} ) scm_update_role = ImplicitRoleField( role_name='Project Updater', + role_description='May update this project from the source control management system', parent_role='admin_role', permissions = {'scm_update': True} ) @@ -330,6 +336,18 @@ class Project(UnifiedJobTemplate, 
ProjectOptions, ResourceMixin): return True return False + @property + def notifiers(self): + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self)) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self)) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self)) + # Get Organization Notifiers + error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.organizations.all()))) + success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.organizations.all()))) + any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.organizations.all()))) + return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers)) + def get_absolute_url(self): return reverse('api:project_detail', args=(self.pk,)) @@ -391,6 +409,9 @@ class ProjectUpdate(UnifiedJob, ProjectOptions): def get_absolute_url(self): return reverse('api:project_update_detail', args=(self.pk,)) + def get_ui_url(self): + return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/scm_update/{}".format(self.pk)) + def _update_parent_instance(self): parent_instance = self._get_parent_instance() if parent_instance: diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index b7b6d50f3e..d9b81c5d42 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -6,6 +6,7 @@ import logging # Django from django.db import models +from django.db.models import Q from django.db.models.aggregates import Max from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ @@ -128,6 +129,10 @@ class Role(CommonModelNameNotUnique): setattr(permission, k, int(permissions[k])) permission.save() + @staticmethod + def visible_roles(user): + return Role.objects.filter(Q(descendents__in=user.roles.filter()) | Q(ancestors__in=user.roles.filter())) + @staticmethod def singleton(name): try: diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 9b4be868c3..3750ccf41e 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -17,6 +17,7 @@ from django.db import models from django.core.exceptions import NON_FIELD_ERRORS from django.utils.translation import ugettext_lazy as _ from django.utils.timezone import now +from django.utils.encoding import smart_text # Django-JSONField from jsonfield import JSONField @@ -40,7 +41,7 @@ logger = logging.getLogger('awx.main.models.unified_jobs') CAN_CANCEL = ('new', 'pending', 'waiting', 'running') -class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique): +class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel): ''' Concrete base class for unified job templates. ''' @@ -297,6 +298,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique): ''' return kwargs # Override if needed in subclass. + @property + def notifiers(self): + ''' + Return notifiers relevant to this Unified Job Template + ''' + # NOTE: Derived classes should implement + return Notifier.objects.none() + def create_unified_job(self, **kwargs): ''' Create a new unified job based on this unified job template. 
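
A note on the `notifiers` resolution pattern above: the base `UnifiedJobTemplate.notifiers` property returns an empty queryset, and each subclass touched by this patch (`JobTemplate`, `Project`, `InventorySource`) overrides it to union its own notifiers with those attached to the owning organization, per trigger type. A minimal, self-contained sketch of that merge follows; the names are illustrative only, not part of the patch:

```python
# Sketch only: mirrors the merge performed by Project.notifiers and
# JobTemplate.notifiers above. `own` and `org` map a trigger name
# ('error', 'success', 'any') to lists of notifier-like objects;
# set() collapses duplicates, exactly as the set(...) calls in the
# properties do.
def resolve_notifiers(own, org):
    merged = {}
    for trigger in ('error', 'success', 'any'):
        merged[trigger] = list(set(own.get(trigger, []) + org.get(trigger, [])))
    return merged

# A notifier attached both to the template and to the organization
# is reported only once:
own = {'error': ['slack-ops'], 'any': ['email-admins']}
org = {'error': ['email-admins', 'slack-ops'], 'success': ['slack-ops']}
print(resolve_notifiers(own, org))
```
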
@@ -385,6 +394,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique editable=False, related_name='%(class)s_blocked_jobs+', ) + notifications = models.ManyToManyField( + 'Notification', + editable=False, + related_name='%(class)s_notifications', + ) cancel_flag = models.BooleanField( blank=True, default=False, @@ -470,6 +484,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique else: return '' + def get_ui_url(self): + real_instance = self.get_real_instance() + if real_instance != self: + return real_instance.get_ui_url() + else: + return '' + @classmethod def _get_task_class(cls): raise NotImplementedError # Implement in subclasses. @@ -717,7 +738,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique tasks that might preclude creating one''' return [] - def start(self, error_callback, **kwargs): + def notification_data(self): + return dict(id=self.id, + name=self.name, + url=self.get_ui_url(), + created_by=smart_text(self.created_by), + started=self.started.isoformat(), + finished=self.finished.isoformat(), + status=self.status, + traceback=self.result_traceback) + + def start(self, error_callback, success_callback, **kwargs): ''' Start the task running via Celery. ''' @@ -743,7 +774,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # if field not in needed]) if 'extra_vars' in kwargs: self.handle_extra_data(kwargs['extra_vars']) - task_class().apply_async((self.pk,), opts, link_error=error_callback) + task_class().apply_async((self.pk,), opts, link_error=error_callback, link=success_callback) return True def signal_start(self, **kwargs): @@ -765,7 +796,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # Sanity check: If we are running unit tests, then run synchronously. if getattr(settings, 'CELERY_UNIT_TEST', False): - return self.start(None, **kwargs) + return self.start(None, None, **kwargs) # Save the pending status, and inform the SocketIO listener. self.update_fields(start_args=json.dumps(kwargs), status='pending') diff --git a/awx/main/models/user.py b/awx/main/models/user.py index c30696bdb1..fad82ba182 100644 --- a/awx/main/models/user.py +++ b/awx/main/models/user.py @@ -26,5 +26,6 @@ class UserResource(CommonModelNameNotUnique, ResourceMixin): admin_role = ImplicitRoleField( role_name='User Administrator', + role_description='May manage this user', permissions = {'all': True}, ) diff --git a/awx/main/notifications/__init__.py b/awx/main/notifications/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/main/notifications/base.py b/awx/main/notifications/base.py new file mode 100644 index 0000000000..8129c33e27 --- /dev/null +++ b/awx/main/notifications/base.py @@ -0,0 +1,20 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import pprint + +from django.utils.encoding import smart_text +from django.core.mail.backends.base import BaseEmailBackend + +class TowerBaseEmailBackend(BaseEmailBackend): + + def format_body(self, body): + if "body" in body: + body_actual = body['body'] + else: + body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url'])) + body_actual += pprint.pformat(body, indent=4) + return body_actual diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py new file mode 100644 index 0000000000..9a9d0a9e2d --- /dev/null +++ b/awx/main/notifications/email_backend.py @@ -0,0 +1,28 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import pprint + +from django.utils.encoding import smart_text +from django.core.mail.backends.smtp import EmailBackend + +class CustomEmailBackend(EmailBackend): + + init_parameters = {"host": {"label": "Host", "type": "string"}, + "port": {"label": "Port", "type": "int"}, + "username": {"label": "Username", "type": "string"}, + "password": {"label": "Password", "type": "password"}, + "use_tls": {"label": "Use TLS", "type": "bool"}, + "use_ssl": {"label": "Use SSL", "type": "bool"}, + "sender": {"label": "Sender Email", "type": "string"}, + "recipients": {"label": "Recipient List", "type": "list"}} + recipient_parameter = "recipients" + sender_parameter = "sender" + + def format_body(self, body): + body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url'])) + body_actual += pprint.pformat(body, indent=4) + return body_actual diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py new file mode 100644 index 0000000000..420ef928fa --- /dev/null +++ b/awx/main/notifications/hipchat_backend.py @@ -0,0 +1,49 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import logging + +import requests + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.hipchat_backend') + +class HipChatBackend(TowerBaseEmailBackend): + + init_parameters = {"token": {"label": "Token", "type": "password"}, + "channels": {"label": "Destination Channels", "type": "list"}, + "color": {"label": "Notification Color", "type": "string"}, + "api_url": {"label": "API Url (e.g: https://mycompany.hipchat.com)", "type": "string"}, + "notify": {"label": "Notify channel", "type": "bool"}, + "message_from": {"label": "Label to be shown with notification", "type": "string"}} + recipient_parameter = "channels" + sender_parameter = "message_from" + + def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs): + super(HipChatBackend, self).__init__(fail_silently=fail_silently) + self.token = token + self.color = color + self.api_url = api_url + self.notify = notify + + def send_messages(self, messages): + sent_messages = 0 + + for m in messages: + for rcp in m.recipients(): + r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp), + params={"auth_token": self.token}, + json={"color": self.color, + "message": m.subject, + "notify": self.notify, + "from": m.from_email, + "message_format": "text"}) + if r.status_code != 204: + logger.error(smart_text("Error sending messages: {}".format(r.text))) + if not self.fail_silently: + raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text))) + sent_messages += 1 + return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py new file mode 100644 index 0000000000..61158bbe5d --- /dev/null +++ b/awx/main/notifications/irc_backend.py @@ -0,0 +1,95 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import time +import ssl +import logging + +import irc.client + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.irc_backend') + +class IrcBackend(TowerBaseEmailBackend): + + init_parameters = {"server": {"label": "IRC Server Address", "type": "string"}, + "port": {"label": "IRC Server Port", "type": "int"}, + "nickname": {"label": "IRC Nick", "type": "string"}, + "password": {"label": "IRC Server Password", "type": "password"}, + "use_ssl": {"label": "SSL Connection", "type": "bool"}, + "targets": {"label": "Destination Channels or Users", "type": "list"}} + recipient_parameter = "targets" + sender_parameter = None + + def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs): + super(IrcBackend, self).__init__(fail_silently=fail_silently) + self.server = server + self.port = port + self.nickname = nickname + self.password = password if password != "" else None + self.use_ssl = use_ssl + self.connection = None + + def open(self): + if self.connection is not None: + return False + if self.use_ssl: + connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket) + else: + connection_factory = irc.connection.Factory() + try: + self.reactor = irc.client.Reactor() + self.connection = self.reactor.server().connect( + self.server, + self.port, + self.nickname, + password=self.password, + connect_factory=connection_factory, + ) + except irc.client.ServerConnectionError as e: + logger.error(smart_text("Exception connecting to irc server: {}".format(e))) + if not self.fail_silently: + raise + return True + + def close(self): + if self.connection is None: + return + self.connection = None + + def on_connect(self, connection, event): + for c in self.channels: + if irc.client.is_channel(c): + connection.join(c) + else: + for m in self.channels[c]: + connection.privmsg(c, m.subject) + self.channels_sent += 1 + + def on_join(self, connection, event): + for m in self.channels[event.target]: + connection.privmsg(event.target, m.subject) + self.channels_sent += 1 + + def send_messages(self, messages): + if self.connection is None: + self.open() + self.channels = {} + self.channels_sent = 0 + for m in messages: + for r in m.recipients(): + if r not in self.channels: + self.channels[r] = [] + self.channels[r].append(m) + self.connection.add_global_handler("welcome", self.on_connect) + self.connection.add_global_handler("join", self.on_join) + start_time = time.time() + process_time = time.time() + while self.channels_sent < len(self.channels) and (process_time - start_time) < 60: + self.reactor.process_once(0.1) + process_time = time.time() + self.reactor.disconnect_all() + return self.channels_sent diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py new file mode 100644 index 0000000000..af6b95cfd6 --- /dev/null +++ b/awx/main/notifications/pagerduty_backend.py @@ -0,0 +1,49 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+
+import logging
+import pygerduty
+
+from django.utils.encoding import smart_text
+
+from awx.main.notifications.base import TowerBaseEmailBackend
+
+logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
+
+class PagerDutyBackend(TowerBaseEmailBackend):
+
+    init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
+                       "token": {"label": "API Token", "type": "password"},
+                       "service_key": {"label": "API Service/Integration Key", "type": "string"},
+                       "client_name": {"label": "Client Identifier", "type": "string"}}
+    recipient_parameter = "service_key"
+    sender_parameter = "client_name"
+
+    def __init__(self, subdomain, token, fail_silently=False, **kwargs):
+        super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
+        self.subdomain = subdomain
+        self.token = token
+
+    def format_body(self, body):
+        return body
+
+    def send_messages(self, messages):
+        sent_messages = 0
+
+        try:
+            pager = pygerduty.PagerDuty(self.subdomain, self.token)
+        except Exception as e:
+            if not self.fail_silently:
+                raise
+            logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e)))
+        for m in messages:
+            try:
+                pager.trigger_incident(m.recipients()[0],
+                                       description=m.subject,
+                                       details=m.body,
+                                       client=m.from_email)
+                sent_messages += 1
+            except Exception as e:
+                logger.error(smart_text("Exception sending messages: {}".format(e)))
+                if not self.fail_silently:
+                    raise
+        return sent_messages
diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py
new file mode 100644
index 0000000000..00f23ed60c
--- /dev/null
+++ b/awx/main/notifications/slack_backend.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+
+import logging
+from slackclient import SlackClient
+
+from django.utils.encoding import smart_text
+
+from awx.main.notifications.base import TowerBaseEmailBackend
+
+logger = logging.getLogger('awx.main.notifications.slack_backend')
+
+class SlackBackend(TowerBaseEmailBackend):
+
+    init_parameters = {"token": {"label": "Token", "type": "password"},
+                       "channels": {"label": "Destination Channels", "type": "list"}}
+    recipient_parameter = "channels"
+    sender_parameter = None
+
+    def __init__(self, token, fail_silently=False, **kwargs):
+        super(SlackBackend, self).__init__(fail_silently=fail_silently)
+        self.token = token
+        self.connection = None
+
+    def open(self):
+        if self.connection is not None:
+            return False
+        self.connection = SlackClient(self.token)
+        if not self.connection.rtm_connect():
+            if not self.fail_silently:
+                raise Exception("Slack Notification Token is invalid")
+        return True
+
+    def close(self):
+        if self.connection is None:
+            return
+        self.connection = None
+
+    def send_messages(self, messages):
+        if self.connection is None:
+            self.open()
+        sent_messages = 0
+        for m in messages:
+            try:
+                for r in m.recipients():
+                    self.connection.rtm_send_message(r, m.subject)
+                    sent_messages += 1
+            except Exception as e:
+                logger.error(smart_text("Exception sending messages: {}".format(e)))
+                if not self.fail_silently:
+                    raise
+        return sent_messages
diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py
new file mode 100644
index 0000000000..df411c68c5
--- /dev/null
+++ b/awx/main/notifications/twilio_backend.py
@@ -0,0 +1,48 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+ +import logging + +from twilio.rest import TwilioRestClient + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.twilio_backend') + +class TwilioBackend(TowerBaseEmailBackend): + + init_parameters = {"account_sid": {"label": "Account SID", "type": "string"}, + "account_token": {"label": "Account Token", "type": "password"}, + "from_number": {"label": "Source Phone Number", "type": "string"}, + "to_numbers": {"label": "Destination SMS Numbers", "type": "list"}} + recipient_parameter = "to_numbers" + sender_parameter = "from_number" + + def __init__(self, account_sid, account_token, fail_silently=False, **kwargs): + super(TwilioBackend, self).__init__(fail_silently=fail_silently) + self.account_sid = account_sid + self.account_token = account_token + + def send_messages(self, messages): + sent_messages = 0 + try: + connection = TwilioRestClient(self.account_sid, self.account_token) + except Exception as e: + if not self.fail_silently: + raise + logger.error(smart_text("Exception connecting to Twilio: {}".format(e))) + + for m in messages: + try: + connection.messages.create( + to=m.to, + from_=m.from_email, + body=m.subject) + sent_messages += 1 + except Exception as e: + logger.error(smart_text("Exception sending messages: {}".format(e))) + if not self.fail_silently: + raise + return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py new file mode 100644 index 0000000000..52d85483ab --- /dev/null +++ b/awx/main/notifications/webhook_backend.py @@ -0,0 +1,39 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging +import requests +import json + +from django.utils.encoding import smart_text + +from awx.main.notifications.base import TowerBaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.webhook_backend') + +class WebhookBackend(TowerBaseEmailBackend): + + init_parameters = {"url": {"label": "Target URL", "type": "string"}, + "headers": {"label": "HTTP Headers", "type": "object"}} + recipient_parameter = "url" + sender_parameter = None + + def __init__(self, headers, fail_silently=False, **kwargs): + self.headers = headers + super(WebhookBackend, self).__init__(fail_silently=fail_silently) + + def format_body(self, body): + return body + + def send_messages(self, messages): + sent_messages = 0 + for m in messages: + r = requests.post("{}".format(m.recipients()[0]), + data=json.dumps(m.body), + headers=self.headers) + if r.status_code >= 400: + logger.error(smart_text("Error sending notification webhook: {}".format(r.text))) + if not self.fail_silently: + raise Exception(smart_text("Error sending notification webhook: {}".format(r.text))) + sent_messages += 1 + return sent_messages diff --git a/awx/main/signals.py b/awx/main/signals.py index 01b2bb9d34..7421de462c 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -332,6 +332,8 @@ model_serializer_mapping = { Job: JobSerializer, AdHocCommand: AdHocCommandSerializer, TowerSettings: TowerSettingsSerializer, + Notifier: NotifierSerializer, + Notification: NotificationSerializer, } def activity_stream_create(sender, instance, created, **kwargs): diff --git a/awx/main/tasks.py b/awx/main/tasks.py index acfe2022ae..3942cc78bb 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -39,6 +39,9 @@ from celery import Task, task from django.conf import settings from django.db import transaction, DatabaseError from 
django.utils.timezone import now
+from django.utils.encoding import smart_text
+from django.core.mail import send_mail
+from django.contrib.auth.models import User
 
 # AWX
 from awx.lib.metrics import task_timer
@@ -46,13 +49,14 @@ from awx.main.constants import CLOUD_PROVIDERS
 from awx.main.models import * # noqa
 from awx.main.queue import FifoQueue
 from awx.main.conf import tower_settings
+from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL
 from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
                             ignore_inventory_computed_fields, emit_websocket_notification,
                             check_proot_installed, build_proot_temp_dir, wrap_args_with_proot)
-from awx.fact.utils.connection import test_mongo_connection
 
 __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
-           'RunAdHocCommand', 'handle_work_error', 'update_inventory_computed_fields']
+           'RunAdHocCommand', 'handle_work_error', 'handle_work_success',
+           'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks']
 
 HIDDEN_PASSWORD = '**********'
@@ -64,6 +68,48 @@ Try upgrading OpenSSH or providing your private key in an different format. \
 
 logger = logging.getLogger('awx.main.tasks')
 
+@task()
+def send_notifications(notification_list, job_id=None):
+    if not isinstance(notification_list, list):
+        raise TypeError("notification_list should be of type list")
+    if job_id is not None:
+        job_actual = UnifiedJob.objects.get(id=job_id)
+    for notification_id in notification_list:
+        notification = Notification.objects.get(id=notification_id)
+        try:
+            sent = notification.notifier.send(notification.subject, notification.body)
+            notification.status = "successful"
+            notification.notifications_sent = sent
+        except Exception as e:
+            logger.error("Send Notification Failed {}".format(e))
+            notification.status = "failed"
+            notification.error = smart_text(e)
+        finally:
+            notification.save()
+        if job_id is not None:
+            job_actual.notifications.add(notification)
+
+@task(bind=True)
+def run_administrative_checks(self):
+    if not tower_settings.TOWER_ADMIN_ALERTS:
+        return
+    reader = TaskSerializer()
+    validation_info = reader.from_database()
+    if validation_info.get('instance_count', 0) < 1:
+        return
+    used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100))
+    tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
+    if (used_percentage * 100) > 90:
+        send_mail("Ansible Tower host usage over 90%",
+                  "Ansible Tower host usage over 90%",
+                  None,
+                  tower_admin_emails,
+                  fail_silently=True)
+    if validation_info.get('time_remaining', 0) < TASK_TIMEOUT_INTERVAL:
+        send_mail("Ansible Tower license will expire soon",
+                  "Ansible Tower license will expire soon",
+                  None,
+                  tower_admin_emails,
+                  fail_silently=True)
+
 @task()
 def bulk_inventory_element_delete(inventory, hosts=[], groups=[]):
     from awx.main.signals import disable_activity_stream
@@ -134,7 +180,6 @@ def notify_task_runner(metadata_dict):
     queue = FifoQueue('tower_task_manager')
     queue.push(metadata_dict)
 
-
 @task()
 def mongodb_control(cmd):
     # Sanity check: Do not send arbitrary commands.
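
A note on the two handlers that follow: `handle_work_success` is new in this patch and mirrors the pre-existing `handle_work_error`. It is attached as a Celery `link` callback when a job is dispatched (see the `UnifiedJob.start` change earlier in this diff, which passes `link=success_callback` to `apply_async`), so Celery prepends the parent task's return value to the arguments baked into the signature. A minimal sketch of that wiring; the task names here are illustrative, and the real handlers also use `bind=True`, omitted for brevity:

```python
# Sketch only: how a Celery link callback receives its arguments.
from celery import Celery

app = Celery('sketch')

@app.task
def run_job(job_id):
    # stand-in for RunJob and friends; returning normally counts as success
    return job_id

@app.task
def on_success(result, task_actual):
    # stand-in for handle_work_success: `result` is run_job's return value,
    # prepended by Celery; task_actual was baked in at dispatch time
    print('%s #%s succeeded' % (task_actual['type'], task_actual['id']))

# Real dispatch, as in UnifiedJob.start() -- link= fires only on success,
# link_error= only on failure (requires a running broker):
#   run_job.apply_async((42,), link=on_success.s(task_actual={'type': 'job', 'id': 42}))
# Inline equivalent of what the worker does after a successful run:
on_success(run_job(42), task_actual={'type': 'job', 'id': 42})
```

Note also that `send_notifications` re-fetches each row with `Notification.objects.get(id=...)`, so both handlers must pass `generate_notification(...).id` values, not the `Notification` objects themselves.
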
@@ -159,6 +204,39 @@ def mongodb_control(cmd):
     p = subprocess.Popen('sudo mongod --shutdown -f /etc/mongod.conf', shell=True)
     p.wait()
 
+@task(bind=True)
+def handle_work_success(self, result, task_actual):
+    if task_actual['type'] == 'project_update':
+        instance = ProjectUpdate.objects.get(id=task_actual['id'])
+        instance_name = instance.name
+        notifiers = instance.project.notifiers
+        friendly_name = "Project Update"
+    elif task_actual['type'] == 'inventory_update':
+        instance = InventoryUpdate.objects.get(id=task_actual['id'])
+        instance_name = instance.name
+        notifiers = instance.inventory_source.notifiers
+        friendly_name = "Inventory Update"
+    elif task_actual['type'] == 'job':
+        instance = Job.objects.get(id=task_actual['id'])
+        instance_name = instance.job_template.name
+        notifiers = instance.job_template.notifiers
+        friendly_name = "Job"
+    elif task_actual['type'] == 'ad_hoc_command':
+        instance = AdHocCommand.objects.get(id=task_actual['id'])
+        instance_name = instance.module_name
+        notifiers = {}  # TODO: Ad-hoc commands need to notify someone; a dict so the .get() calls below work
+        friendly_name = "AdHoc Command"
+    else:
+        return
+    notification_body = instance.notification_data()
+    notification_subject = "{} #{} '{}' succeeded on Ansible Tower: {}".format(friendly_name,
+                                                                               task_actual['id'],
+                                                                               instance_name,
+                                                                               notification_body['url'])
+    notification_body['friendly_name'] = friendly_name
+    send_notifications.delay([n.generate_notification(notification_subject, notification_body).id
+                              for n in set(notifiers.get('success', []) + notifiers.get('any', []))],
+                             job_id=task_actual['id'])
+
 @task(bind=True)
 def handle_work_error(self, task_id, subtasks=None):
     print('Executing error task id %s, subtasks: %s' %
@@ -173,15 +251,23 @@ def handle_work_error(self, task_id, subtasks=None):
         if each_task['type'] == 'project_update':
             instance = ProjectUpdate.objects.get(id=each_task['id'])
             instance_name = instance.name
+            notifiers = instance.project.notifiers
+            friendly_name = "Project Update"
         elif each_task['type'] == 'inventory_update':
             instance = InventoryUpdate.objects.get(id=each_task['id'])
             instance_name = instance.name
+            notifiers = instance.inventory_source.notifiers
+            friendly_name = "Inventory Update"
         elif each_task['type'] == 'job':
             instance = Job.objects.get(id=each_task['id'])
             instance_name = instance.job_template.name
+            notifiers = instance.job_template.notifiers
+            friendly_name = "Job"
         elif each_task['type'] == 'ad_hoc_command':
             instance = AdHocCommand.objects.get(id=each_task['id'])
             instance_name = instance.module_name
+            notifiers = {}
+            friendly_name = "AdHoc Command"
         else:
             # Unknown task type
             break
@@ -190,6 +276,7 @@ def handle_work_error(self, task_id, subtasks=None):
         first_task_id = instance.id
         first_task_type = each_task['type']
         first_task_name = instance_name
+        first_task_friendly_name = friendly_name
     if instance.celery_task_id != task_id:
         instance.status = 'failed'
         instance.failed = True
@@ -197,6 +284,16 @@ def handle_work_error(self, task_id, subtasks=None):
                               (first_task_type, first_task_name, first_task_id)
         instance.save()
         instance.socketio_emit_status("failed")
+    notification_body = first_task.notification_data()
+    notification_subject = "{} #{} '{}' failed on Ansible Tower: {}".format(first_task_friendly_name,
+                                                                            first_task_id,
+                                                                            first_task_name,
+                                                                            notification_body['url'])
+    notification_body['friendly_name'] = first_task_friendly_name
+    send_notifications.delay([n.generate_notification(notification_subject, notification_body).id
+                              for n in set(notifiers.get('error', []) + notifiers.get('any', []))],
+                             job_id=first_task_id)
+
 @task()
 def
update_inventory_computed_fields(inventory_id, should_update_hosts=True): @@ -861,11 +958,6 @@ class RunJob(BaseTask): ''' return getattr(tower_settings, 'AWX_PROOT_ENABLED', False) - def pre_run_hook(self, job, **kwargs): - if job.job_type == PERM_INVENTORY_SCAN: - if not test_mongo_connection(): - raise RuntimeError("Fact Scan Database is offline") - def post_run_hook(self, job, **kwargs): ''' Hook for actions to run after job/task has completed. diff --git a/awx/main/tests/functional/ansible.json b/awx/main/tests/functional/ansible.json new file mode 100644 index 0000000000..e877df2ad1 --- /dev/null +++ b/awx/main/tests/functional/ansible.json @@ -0,0 +1,283 @@ +{ + "ansible_all_ipv4_addresses": [ + "172.17.0.7" + ], + "ansible_all_ipv6_addresses": [ + "fe80::42:acff:fe11:7" + ], + "ansible_architecture": "x86_64", + "ansible_bios_date": "12/01/2006", + "ansible_bios_version": "VirtualBox", + "ansible_cmdline": { + "BOOT_IMAGE": "/boot/vmlinuz64", + "base": true, + "console": "tty0", + "initrd": "/boot/initrd.img", + "loglevel": "3", + "noembed": true, + "nomodeset": true, + "norestore": true, + "user": "docker", + "waitusb": "10:LABEL=boot2docker-data" + }, + "ansible_date_time": { + "date": "2016-02-02", + "day": "02", + "epoch": "1454424257", + "hour": "14", + "iso8601": "2016-02-02T14:44:17Z", + "iso8601_basic": "20160202T144417348424", + "iso8601_basic_short": "20160202T144417", + "iso8601_micro": "2016-02-02T14:44:17.348496Z", + "minute": "44", + "month": "02", + "second": "17", + "time": "14:44:17", + "tz": "UTC", + "tz_offset": "+0000", + "weekday": "Tuesday", + "weekday_number": "2", + "weeknumber": "05", + "year": "2016" + }, + "ansible_default_ipv4": { + "address": "172.17.0.7", + "alias": "eth0", + "broadcast": "global", + "gateway": "172.17.0.1", + "interface": "eth0", + "macaddress": "02:42:ac:11:00:07", + "mtu": 1500, + "netmask": "255.255.0.0", + "network": "172.17.0.0", + "type": "ether" + }, + "ansible_default_ipv6": {}, + "ansible_devices": { + "sda": { + "holders": [], + "host": "", + "model": "VBOX HARDDISK", + "partitions": { + "sda1": { + "sectors": "510015555", + "sectorsize": 512, + "size": "243.19 GB", + "start": "1975995" + }, + "sda2": { + "sectors": "1975932", + "sectorsize": 512, + "size": "964.81 MB", + "start": "63" + } + }, + "removable": "0", + "rotational": "0", + "scheduler_mode": "deadline", + "sectors": "512000000", + "sectorsize": "512", + "size": "244.14 GB", + "support_discard": "0", + "vendor": "ATA" + }, + "sr0": { + "holders": [], + "host": "", + "model": "CD-ROM", + "partitions": {}, + "removable": "1", + "rotational": "1", + "scheduler_mode": "deadline", + "sectors": "61440", + "sectorsize": "2048", + "size": "120.00 MB", + "support_discard": "0", + "vendor": "VBOX" + } + }, + "ansible_distribution": "Ubuntu", + "ansible_distribution_major_version": "14", + "ansible_distribution_release": "trusty", + "ansible_distribution_version": "14.04", + "ansible_dns": { + "nameservers": [ + "8.8.8.8" + ] + }, + "ansible_domain": "", + "ansible_env": { + "HOME": "/root", + "HOSTNAME": "ede894599989", + "LANG": "en_US.UTF-8", + "LC_ALL": "en_US.UTF-8", + "LC_MESSAGES": "en_US.UTF-8", + "LESSCLOSE": "/usr/bin/lesspipe %s %s", + "LESSOPEN": "| /usr/bin/lesspipe %s", + "LS_COLORS": "", + "OLDPWD": "/ansible", + "PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "PWD": "/ansible/examples", + "SHLVL": "1", + "_": "/usr/local/bin/ansible", + "container": "docker" + }, + "ansible_eth0": { + "active": true, + "device": "eth0", + "ipv4": { 
+ "address": "172.17.0.7", + "broadcast": "global", + "netmask": "255.255.0.0", + "network": "172.17.0.0" + }, + "ipv6": [ + { + "address": "fe80::42:acff:fe11:7", + "prefix": "64", + "scope": "link" + } + ], + "macaddress": "02:42:ac:11:00:07", + "mtu": 1500, + "promisc": false, + "type": "ether" + }, + "ansible_fips": false, + "ansible_form_factor": "Other", + "ansible_fqdn": "ede894599989", + "ansible_hostname": "ede894599989", + "ansible_interfaces": [ + "lo", + "eth0" + ], + "ansible_kernel": "4.1.12-boot2docker", + "ansible_lo": { + "active": true, + "device": "lo", + "ipv4": { + "address": "127.0.0.1", + "broadcast": "host", + "netmask": "255.0.0.0", + "network": "127.0.0.0" + }, + "ipv6": [ + { + "address": "::1", + "prefix": "128", + "scope": "host" + } + ], + "mtu": 65536, + "promisc": false, + "type": "loopback" + }, + "ansible_lsb": { + "codename": "trusty", + "description": "Ubuntu 14.04.3 LTS", + "id": "Ubuntu", + "major_release": "14", + "release": "14.04" + }, + "ansible_machine": "x86_64", + "ansible_memfree_mb": 3746, + "ansible_memory_mb": { + "nocache": { + "free": 8896, + "used": 3638 + }, + "real": { + "free": 3746, + "total": 12534, + "used": 8788 + }, + "swap": { + "cached": 0, + "free": 4048, + "total": 4048, + "used": 0 + } + }, + "ansible_memtotal_mb": 12534, + "ansible_mounts": [ + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/resolv.conf", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + }, + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/hostname", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + }, + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/hosts", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + } + ], + "ansible_nodename": "ede894599989", + "ansible_os_family": "Debian", + "ansible_pkg_mgr": "apt", + "ansible_processor": [ + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz" + ], + "ansible_processor_cores": 8, + "ansible_processor_count": 1, + "ansible_processor_threads_per_core": 1, + "ansible_processor_vcpus": 8, + "ansible_product_name": "VirtualBox", + "ansible_product_serial": "0", + "ansible_product_uuid": "25C5EA5A-1DF1-48D9-A2C6-81227DA153C0", + "ansible_product_version": "1.2", + "ansible_python_version": "2.7.6", + "ansible_selinux": false, + "ansible_service_mgr": "upstart", + "ansible_ssh_host_key_dsa_public": 
"AAAAB3NzaC1kc3MAAACBALF0xsM8UMXgSKiWNw4t19wxbxLnxQX742t/dIM0O8YLx+/lIP+Q69Dv5uoVt0zKV39eFziRlCh96qj2KYkGEJ6XfVZFnhpculL2Pv2CPpSwKuQ1vTbDO/xxUrvY+bHpfNJf9Rh69bFEE2pTsjomFPCgp8M0qGaFtwg6czSaeBONAAAAFQCGEfVtj97JiexTVRqgQITYlFp/eQAAAIEAg+S9qWn+AIb3amwVoLL/usQYOPCmZY9RVPzpkjJ6OG+HI4B7cXeauPtNTJwT0f9vGEqzf4mPpmS+aCShj6iwdmJ+cOwR5+SJlNalab3CMBoXKVLbT1J2XWFlK0szKKnoReP96IDbkAkGQ3fkm4jz0z6Wy0u6wOQVNcd4G5cwLZ4AAACAFvBm+H1LwNrwWBjWio+ayhglZ4Y25mLMEn2+dqBz0gLK5szEbft1HMPOWIVHvl6vi3v34pAJHKpxXpkLlNliTn8iw9BzCOrgP4V8sp2/85mxEuCdI1w/QERj9cHu5iS2pZ0cUwDE3pfuuGBB3IEliaJyaapowdrM8lN12jQl11E=", + "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHiYp4e9RfXpxDcEWpK4EuXPHW9++xcFI9hiB0TYAZgxEF9RIgwfucpPawFk7HIFoNc7EXQMlryilLSbg155KWM=", + "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAILclD2JaC654azEsAfcHRIOA2Ig9/Qk6MX80i/VCEdSH", + "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDeSUGxZaZsgBsezld0mj3HcbAwx6aykGnejceBjcs6lVwSGMHevofzSXIQDPYBhZoyWNl0PYAHv6AsQ8+3khd2SitUMJAuHSz1ZjgHCCGQP9ijXTKHn+lWCKA8rhLG/dwYwiouoOPZfn1G+erbKO6XiVbELrrf2RadnMGuMinESIOKVj3IunXsaGRMsDOQferOnUf7MvH7xpQnoySyQ1+p4rGruaohWG+Y2cDo7+B2FylPVbrpRDDJkfbt4J96WHx0KOdD0qzOicQP8JqDflqQPJJCWcgrvjQOSe4gXdPB6GZDtBl2qgQRwt1IgizPMm+b7Bwbd2VDe1TeWV2gT/7H", + "ansible_swapfree_mb": 4048, + "ansible_swaptotal_mb": 4048, + "ansible_system": "Linux", + "ansible_system_vendor": "innotek GmbH", + "ansible_uptime_seconds": 178398, + "ansible_user_dir": "/root", + "ansible_user_gecos": "root", + "ansible_user_gid": 0, + "ansible_user_id": "root", + "ansible_user_shell": "/bin/bash", + "ansible_user_uid": 0, + "ansible_userspace_architecture": "x86_64", + "ansible_userspace_bits": "64", + "ansible_virtualization_role": "guest", + "ansible_virtualization_type": "docker", + "module_setup": true +} diff --git a/awx/main/tests/functional/test_activity_streams.py b/awx/main/tests/functional/api/test_activity_streams.py similarity index 100% rename from awx/main/tests/functional/test_activity_streams.py rename to awx/main/tests/functional/api/test_activity_streams.py diff --git a/awx/main/tests/functional/api/test_fact_versions.py b/awx/main/tests/functional/api/test_fact_versions.py new file mode 100644 index 0000000000..dfb067a1f8 --- /dev/null +++ b/awx/main/tests/functional/api/test_fact_versions.py @@ -0,0 +1,255 @@ +# Python +import mock +import pytest +from datetime import timedelta +import urlparse +import urllib + +# AWX +from awx.main.models.fact import Fact +from awx.main.utils import timestamp_apiformat + +# Django +from django.core.urlresolvers import reverse +from django.utils import timezone + +def mock_feature_enabled(feature, bypass_database=None): + return True + +def mock_feature_disabled(feature, bypass_database=None): + return False + +def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1): + hosts = hosts(host_count=host_count) + fact_scans(fact_scans=3, timestamp_epoch=epoch) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user('admin', True), data=get_params) + + return (hosts[0], response) + +def check_url(url1_full, fact_known, module): + url1_split = urlparse.urlsplit(url1_full) + url1 = url1_split.path + url1_params = urlparse.parse_qsl(url1_split.query) + + url2 = reverse('api:host_fact_compare_view', args=(fact_known.host.pk,)) + url2_params = [('module', module), ('datetime', timestamp_apiformat(fact_known.timestamp))] + + assert url1 == url2 + assert 
urllib.urlencode(url1_params) == urllib.urlencode(url2_params) + +def check_response_facts(facts_known, response): + for i, fact_known in enumerate(facts_known): + assert fact_known.module == response.data['results'][i]['module'] + assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp'] + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + +def check_system_tracking_feature_forbidden(response): + assert 402 == response.status_code + assert 'Your license does not permit use of system tracking.' == response.data['detail'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_system_tracking_license_get(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + check_system_tracking_feature_forbidden(response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_system_tracking_license_options(hosts, options, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = options(url, None, user('admin', True)) + + check_system_tracking_feature_forbidden(response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_no_facts_db(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + response_expected = { + 'results': [] + } + assert response_expected == response.data + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_basic_fields(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'from': epoch, + 'to': epoch, + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + + results = response.data['results'] + assert 'related' in results[0] + assert 'timestamp' in results[0] + assert 'module' in results[0] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_basic_options_fields(hosts, fact_scans, options, user): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = options(url, None, user('admin', True), pk=hosts[0].id) + + assert 'related' in response.data['actions']['GET'] + assert 'module' in response.data['actions']['GET'] + assert ("ansible", "Ansible") in response.data['actions']['GET']['module']['choices'] + assert ("services", "Services") in response.data['actions']['GET']['module']['choices'] + assert ("packages", "Packages") in response.data['actions']['GET']['module']['choices'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_related_fact_view(hosts, fact_scans, get, user): + epoch = timezone.now() + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch) + facts_known = Fact.get_timeline(host.id) + assert 9 == len(facts_known) + assert 9 == len(response.data['results']) + + for i, fact_known in enumerate(facts_known): + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + 
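+# The fact_scans fixture stores one Fact per module ('ansible', 'services',
+# 'packages') per host for each scan, so the 3 scans created above yield the
+# 9 timeline entries asserted here; adding more hosts must not change a
+# single host's timeline length, which the next test verifies.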
+@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_multiple_hosts(hosts, fact_scans, get, user): + epoch = timezone.now() + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, host_count=3) + facts_known = Fact.get_timeline(host.id) + assert 9 == len(facts_known) + assert 9 == len(response.data['results']) + + for i, fact_known in enumerate(facts_known): + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_to_from(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'from': epoch - timedelta(days=10), + 'to': epoch + timedelta(days=10), + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, ts_from=search['from'], ts_to=search['to']) + assert 9 == len(facts_known) + assert 9 == len(response.data['results']) + + check_response_facts(facts_known, response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_module(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'module': 'packages', + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, module=search['module']) + assert 3 == len(facts_known) + assert 3 == len(response.data['results']) + + check_response_facts(facts_known, response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_from(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'from': epoch + timedelta(days=1), + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, ts_from=search['from']) + assert 3 == len(facts_known) + assert 3 == len(response.data['results']) + + check_response_facts(facts_known, response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_param_to(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'to': epoch + timedelta(days=1), + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + facts_known = Fact.get_timeline(host.id, ts_to=search['to']) + assert 6 == len(facts_known) + assert 6 == len(response.data['results']) + + check_response_facts(facts_known, response) + +def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + team_obj.users.add(user_obj) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user_obj) + return response + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_normal_user_403(hosts, fact_scans, get, user, team): + user_bob = user('bob', False) + response = _test_user_access_control(hosts, fact_scans, get, user_bob, team) + + assert 403 == response.status_code + assert "You do not have permission to perform this action." 
== response.data['detail']
+
+@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
+@pytest.mark.ac
+@pytest.mark.django_db
+def test_super_user_ok(hosts, fact_scans, get, user, team):
+    user_super = user('bob', True)
+    response = _test_user_access_control(hosts, fact_scans, get, user_super, team)
+
+    assert 200 == response.status_code
+
+@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
+@pytest.mark.ac
+@pytest.mark.django_db
+def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
+    user_admin = user('johnson', False)
+    organization.admins.add(user_admin)
+
+    response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
+
+    assert 200 == response.status_code
+
+@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
+@pytest.mark.ac
+@pytest.mark.django_db
+def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
+    user_admin = user('johnson', False)
+    org2 = organizations(1)
+    org2[0].admins.add(user_admin)
+
+    response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
+
+    assert 403 == response.status_code
+
diff --git a/awx/main/tests/functional/api/test_fact_view.py b/awx/main/tests/functional/api/test_fact_view.py
new file mode 100644
index 0000000000..ad96d48aee
--- /dev/null
+++ b/awx/main/tests/functional/api/test_fact_view.py
@@ -0,0 +1,182 @@
+import mock
+import pytest
+import json
+
+from awx.main.utils import timestamp_apiformat
+from django.core.urlresolvers import reverse
+from django.utils import timezone
+
+def mock_feature_enabled(feature, bypass_database=None):
+    return True
+
+def mock_feature_disabled(feature, bypass_database=None):
+    return False
+
+# TODO: Consider making the fact_scan() fixture a class instead of a function, and moving this method into it
+def find_fact(facts, host_id, module_name, timestamp):
+    for f in facts:
+        if f.host_id == host_id and f.module == module_name and f.timestamp == timestamp:
+            return f
+    raise RuntimeError('fact <%s, %s, %s> not found in %s' % (host_id, module_name, timestamp, facts))
+
+def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name='ansible', get_params={}):
+    hosts = hosts(host_count=1)
+    facts = fact_scans(fact_scans=1, timestamp_epoch=epoch)
+
+    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
+    response = get(url, user('admin', True), data=get_params)
+
+    fact_known = find_fact(facts, hosts[0].id, module_name, epoch)
+    return (fact_known, response)
+
+def check_system_tracking_feature_forbidden(response):
+    assert 402 == response.status_code
+    assert 'Your license does not permit use of system tracking.'
== response.data['detail'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_system_tracking_license_get(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + check_system_tracking_feature_forbidden(response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_system_tracking_license_options(hosts, options, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = options(url, None, user('admin', True)) + + check_system_tracking_feature_forbidden(response) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_no_fact_found(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + expected_response = { + "detail": "Fact not found" + } + assert 404 == response.status_code + assert expected_response == response.data + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_basic_fields(hosts, fact_scans, get, user): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True)) + + assert 'related' in response.data + assert 'id' in response.data + assert 'facts' in response.data + assert 'module' in response.data + assert 'host' in response.data + assert isinstance(response.data['host'], int) + assert 'summary_fields' in response.data + assert 'host' in response.data['summary_fields'] + assert 'name' in response.data['summary_fields']['host'] + assert 'description' in response.data['summary_fields']['host'] + assert 'host' in response.data['related'] + assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_content(hosts, fact_scans, get, user, fact_ansible_json): + (fact_known, response) = setup_common(hosts, fact_scans, get, user) + + assert fact_known.host_id == response.data['host'] + assert fact_ansible_json == json.loads(response.data['facts']) + assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp'] + assert fact_known.module == response.data['module'] + +def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name): + params = { + 'module': module_name + } + (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params) + + assert fact_json == json.loads(response.data['facts']) + assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp'] + assert module_name == response.data['module'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json): + _test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages') + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json): + _test_search_by_module(hosts, fact_scans, get, user, 
fact_services_json, 'services') + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json): + epoch = timezone.now() + module_name = 'packages' + + (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, epoch=epoch, get_params=dict(module=module_name, datetime=epoch)) + + assert fact_known.id == response.data['id'] + +def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj): + hosts = hosts(host_count=1) + fact_scans(fact_scans=1) + + team_obj.users.add(user_obj) + + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user_obj) + return response + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_normal_user_403(hosts, fact_scans, get, user, team): + user_bob = user('bob', False) + response = _test_user_access_control(hosts, fact_scans, get, user_bob, team) + + assert 403 == response.status_code + assert "You do not have permission to perform this action." == response.data['detail'] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_super_user_ok(hosts, fact_scans, get, user, team): + user_super = user('bob', True) + response = _test_user_access_control(hosts, fact_scans, get, user_super, team) + + assert 200 == response.status_code + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_user_admin_ok(organization, hosts, fact_scans, get, user, team): + user_admin = user('johnson', False) + organization.admins.add(user_admin) + + response = _test_user_access_control(hosts, fact_scans, get, user_admin, team) + + assert 200 == response.status_code + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.ac +@pytest.mark.django_db +def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team): + user_admin = user('johnson', False) + org2 = organizations(1) + org2[0].admins.add(user_admin) + + response = _test_user_access_control(hosts, fact_scans, get, user_admin, team) + + assert 403 == response.status_code + diff --git a/awx/main/tests/functional/api/test_host_detail.py b/awx/main/tests/functional/api/test_host_detail.py new file mode 100644 index 0000000000..79213490b0 --- /dev/null +++ b/awx/main/tests/functional/api/test_host_detail.py @@ -0,0 +1,17 @@ +# TODO: As of writing this our only concern is ensuring that the fact feature is reflected in the Host endpoint. +# Other host tests should live here to make this test suite more complete. 
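+# For now, the single test below only verifies that the host detail endpoint
+# exposes a related 'fact_versions' link pointing at the host's fact version list.
+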
+import pytest
+
+from django.core.urlresolvers import reverse
+
+@pytest.mark.django_db
+def test_basic_fields(hosts, fact_scans, get, user):
+    hosts = hosts(host_count=1)
+
+    url = reverse('api:host_detail', args=(hosts[0].pk,))
+    response = get(url, user('admin', True))
+
+    assert 'related' in response.data
+    assert 'fact_versions' in response.data['related']
+    assert reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) == response.data['related']['fact_versions']
+
diff --git a/awx/main/tests/functional/commands/__init__.py b/awx/main/tests/functional/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/main/tests/functional/commands/conftest.py b/awx/main/tests/functional/commands/conftest.py
new file mode 100644
index 0000000000..2de8846b0a
--- /dev/null
+++ b/awx/main/tests/functional/commands/conftest.py
@@ -0,0 +1,109 @@
+import pytest
+import time
+
+from datetime import datetime
+
+@pytest.fixture
+def fact_msg_base(inventory, hosts):
+    host_objs = hosts(1)
+    return {
+        'host': host_objs[0].name,
+        'date_key': time.mktime(datetime.utcnow().timetuple()),
+        'facts' : { },
+        'inventory_id': inventory.id
+    }
+
+@pytest.fixture
+def fact_msg_small(fact_msg_base):
+    fact_msg_base['facts'] = {
+        'packages': {
+            "accountsservice": [
+                {
+                    "architecture": "amd64",
+                    "name": "accountsservice",
+                    "source": "apt",
+                    "version": "0.6.35-0ubuntu7.1"
+                }
+            ],
+            "acpid": [
+                {
+                    "architecture": "amd64",
+                    "name": "acpid",
+                    "source": "apt",
+                    "version": "1:2.0.21-1ubuntu2"
+                }
+            ],
+            "adduser": [
+                {
+                    "architecture": "all",
+                    "name": "adduser",
+                    "source": "apt",
+                    "version": "3.113+nmu3ubuntu3"
+                }
+            ],
+        },
+        'services': [
+            {
+                "name": "acpid",
+                "source": "sysv",
+                "state": "running"
+            },
+            {
+                "name": "apparmor",
+                "source": "sysv",
+                "state": "stopped"
+            },
+            {
+                "name": "atd",
+                "source": "sysv",
+                "state": "running"
+            },
+            {
+                "name": "cron",
+                "source": "sysv",
+                "state": "running"
+            }
+        ],
+        'ansible': {
+            'ansible_fact_simple': 'hello world',
+            'ansible_fact_complex': {
+                'foo': 'bar',
+                'hello': [
+                    'scooby',
+                    'dooby',
+                    'doo'
+                ]
+            },
+        }
+    }
+    return fact_msg_base
+
+
+'''
+Facts sent from ansible to our fact cache receiver.
+The fact module type is implicit, i.e.:
+
+{
+    'fact_module_name': { ... },
+}
+
+Note: The 'ansible' module is an exception to this rule.
+It is NOT nested in a dict, and thus does NOT contain a first-level
+key of 'ansible'.
+'''
+
+@pytest.fixture
+def fact_msg_ansible(fact_msg_base, fact_ansible_json):
+    fact_msg_base['facts'] = fact_ansible_json
+    return fact_msg_base
+
+@pytest.fixture
+def fact_msg_packages(fact_msg_base, fact_packages_json):
+    fact_msg_base['facts']['packages'] = fact_packages_json
+    return fact_msg_base
+
+@pytest.fixture
+def fact_msg_services(fact_msg_base, fact_services_json):
+    fact_msg_base['facts']['services'] = fact_services_json
+    return fact_msg_base
+
diff --git a/awx/main/tests/functional/commands/test_cleanup_facts.py b/awx/main/tests/functional/commands/test_cleanup_facts.py
new file mode 100644
index 0000000000..93ddb72d14
--- /dev/null
+++ b/awx/main/tests/functional/commands/test_cleanup_facts.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved
+
+# Python
+import pytest
+import mock
+from dateutil.relativedelta import relativedelta
+from datetime import timedelta
+
+# Django
+from django.utils import timezone
+from django.core.management.base import CommandError
+
+# AWX
+from awx.main.management.commands.cleanup_facts import CleanupFacts, Command
+from awx.main.models.fact import Fact
+from awx.main.models.inventory import Host
+
+def mock_feature_enabled(feature, bypass_database=None):
+    return True
+
+def mock_feature_disabled(feature, bypass_database=None):
+    return False
+
+@pytest.mark.django_db
+def test_cleanup_granularity(fact_scans, hosts):
+    epoch = timezone.now()
+    hosts(5)
+    fact_scans(10, timestamp_epoch=epoch)
+    fact_newest = Fact.objects.all().order_by('-timestamp').first()
+    timestamp_future = fact_newest.timestamp + timedelta(days=365)
+    granularity = relativedelta(days=2)
+
+    cleanup_facts = CleanupFacts()
+    deleted_count = cleanup_facts.cleanup(timestamp_future, granularity)
+    assert 60 == deleted_count
+
+'''
+Delete half of the scans.
+'''
+@pytest.mark.django_db
+def test_cleanup_older_than(fact_scans, hosts):
+    epoch = timezone.now()
+    hosts(5)
+    fact_scans(28, timestamp_epoch=epoch)
+    qs = Fact.objects.all().order_by('-timestamp')
+    fact_middle = qs[qs.count() / 2]
+    granularity = relativedelta()
+
+    cleanup_facts = CleanupFacts()
+    deleted_count = cleanup_facts.cleanup(fact_middle.timestamp, granularity)
+    assert 210 == deleted_count
+
+@pytest.mark.django_db
+def test_cleanup_older_than_granularity_module(fact_scans, hosts):
+    epoch = timezone.now()
+    hosts(5)
+    fact_scans(10, timestamp_epoch=epoch)
+    fact_newest = Fact.objects.all().order_by('-timestamp').first()
+    timestamp_future = fact_newest.timestamp + timedelta(days=365)
+    granularity = relativedelta(days=2)
+
+    cleanup_facts = CleanupFacts()
+    deleted_count = cleanup_facts.cleanup(timestamp_future, granularity, module='ansible')
+    assert 20 == deleted_count
+
+
+'''
+Reduce the granularity of half of the fact scans by half.
+''' +@pytest.mark.django_db +def test_cleanup_logic(fact_scans, hosts): + epoch = timezone.now() + hosts = hosts(5) + fact_scans(60, timestamp_epoch=epoch) + timestamp_middle = epoch + timedelta(days=30) + granularity = relativedelta(days=2) + module = 'ansible' + + cleanup_facts = CleanupFacts() + cleanup_facts.cleanup(timestamp_middle, granularity, module=module) + + + host_ids = Host.objects.all().values_list('id', flat=True) + host_facts = {} + for host_id in host_ids: + facts = Fact.objects.filter(host__id=host_id, module=module, timestamp__lt=timestamp_middle).order_by('-timestamp') + host_facts[host_id] = facts + + for host_id, facts in host_facts.iteritems(): + assert 15 == len(facts) + + timestamp_pivot = timestamp_middle + for fact in facts: + timestamp_pivot -= granularity + assert fact.timestamp == timestamp_pivot + +@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_disabled) +@pytest.mark.django_db +@pytest.mark.license_feature +def test_system_tracking_feature_disabled(mocker): + cmd = Command() + with pytest.raises(CommandError) as err: + cmd.handle(None) + assert 'The System Tracking feature is not enabled for your Tower instance' in err.value + +@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_parameters_ok(mocker): + run = mocker.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run') + kv = { + 'older_than': '1d', + 'granularity': '1d', + 'module': None, + } + cmd = Command() + cmd.handle(None, **kv) + run.assert_called_once_with(relativedelta(days=1), relativedelta(days=1), module=None) + +@pytest.mark.django_db +def test_string_time_to_timestamp_ok(): + kvs = [ + { + 'time': '2w', + 'timestamp': relativedelta(weeks=2), + 'msg': '2 weeks', + }, + { + 'time': '23d', + 'timestamp': relativedelta(days=23), + 'msg': '23 days', + }, + { + 'time': '11m', + 'timestamp': relativedelta(months=11), + 'msg': '11 months', + }, + { + 'time': '14y', + 'timestamp': relativedelta(years=14), + 'msg': '14 years', + }, + ] + for kv in kvs: + cmd = Command() + res = cmd.string_time_to_timestamp(kv['time']) + assert kv['timestamp'] == res + +@pytest.mark.django_db +def test_string_time_to_timestamp_invalid(): + kvs = [ + { + 'time': '2weeks', + 'msg': 'weeks instead of w', + }, + { + 'time': '2days', + 'msg': 'days instead of d', + }, + { + 'time': '23', + 'msg': 'no unit specified', + }, + { + 'time': None, + 'msg': 'no value specified', + }, + { + 'time': 'zigzag', + 'msg': 'random string specified', + }, + ] + for kv in kvs: + cmd = Command() + res = cmd.string_time_to_timestamp(kv['time']) + assert res is None + +@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_parameters_fail(mocker): + # Mock run() just in case, but it should never get called because an error should be thrown + mocker.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run') + kvs = [ + { + 'older_than': '1week', + 'granularity': '1d', + 'msg': '--older_than invalid value "1week"', + }, + { + 'older_than': '1d', + 'granularity': '1year', + 'msg': '--granularity invalid value "1year"', + } + ] + for kv in kvs: + cmd = Command() + with pytest.raises(CommandError) as err: + cmd.handle(None, older_than=kv['older_than'], granularity=kv['granularity']) + assert kv['msg'] in err.value + diff --git a/awx/main/tests/functional/commands/test_run_fact_cache_receiver.py 
b/awx/main/tests/functional/commands/test_run_fact_cache_receiver.py
new file mode 100644
index 0000000000..266272e37c
--- /dev/null
+++ b/awx/main/tests/functional/commands/test_run_fact_cache_receiver.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved
+
+# Python
+import pytest
+from datetime import datetime
+import json
+
+# Django
+
+# AWX
+from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver
+from awx.main.models.fact import Fact
+from awx.main.models.inventory import Host
+
+# TODO: Check that timestamp and other attributes are as expected
+def check_process_fact_message_module(fact_returned, data, module_name):
+    date_key = data['date_key']
+
+    # Ensure 1, and only 1, fact created
+    timestamp = datetime.fromtimestamp(date_key, None)
+    assert 1 == Fact.objects.all().count()
+
+    host_obj = Host.objects.get(name=data['host'], inventory__id=data['inventory_id'])
+    assert host_obj is not None
+    fact_known = Fact.get_host_fact(host_obj.id, module_name, timestamp)
+    assert fact_known is not None
+    assert fact_known == fact_returned
+
+    assert host_obj == fact_returned.host
+    if module_name == 'ansible':
+        assert data['facts'] == fact_returned.facts
+    else:
+        assert data['facts'][module_name] == fact_returned.facts
+    assert timestamp == fact_returned.timestamp
+    assert module_name == fact_returned.module
+
+@pytest.mark.django_db
+def test_process_fact_message_ansible(fact_msg_ansible):
+    receiver = FactCacheReceiver()
+    fact_returned = receiver.process_fact_message(fact_msg_ansible)
+
+    check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')
+
+@pytest.mark.django_db
+def test_process_fact_message_packages(fact_msg_packages):
+    receiver = FactCacheReceiver()
+    fact_returned = receiver.process_fact_message(fact_msg_packages)
+
+    check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')
+
+@pytest.mark.django_db
+def test_process_fact_message_services(fact_msg_services):
+    receiver = FactCacheReceiver()
+    fact_returned = receiver.process_fact_message(fact_msg_services)
+
+    check_process_fact_message_module(fact_returned, fact_msg_services, 'services')
+
+'''
+We piggyback our fact sending onto the Ansible fact interface.
+The interface is . Where facts is a json blob of all the facts.
+This makes it hard to decipher what facts are new/changed.
+Because of this, we handle the same fact module data being sent multiple times
+and just keep the newest version.
+''' +@pytest.mark.django_db +def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible): + #epoch = timezone.now() + epoch = datetime.fromtimestamp(fact_msg_ansible['date_key']) + fact_scans(fact_scans=1, timestamp_epoch=epoch) + key = 'ansible.overwrite' + value = 'hello world' + + receiver = FactCacheReceiver() + receiver.process_fact_message(fact_msg_ansible) + + fact_msg_ansible['facts'][key] = value + fact_returned = receiver.process_fact_message(fact_msg_ansible) + + fact_obj = Fact.objects.get(id=fact_returned.id) + assert key in fact_obj.facts + assert json.loads(fact_obj.facts) == fact_msg_ansible['facts'] + assert value == json.loads(fact_obj.facts)[key] + +# Ensure that the message flows from the socket through to process_fact_message() +@pytest.mark.django_db +def test_run_receiver(mocker, fact_msg_ansible): + mocker.patch("awx.main.socket.Socket.listen", return_value=[fact_msg_ansible]) + + receiver = FactCacheReceiver() + mocker.patch.object(receiver, 'process_fact_message', return_value=None) + + receiver.run_receiver(use_processing_threads=False) + + receiver.process_fact_message.assert_called_once_with(fact_msg_ansible) diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index cea7ad01f5..01c2f000f3 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -1,8 +1,23 @@ -import pytest +# Python +import pytest +import mock +import json +import os +from datetime import timedelta + +# Django from django.core.urlresolvers import resolve from django.utils.six.moves.urllib.parse import urlparse +from django.utils import timezone from django.contrib.auth.models import User +from django.conf import settings + +# AWX +from awx.main.models.projects import Project +from awx.main.models.base import PERM_INVENTORY_READ +from awx.main.models.ha import Instance +from awx.main.models.fact import Fact from rest_framework.test import ( APIRequestFactory, @@ -10,20 +25,34 @@ from rest_framework.test import ( ) from awx.main.models.credential import Credential -from awx.main.models.projects import Project from awx.main.models.jobs import JobTemplate -from awx.main.models.ha import Instance from awx.main.models.inventory import ( - Inventory, Group, ) from awx.main.models.organization import ( Organization, - Team, + Permission, ) from awx.main.models.rbac import Role +''' +Disable all django model signals. +''' +@pytest.fixture(scope="session", autouse=False) +def disable_signals(): + mocked = mock.patch('django.dispatch.Signal.send', autospec=True) + mocked.start() + +''' +FIXME: Not sure how "far" just setting the BROKER_URL will get us. 
+We may need to influence CELERY's configuration like we do in the old unit tests (see base.py)
+
+Allows Django signal code to execute without the need for Redis
+'''
+@pytest.fixture(scope="session", autouse=True)
+def celery_memory_broker():
+    settings.BROKER_URL='memory://localhost/'
 
 @pytest.fixture
 def user():
@@ -60,11 +89,15 @@ def deploy_jobtemplate(project, inventory, credential):
 
 @pytest.fixture
 def team(organization):
-    return Team.objects.create(organization=organization, name='test-team')
+    return organization.teams.create(name='test-team')
 
 @pytest.fixture
-def project(organization):
-    prj = Project.objects.create(name="test-project", description="test-project-desc")
+@mock.patch.object(Project, "update", lambda self, **kwargs: None)
+def project(instance, organization):
+    prj = Project.objects.create(name="test-proj",
+                                 description="test-proj-desc",
+                                 scm_type="git",
+                                 scm_url="https://github.com/jlaska/ansible-playbooks")
     prj.organizations.add(organization)
     return prj
 
@@ -87,7 +120,7 @@ def credential():
 
 @pytest.fixture
 def inventory(organization):
-    return Inventory.objects.create(name="test-inventory", organization=organization)
+    return organization.inventories.create(name="test-inv")
 
 @pytest.fixture
 def role():
@@ -105,12 +138,43 @@ def alice(user):
 def bob(user):
     return user('bob', False)
 
+@pytest.fixture
+def organizations(instance):
+    def rf(organization_count=1):
+        orgs = []
+        for i in xrange(0, organization_count):
+            o = Organization.objects.create(name="test-org-%d" % i, description="test-org-desc")
+            orgs.append(o)
+        return orgs
+    return rf
+
 @pytest.fixture
 def group(inventory):
     def g(name):
-        return Group.objects.create(inventory=inventory, name=name)
+        try:
+            return Group.objects.get(name=name, inventory=inventory)
+        except Group.DoesNotExist:
+            return Group.objects.create(inventory=inventory, name=name)
     return g
 
+@pytest.fixture
+def hosts(group):
+    group1 = group('group-1')
+
+    def rf(host_count=1):
+        hosts = []
+        for i in xrange(0, host_count):
+            name = '%s-host-%s' % (group1.name, i)
+            (host, created) = group1.inventory.hosts.get_or_create(name=name)
+            if created:
+                group1.hosts.add(host)
+            hosts.append(host)
+        return hosts
+    return rf
+
+
 @pytest.fixture
 def permissions():
     return {
@@ -244,7 +308,48 @@ def options():
         return response
     return rf
 
-@pytest.fixture(scope="session", autouse=True)
-def celery_memory_broker():
-    from django.conf import settings
-    settings.BROKER_URL='memory://localhost/'
+
+
+@pytest.fixture
+def fact_scans(group, fact_ansible_json, fact_packages_json, fact_services_json):
+    group1 = group('group-1')
+
+    def rf(fact_scans=1, timestamp_epoch=timezone.now()):
+        facts_json = {}
+        facts = []
+        module_names = ['ansible', 'services', 'packages']
+        timestamp_current = timestamp_epoch
+
+        facts_json['ansible'] = fact_ansible_json
+        facts_json['packages'] = fact_packages_json
+        facts_json['services'] = fact_services_json
+
+        for i in xrange(0, fact_scans):
+            for host in group1.hosts.all():
+                for module_name in module_names:
+                    facts.append(Fact.objects.create(host=host, timestamp=timestamp_current, module=module_name, facts=facts_json[module_name]))
+            timestamp_current += timedelta(days=1)
+        return facts
+    return rf
+
+def _fact_json(module_name):
+    current_dir = os.path.dirname(os.path.realpath(__file__))
+    with open('%s/%s.json' % (current_dir, module_name)) as f:
+        return json.load(f)
+
+@pytest.fixture
+def fact_ansible_json():
+    return _fact_json('ansible')
+
+@pytest.fixture
+def fact_packages_json():
+    return _fact_json('packages')
+
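+# Each fact_*_json fixture loads its sample payload from the matching
+# <module>.json file (ansible.json, packages.json, services.json) that
+# lives next to this conftest, via _fact_json() above.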
+@pytest.fixture
+def fact_services_json():
+    return _fact_json('services')
+
+@pytest.fixture
+def permission_inv_read(organization, inventory, team):
+    return Permission.objects.create(inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ)
+
diff --git a/awx/main/tests/functional/models/fact/test_get_host_fact.py b/awx/main/tests/functional/models/fact/test_get_host_fact.py
new file mode 100644
index 0000000000..2569417496
--- /dev/null
+++ b/awx/main/tests/functional/models/fact/test_get_host_fact.py
@@ -0,0 +1,111 @@
+import pytest
+
+from datetime import timedelta
+from django.utils import timezone
+
+from awx.main.models import Fact
+
+@pytest.mark.django_db
+def test_newest_scan_exact(hosts, fact_scans):
+    epoch = timezone.now()
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known = None
+    for f in facts:
+        if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch:
+            fact_known = f
+            break
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', epoch)
+
+    assert fact_found == fact_known
+
+'''
+Show me the most recent state of the system at any point in time.
+Or, said differently: for any timestamp, get the first scan that is <= the timestamp.
+'''
+
+'''
+Ensure the most recent scan run is the scan returned.
+Query by future date.
+'''
+@pytest.mark.django_db
+def test_newest_scan_less_than(hosts, fact_scans):
+    epoch = timezone.now()
+    timestamp_future = epoch + timedelta(days=10)
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known = None
+    for f in facts:
+        if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=2):
+            fact_known = f
+            break
+    assert fact_known is not None
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_future)
+
+    assert fact_found == fact_known
+
+'''
+Query a Fact in the middle of the fact scan timeline, at a timestamp that does not exactly match any scan.
+'''
+@pytest.mark.django_db
+def test_query_middle_of_timeline(hosts, fact_scans):
+    epoch = timezone.now()
+    timestamp_middle = epoch + timedelta(days=1, hours=3)
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known = None
+    for f in facts:
+        if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=1):
+            fact_known = f
+            break
+    assert fact_known is not None
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_middle)
+
+    assert fact_found == fact_known
+
+'''
+Query time less than any fact scan. Should return None.
+'''
+@pytest.mark.django_db
+def test_query_result_empty(hosts, fact_scans):
+    epoch = timezone.now()
+    timestamp_less = epoch - timedelta(days=1)
+    hosts = hosts(host_count=2)
+    fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_less)
+
+    assert fact_found is None
+
+'''
+Query by fact module other than 'ansible'.
+'''
+@pytest.mark.django_db
+def test_by_module(hosts, fact_scans):
+    epoch = timezone.now()
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    fact_known_services = None
+    fact_known_packages = None
+    for f in facts:
+        if f.host_id == hosts[0].id:
+            if f.module == 'services' and f.timestamp == epoch:
+                fact_known_services = f
+            elif f.module == 'packages' and f.timestamp == epoch:
+                fact_known_packages = f
+    assert fact_known_services is not None
+    assert fact_known_packages is not None
+
+    fact_found_services = Fact.get_host_fact(hosts[0].id, 'services', epoch)
+    fact_found_packages = Fact.get_host_fact(hosts[0].id, 'packages', epoch)
+
+    assert fact_found_services == fact_known_services
+    assert fact_found_packages == fact_known_packages
+
diff --git a/awx/main/tests/functional/models/fact/test_get_timeline.py b/awx/main/tests/functional/models/fact/test_get_timeline.py
new file mode 100644
index 0000000000..da3360340a
--- /dev/null
+++ b/awx/main/tests/functional/models/fact/test_get_timeline.py
@@ -0,0 +1,129 @@
+import pytest
+
+from datetime import timedelta
+from django.utils import timezone
+
+from awx.main.models import Fact
+
+def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now(), module_name='ansible', ts_known=None):
+    hosts = hosts(host_count=2)
+    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
+
+    facts_known = []
+    for f in facts:
+        if f.host.id == hosts[0].id:
+            if module_name and f.module != module_name:
+                continue
+            if ts_known and f.timestamp != ts_known:
+                continue
+            facts_known.append(f)
+    fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
+    return (facts_known, fact_objs)
+
+@pytest.mark.django_db
+def test_all(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=1)
+    ts_to = epoch + timedelta(days=10)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch)
+    assert 9 == len(facts_known)
+    assert 9 == len(fact_objs)
+
+@pytest.mark.django_db
+def test_all_ansible(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=1)
+    ts_to = epoch + timedelta(days=10)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
+    assert 3 == len(facts_known)
+    assert 3 == len(fact_objs)
+
+    # get_timeline returns facts newest-first; compare against facts_known in reverse.
+    for i in xrange(0, len(fact_objs)):
+        assert facts_known[len(facts_known) - i - 1].id == fact_objs[i].id
+
+@pytest.mark.django_db
+def test_empty_db(hosts, fact_scans):
+    hosts = hosts(host_count=2)
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=1)
+    ts_to = epoch + timedelta(days=10)
+
+    fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to)
+
+    assert 0 == len(fact_objs)
+
+@pytest.mark.django_db
+def test_no_results(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch - timedelta(days=100)
+    ts_to = epoch - timedelta(days=50)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
+    assert 0 == len(fact_objs)
+
+@pytest.mark.django_db
+def test_exact_same_equal(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_to = ts_from = epoch + timedelta(days=1)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
+    assert 1 == len(facts_known)
+    assert 1 == len(fact_objs)
+
+    assert facts_known[0].id == fact_objs[0].id
+
+@pytest.mark.django_db
+def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch + timedelta(days=1)
+    ts_to = epoch + timedelta(days=2)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch)
+
+    assert 1 == len(facts_known)
+    assert 1 == len(fact_objs)
+
+    assert facts_known[0].id == fact_objs[0].id
+
+@pytest.mark.django_db
+def test_to_lte(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_to = epoch + timedelta(days=1)
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch)
+    facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known)
+
+    assert 2 == len(facts_known_subset)
+    assert 2 == len(fact_objs)
+
+    for i in xrange(0, len(fact_objs)):
+        assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
+
+@pytest.mark.django_db
+def test_from_gt(hosts, fact_scans):
+    epoch = timezone.now()
+    ts_from = epoch
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch)
+    facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known)
+
+    assert 2 == len(facts_known_subset)
+    assert 2 == len(fact_objs)
+
+    for i in xrange(0, len(fact_objs)):
+        assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id
+
+@pytest.mark.django_db
+def test_no_ts(hosts, fact_scans):
+    epoch = timezone.now()
+
+    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=epoch)
+    assert 3 == len(facts_known)
+    assert 3 == len(fact_objs)
+
+    # get_timeline returns facts newest-first; compare against facts_known in reverse.
+    for i in xrange(0, len(fact_objs)):
+        assert facts_known[len(facts_known) - i - 1].id == fact_objs[i].id
+
diff --git a/awx/main/tests/functional/packages.json b/awx/main/tests/functional/packages.json
new file mode 100644
index 0000000000..7bc735d06f
--- /dev/null
+++ b/awx/main/tests/functional/packages.json
@@ -0,0 +1,2922 @@
+[
+    {
+        "name": "kbd",
+        "source": "rpm",
+        "epoch": null,
+        "version": "1.15.5",
+        "release": "10.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "centos-release",
+        "source": "rpm",
+        "epoch": null,
+        "version": "7",
+        "release": "0.1406.el7.centos.2.3",
+        "arch": "x86_64"
+    },
+    {
+        "name": "postfix",
+        "source": "rpm",
+        "epoch": 2,
+        "version": "2.10.1",
+        "release": "6.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "filesystem",
+        "source": "rpm",
+        "epoch": null,
+        "version": "3.2",
+        "release": "18.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "tuned",
+        "source": "rpm",
+        "epoch": null,
+        "version": "2.3.0",
+        "release": "11.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "ncurses-base",
+        "source": "rpm",
+        "epoch": null,
+        "version": "5.9",
+        "release": "13.20130511.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "aic94xx-firmware",
+        "source": "rpm",
+        "epoch": null,
+        "version": "30",
+        "release": "6.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "kbd-misc",
+        "source": "rpm",
+        "epoch": null,
+        "version": "1.15.5",
+        "release": "10.el7",
+        "arch": "noarch"
+    },
+    {
+        "name": "irqbalance",
+        "source": "rpm",
+        "epoch": 2,
+        "version": "1.0.6",
+        "release": "5.el7",
+        "arch": "x86_64"
+    },
+    {
+        "name": "tzdata",
+        "source": "rpm",
+        "epoch": null,
+        "version": "2014b",
+        "release": "1.el7",
+        "arch":
"noarch" + }, + { + "name": "openssh-clients", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "glibc-common", + "source": "rpm", + "epoch": null, + "version": "2.17", + "release": "55.el7", + "arch": "x86_64" + }, + { + "name": "authconfig", + "source": "rpm", + "epoch": null, + "version": "6.2.8", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "xz-libs", + "source": "rpm", + "epoch": null, + "version": "5.1.2", + "release": "8alpha.el7", + "arch": "x86_64" + }, + { + "name": "btrfs-progs", + "source": "rpm", + "epoch": null, + "version": "3.12", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "ncurses-libs", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "x86_64" + }, + { + "name": "sudo", + "source": "rpm", + "epoch": null, + "version": "1.8.6p7", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libsepol", + "source": "rpm", + "epoch": null, + "version": "2.1.9", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "iprutils", + "source": "rpm", + "epoch": null, + "version": "2.3.16", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libselinux", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "iwl6000g2b-firmware", + "source": "rpm", + "epoch": null, + "version": "17.168.5.2", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "info", + "source": "rpm", + "epoch": null, + "version": "5.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl7260-firmware", + "source": "rpm", + "epoch": null, + "version": "22.0.7.0", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "dbus-libs", + "source": "rpm", + "epoch": 1, + "version": "1.6.12", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libertas-sd8787-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "sed", + "source": "rpm", + "epoch": null, + "version": "4.2.2", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "iwl6050-firmware", + "source": "rpm", + "epoch": null, + "version": "41.28.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "chkconfig", + "source": "rpm", + "epoch": null, + "version": "1.3.61", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl1000-firmware", + "source": "rpm", + "epoch": 1, + "version": "39.31.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "nspr", + "source": "rpm", + "epoch": null, + "version": "4.10.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl6000-firmware", + "source": "rpm", + "epoch": null, + "version": "9.221.4.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "nss-util", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "iwl2000-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "grep", + "source": "rpm", + "epoch": null, + "version": "2.16", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "iwl5150-firmware", + "source": "rpm", + "epoch": null, + "version": "8.24.2.2", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "gawk", + "source": "rpm", + "epoch": null, + "version": "4.0.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": 
"iwl4965-firmware", + "source": "rpm", + "epoch": null, + "version": "228.61.2.24", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "expat", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "iwl3160-firmware", + "source": "rpm", + "epoch": null, + "version": "22.0.7.0", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libattr", + "source": "rpm", + "epoch": null, + "version": "2.4.46", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "iwl3945-firmware", + "source": "rpm", + "epoch": null, + "version": "15.32.2.9", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libcap", + "source": "rpm", + "epoch": null, + "version": "2.22", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libsemanage-python", + "source": "rpm", + "epoch": null, + "version": "2.1.10", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "libxml2", + "source": "rpm", + "epoch": null, + "version": "2.9.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-HTTP-Tiny", + "source": "rpm", + "epoch": null, + "version": "0.033", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libgcrypt", + "source": "rpm", + "epoch": null, + "version": "1.5.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Perldoc", + "source": "rpm", + "epoch": null, + "version": "3.20", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "lua", + "source": "rpm", + "epoch": null, + "version": "5.1.4", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "perl-Encode", + "source": "rpm", + "epoch": null, + "version": "2.51", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "pkgconfig", + "source": "rpm", + "epoch": 1, + "version": "0.27.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Usage", + "source": "rpm", + "epoch": null, + "version": "1.63", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "shared-mime-info", + "source": "rpm", + "epoch": null, + "version": "1.1", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-Exporter", + "source": "rpm", + "epoch": null, + "version": "5.68", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libcap-ng", + "source": "rpm", + "epoch": null, + "version": "0.7.3", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-Time-Local", + "source": "rpm", + "epoch": null, + "version": "1.2300", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "libidn", + "source": "rpm", + "epoch": null, + "version": "1.28", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Carp", + "source": "rpm", + "epoch": null, + "version": "1.26", + "release": "244.el7", + "arch": "noarch" + }, + { + "name": "gmp", + "source": "rpm", + "epoch": 1, + "version": "5.1.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-PathTools", + "source": "rpm", + "epoch": null, + "version": "3.40", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "p11-kit", + "source": "rpm", + "epoch": null, + "version": "0.18.7", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-macros", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "libdaemon", + "source": "rpm", + "epoch": null, + "version": "0.14", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-File-Temp", + "source": "rpm", + "epoch": null, + "version": "0.23.01", + "release": 
"3.el7", + "arch": "noarch" + }, + { + "name": "libcroco", + "source": "rpm", + "epoch": null, + "version": "0.6.8", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-threads-shared", + "source": "rpm", + "epoch": null, + "version": "1.43", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libnl3-cli", + "source": "rpm", + "epoch": null, + "version": "3.2.21", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "perl-Filter", + "source": "rpm", + "epoch": null, + "version": "1.49", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "cyrus-sasl-lib", + "source": "rpm", + "epoch": null, + "version": "2.1.26", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "perl-Getopt-Long", + "source": "rpm", + "epoch": null, + "version": "2.40", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "groff-base", + "source": "rpm", + "epoch": null, + "version": "1.22.2", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "04bbaa7b", + "release": "4c881cbf", + "arch": null + }, + { + "name": "libunistring", + "source": "rpm", + "epoch": null, + "version": "0.9.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "libicu", + "source": "rpm", + "epoch": null, + "version": "50.1.2", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "diffutils", + "source": "rpm", + "epoch": null, + "version": "3.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libdnet", + "source": "rpm", + "epoch": null, + "version": "1.12", + "release": "13.1.el7", + "arch": "x86_64" + }, + { + "name": "xz", + "source": "rpm", + "epoch": null, + "version": "5.1.2", + "release": "8alpha.el7", + "arch": "x86_64" + }, + { + "name": "open-vm-tools", + "source": "rpm", + "epoch": null, + "version": "9.4.0", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "sysvinit-tools", + "source": "rpm", + "epoch": null, + "version": "2.88", + "release": "14.dsf.el7", + "arch": "x86_64" + }, + { + "name": "open-vm-tools-deploypkg", + "source": "rpm", + "epoch": 0, + "version": "9.4.10", + "release": "3", + "arch": "x86_64" + }, + { + "name": "newt", + "source": "rpm", + "epoch": null, + "version": "0.52.15", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-kitchen", + "source": "rpm", + "epoch": null, + "version": "1.1.1", + "release": "5.el7", + "arch": "noarch" + }, + { + "name": "ethtool", + "source": "rpm", + "epoch": 2, + "version": "3.8", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "yum-utils", + "source": "rpm", + "epoch": null, + "version": "1.1.31", + "release": "29.el7", + "arch": "noarch" + }, + { + "name": "hostname", + "source": "rpm", + "epoch": null, + "version": "3.13", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "emacs-filesystem", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "gdbm", + "source": "rpm", + "epoch": null, + "version": "1.10", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "emacs-common", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "less", + "source": "rpm", + "epoch": null, + "version": "458", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "epel-release", + "source": "rpm", + "epoch": null, + "version": "7", + "release": "5", + "arch": "noarch" + }, + { + "name": "p11-kit-trust", + "source": "rpm", + "epoch": null, + "version": 
"0.18.7", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pkcs11-helper", + "source": "rpm", + "epoch": null, + "version": "1.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "nettle", + "source": "rpm", + "epoch": null, + "version": "2.7.1", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "easy-rsa", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "1.el7", + "arch": "noarch" + }, + { + "name": "gobject-introspection", + "source": "rpm", + "epoch": null, + "version": "1.36.0", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libevent", + "source": "rpm", + "epoch": null, + "version": "2.0.21", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "gsettings-desktop-schemas", + "source": "rpm", + "epoch": null, + "version": "3.8.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "dhcp-libs", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "acl", + "source": "rpm", + "epoch": null, + "version": "2.2.51", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "dhcp", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "elfutils-libs", + "source": "rpm", + "epoch": null, + "version": "0.158", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "bind-license", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "noarch" + }, + { + "name": "mozjs17", + "source": "rpm", + "epoch": null, + "version": "17.0.0", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "bind", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "pinentry", + "source": "rpm", + "epoch": null, + "version": "0.8.1", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "bind-libs-lite", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "libselinux-utils", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "audit-libs", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libedit", + "source": "rpm", + "epoch": null, + "version": "3.0", + "release": "12.20121213cvs.el7", + "arch": "x86_64" + }, + { + "name": "audit-libs-python", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libmodman", + "source": "rpm", + "epoch": null, + "version": "2.0.1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "checkpolicy", + "source": "rpm", + "epoch": null, + "version": "2.1.12", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "glib-networking", + "source": "rpm", + "epoch": null, + "version": "2.36.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "setools-libs", + "source": "rpm", + "epoch": null, + "version": "3.3.7", + "release": "46.el7", + "arch": "x86_64" + }, + { + "name": "snappy", + "source": "rpm", + "epoch": null, + "version": "1.1.0", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "audit", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "numactl-libs", + "source": "rpm", + "epoch": null, + "version": "2.0.9", + "release": "2.el7", + "arch": "x86_64" + }, + { + 
"name": "autogen-libopts", + "source": "rpm", + "epoch": null, + "version": "5.18", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libverto", + "source": "rpm", + "epoch": null, + "version": "0.2.5", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "ntp", + "source": "rpm", + "epoch": null, + "version": "4.2.6p5", + "release": "19.el7.centos.3", + "arch": "x86_64" + }, + { + "name": "libsemanage", + "source": "rpm", + "epoch": null, + "version": "2.1.10", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "krb5-libs", + "source": "rpm", + "epoch": null, + "version": "1.11.3", + "release": "49.el7", + "arch": "x86_64" + }, + { + "name": "openldap", + "source": "rpm", + "epoch": null, + "version": "2.4.39", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "cracklib", + "source": "rpm", + "epoch": null, + "version": "2.9.0", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libmount", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "systemd-libs", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libpwquality", + "source": "rpm", + "epoch": null, + "version": "1.2.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pam", + "source": "rpm", + "epoch": null, + "version": "1.1.8", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "shadow-utils", + "source": "rpm", + "epoch": 2, + "version": "4.1.5.1", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "util-linux", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "python-libs", + "source": "rpm", + "epoch": null, + "version": "2.7.5", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "python-decorator", + "source": "rpm", + "epoch": null, + "version": "3.4.0", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "gettext", + "source": "rpm", + "epoch": null, + "version": "0.18.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "centos-logos", + "source": "rpm", + "epoch": null, + "version": "70.0.6", + "release": "1.el7.centos", + "arch": "noarch" + }, + { + "name": "libselinux-python", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "python-slip-dbus", + "source": "rpm", + "epoch": null, + "version": "0.4.0", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "pyliblzma", + "source": "rpm", + "epoch": null, + "version": "0.5.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "yum-metadata-parser", + "source": "rpm", + "epoch": null, + "version": "1.1.4", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "pyxattr", + "source": "rpm", + "epoch": null, + "version": "0.5.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "python-backports-ssl_match_hostname", + "source": "rpm", + "epoch": null, + "version": "3.4.0.2", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "python-pyudev", + "source": "rpm", + "epoch": null, + "version": "0.15", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "binutils", + "source": "rpm", + "epoch": null, + "version": "2.23.52.0.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "logrotate", + "source": "rpm", + "epoch": null, + "version": "3.8.6", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "alsa-lib", + "source": "rpm", + 
"epoch": null, + "version": "1.0.27.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "mariadb-libs", + "source": "rpm", + "epoch": 1, + "version": "5.5.35", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libcurl", + "source": "rpm", + "epoch": null, + "version": "7.29.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "python-urlgrabber", + "source": "rpm", + "epoch": null, + "version": "3.10", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "rpm-libs", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "fipscheck", + "source": "rpm", + "epoch": null, + "version": "1.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "json-c", + "source": "rpm", + "epoch": null, + "version": "0.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "virt-what", + "source": "rpm", + "epoch": null, + "version": "1.13", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libnetfilter_conntrack", + "source": "rpm", + "epoch": null, + "version": "1.0.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "iproute", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "qrencode-libs", + "source": "rpm", + "epoch": null, + "version": "3.4.1", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper-libs", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "systemd", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "systemd-sysv", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "iputils", + "source": "rpm", + "epoch": null, + "version": "20121221", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper-event-libs", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager-glib", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "polkit-pkla-compat", + "source": "rpm", + "epoch": null, + "version": "0.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "cronie-anacron", + "source": "rpm", + "epoch": null, + "version": "1.4.11", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "crontabs", + "source": "rpm", + "epoch": null, + "version": "1.11", + "release": "6.20121102git.el7", + "arch": "noarch" + }, + { + "name": "device-mapper-event", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "avahi-libs", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "avahi-autoipd", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "dnsmasq", + "source": "rpm", + "epoch": null, + "version": "2.66", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "ebtables", + "source": "rpm", + "epoch": null, + "version": "2.0.10", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "libpciaccess", + "source": "rpm", + "epoch": null, + "version": "0.13.1", + "release": "4.1.el7", + "arch": "x86_64" + }, + { + "name": "fxload", + "source": "rpm", + "epoch": null, + 
"version": "2002_04_11", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "alsa-tools-firmware", + "source": "rpm", + "epoch": null, + "version": "1.0.27", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libpipeline", + "source": "rpm", + "epoch": null, + "version": "1.2.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "gnupg2", + "source": "rpm", + "epoch": null, + "version": "2.0.22", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "rpm-python", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "pygpgme", + "source": "rpm", + "epoch": null, + "version": "0.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "hardlink", + "source": "rpm", + "epoch": 1, + "version": "1.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "dracut-network", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "plymouth", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "teamd", + "source": "rpm", + "epoch": null, + "version": "1.9", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "libestr", + "source": "rpm", + "epoch": null, + "version": "0.1.9", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager-tui", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "kernel", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "dracut-config-rescue", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "man-db", + "source": "rpm", + "epoch": null, + "version": "2.6.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "lvm2", + "source": "rpm", + "epoch": 7, + "version": "2.02.105", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "libgcc", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "setup", + "source": "rpm", + "epoch": null, + "version": "2.8.71", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "microcode_ctl", + "source": "rpm", + "epoch": 2, + "version": "2.1", + "release": "7.1.el7", + "arch": "x86_64" + }, + { + "name": "basesystem", + "source": "rpm", + "epoch": null, + "version": "10.0", + "release": "7.el7.centos", + "arch": "noarch" + }, + { + "name": "biosdevname", + "source": "rpm", + "epoch": null, + "version": "0.5.0", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "linux-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "openssh-server", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "parted", + "source": "rpm", + "epoch": null, + "version": "3.1", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "nss-softokn-freebl", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "selinux-policy-targeted", + "source": "rpm", + "epoch": null, + "version": "3.12.1", + "release": "153.el7", + "arch": "noarch" + }, + { + "name": "glibc", + "source": "rpm", + "epoch": null, + "version": "2.17", + "release": 
"55.el7", + "arch": "x86_64" + }, + { + "name": "xfsprogs", + "source": "rpm", + "epoch": null, + "version": "3.2.0", + "release": "0.10.alpha2.el7", + "arch": "x86_64" + }, + { + "name": "libstdc++", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "e2fsprogs", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "bash", + "source": "rpm", + "epoch": null, + "version": "4.2.45", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "passwd", + "source": "rpm", + "epoch": null, + "version": "0.79", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pcre", + "source": "rpm", + "epoch": null, + "version": "8.32", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "tar", + "source": "rpm", + "epoch": 2, + "version": "1.26", + "release": "29.el7", + "arch": "x86_64" + }, + { + "name": "zlib", + "source": "rpm", + "epoch": null, + "version": "1.2.7", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "rootfiles", + "source": "rpm", + "epoch": null, + "version": "8.1", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "iwl6000g2a-firmware", + "source": "rpm", + "epoch": null, + "version": "17.168.5.3", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libuuid", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "iwl2030-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "popt", + "source": "rpm", + "epoch": null, + "version": "1.13", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "iwl100-firmware", + "source": "rpm", + "epoch": null, + "version": "39.31.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libcom_err", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl135-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libdb", + "source": "rpm", + "epoch": null, + "version": "5.3.21", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "iwl105-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "bzip2-libs", + "source": "rpm", + "epoch": null, + "version": "1.0.6", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "libertas-sd8686-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "readline", + "source": "rpm", + "epoch": null, + "version": "6.2", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "ivtv-firmware", + "source": "rpm", + "epoch": 2, + "version": "20080701", + "release": "26.el7", + "arch": "noarch" + }, + { + "name": "elfutils-libelf", + "source": "rpm", + "epoch": null, + "version": "0.158", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libertas-usb8388-firmware", + "source": "rpm", + "epoch": 2, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "libgpg-error", + "source": "rpm", + "epoch": null, + "version": "1.12", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "iwl5000-firmware", + "source": "rpm", + "epoch": null, + "version": "8.83.5.1_1", + "release": "34.el7", + "arch": 
"noarch" + }, + { + "name": "libacl", + "source": "rpm", + "epoch": null, + "version": "2.2.51", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "f4a80eb5", + "release": "53a7ff4b", + "arch": null + }, + { + "name": "cpio", + "source": "rpm", + "epoch": null, + "version": "2.11", + "release": "22.el7", + "arch": "x86_64" + }, + { + "name": "perl-parent", + "source": "rpm", + "epoch": 1, + "version": "0.225", + "release": "244.el7", + "arch": "noarch" + }, + { + "name": "libnl3", + "source": "rpm", + "epoch": null, + "version": "3.2.21", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "perl-podlators", + "source": "rpm", + "epoch": null, + "version": "2.5.1", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "sqlite", + "source": "rpm", + "epoch": null, + "version": "3.7.17", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Escapes", + "source": "rpm", + "epoch": 1, + "version": "1.04", + "release": "285.el7", + "arch": "noarch" + }, + { + "name": "libffi", + "source": "rpm", + "epoch": null, + "version": "3.0.13", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "perl-Text-ParseWords", + "source": "rpm", + "epoch": null, + "version": "3.29", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "glib2", + "source": "rpm", + "epoch": null, + "version": "2.36.3", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-Storable", + "source": "rpm", + "epoch": null, + "version": "2.45", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "dbus-glib", + "source": "rpm", + "epoch": null, + "version": "0.100", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-constant", + "source": "rpm", + "epoch": null, + "version": "1.27", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "findutils", + "source": "rpm", + "epoch": 1, + "version": "4.5.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Socket", + "source": "rpm", + "epoch": null, + "version": "2.010", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "file-libs", + "source": "rpm", + "epoch": null, + "version": "5.11", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "perl-Time-HiRes", + "source": "rpm", + "epoch": 4, + "version": "1.9725", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libtasn1", + "source": "rpm", + "epoch": null, + "version": "3.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Scalar-List-Utils", + "source": "rpm", + "epoch": null, + "version": "1.27", + "release": "248.el7", + "arch": "x86_64" + }, + { + "name": "tcp_wrappers-libs", + "source": "rpm", + "epoch": null, + "version": "7.6", + "release": "77.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Simple", + "source": "rpm", + "epoch": 1, + "version": "3.28", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "file", + "source": "rpm", + "epoch": null, + "version": "5.11", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "perl-File-Path", + "source": "rpm", + "epoch": null, + "version": "2.09", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "nss-softokn", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "perl-threads", + "source": "rpm", + "epoch": null, + "version": "1.87", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libassuan", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + 
"release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-libs", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "e2fsprogs-libs", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "which", + "source": "rpm", + "epoch": null, + "version": "2.20", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "66fd4949", + "release": "4803fe57", + "arch": null + }, + { + "name": "libgomp", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "procps-ng", + "source": "rpm", + "epoch": null, + "version": "3.3.10", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "kmod-libs", + "source": "rpm", + "epoch": null, + "version": "14", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "net-tools", + "source": "rpm", + "epoch": null, + "version": "2.0", + "release": "0.17.20131004git.el7", + "arch": "x86_64" + }, + { + "name": "libnfnetlink", + "source": "rpm", + "epoch": null, + "version": "1.0.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libmspack", + "source": "rpm", + "epoch": 0, + "version": "0.0.20040308alpha", + "release": "2", + "arch": "x86_64" + }, + { + "name": "slang", + "source": "rpm", + "epoch": null, + "version": "2.2.4", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "python-chardet", + "source": "rpm", + "epoch": null, + "version": "2.0.1", + "release": "7.el7", + "arch": "noarch" + }, + { + "name": "lzo", + "source": "rpm", + "epoch": null, + "version": "2.06", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "yum", + "source": "rpm", + "epoch": null, + "version": "3.4.3", + "release": "125.el7.centos", + "arch": "noarch" + }, + { + "name": "pciutils-libs", + "source": "rpm", + "epoch": null, + "version": "3.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "gpm-libs", + "source": "rpm", + "epoch": null, + "version": "1.20.7", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "keyutils-libs", + "source": "rpm", + "epoch": null, + "version": "1.5.8", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "liblockfile", + "source": "rpm", + "epoch": null, + "version": "1.08", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "gettext-libs", + "source": "rpm", + "epoch": null, + "version": "0.18.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "emacs-nox", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libteam", + "source": "rpm", + "epoch": null, + "version": "1.9", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "352c64e5", + "release": "52ae6884", + "arch": null + }, + { + "name": "ca-certificates", + "source": "rpm", + "epoch": null, + "version": "2013.1.95", + "release": "71.el7", + "arch": "noarch" + }, + { + "name": "openvpn", + "source": "rpm", + "epoch": null, + "version": "2.3.7", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "gnutls", + "source": "rpm", + "epoch": null, + "version": "3.1.18", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "strace", + "source": "rpm", + "epoch": null, + 
"version": "4.8", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "ModemManager-glib", + "source": "rpm", + "epoch": null, + "version": "1.1.0", + "release": "6.git20130913.el7", + "arch": "x86_64" + }, + { + "name": "tmux", + "source": "rpm", + "epoch": null, + "version": "1.8", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "vim-minimal", + "source": "rpm", + "epoch": 2, + "version": "7.4.160", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "dhcp-common", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "device-mapper-persistent-data", + "source": "rpm", + "epoch": null, + "version": "0.3.2", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "dhclient", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "libdb-utils", + "source": "rpm", + "epoch": null, + "version": "5.3.21", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "bind-libs", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "libss", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "bind-utils", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "make", + "source": "rpm", + "epoch": 1, + "version": "3.82", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "nmap-ncat", + "source": "rpm", + "epoch": 2, + "version": "6.40", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "freetype", + "source": "rpm", + "epoch": null, + "version": "2.4.11", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "policycoreutils", + "source": "rpm", + "epoch": null, + "version": "2.2.5", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "ncurses", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "x86_64" + }, + { + "name": "python-IPy", + "source": "rpm", + "epoch": null, + "version": "0.75", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "libproxy", + "source": "rpm", + "epoch": null, + "version": "0.4.11", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libcgroup", + "source": "rpm", + "epoch": null, + "version": "0.41", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libsoup", + "source": "rpm", + "epoch": null, + "version": "2.42.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "policycoreutils-python", + "source": "rpm", + "epoch": null, + "version": "2.2.5", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "libndp", + "source": "rpm", + "epoch": null, + "version": "1.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iftop", + "source": "rpm", + "epoch": null, + "version": "1.0", + "release": "0.7.pre4.el7", + "arch": "x86_64" + }, + { + "name": "libsysfs", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "ntpdate", + "source": "rpm", + "epoch": null, + "version": "4.2.6p5", + "release": "19.el7.centos.3", + "arch": "x86_64" + }, + { + "name": "ustr", + "source": "rpm", + "epoch": null, + "version": "1.0.4", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "nss-tools", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": 
"openssl-libs", + "source": "rpm", + "epoch": 1, + "version": "1.0.1e", + "release": "34.el7", + "arch": "x86_64" + }, + { + "name": "gzip", + "source": "rpm", + "epoch": null, + "version": "1.5", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "cracklib-dicts", + "source": "rpm", + "epoch": null, + "version": "2.9.0", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "nss", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libuser", + "source": "rpm", + "epoch": null, + "version": "0.60", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "coreutils", + "source": "rpm", + "epoch": null, + "version": "8.22", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libblkid", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "libutempter", + "source": "rpm", + "epoch": null, + "version": "1.1.6", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "nss-sysinit", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "python", + "source": "rpm", + "epoch": null, + "version": "2.7.5", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "dbus-python", + "source": "rpm", + "epoch": null, + "version": "1.1.1", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "pygobject3-base", + "source": "rpm", + "epoch": null, + "version": "3.8.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-slip", + "source": "rpm", + "epoch": null, + "version": "0.4.0", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "python-iniparse", + "source": "rpm", + "epoch": null, + "version": "0.4", + "release": "9.el7", + "arch": "noarch" + }, + { + "name": "newt-python", + "source": "rpm", + "epoch": null, + "version": "0.52.15", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-configobj", + "source": "rpm", + "epoch": null, + "version": "4.7.2", + "release": "7.el7", + "arch": "noarch" + }, + { + "name": "python-backports", + "source": "rpm", + "epoch": null, + "version": "1.0", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "python-setuptools", + "source": "rpm", + "epoch": null, + "version": "0.9.8", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "grubby", + "source": "rpm", + "epoch": null, + "version": "8.28", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "kmod", + "source": "rpm", + "epoch": null, + "version": "14", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "openssl", + "source": "rpm", + "epoch": 1, + "version": "1.0.1e", + "release": "34.el7", + "arch": "x86_64" + }, + { + "name": "plymouth-core-libs", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "libssh2", + "source": "rpm", + "epoch": null, + "version": "1.4.3", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "python-pycurl", + "source": "rpm", + "epoch": null, + "version": "7.19.0", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "curl", + "source": "rpm", + "epoch": null, + "version": "7.29.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "rpm", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "selinux-policy", + "source": "rpm", + "epoch": null, + "version": "3.12.1", + "release": "153.el7", + 
"arch": "noarch" + }, + { + "name": "fipscheck-lib", + "source": "rpm", + "epoch": null, + "version": "1.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "openssh", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "dmidecode", + "source": "rpm", + "epoch": 1, + "version": "2.12", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libmnl", + "source": "rpm", + "epoch": null, + "version": "1.0.3", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "iptables", + "source": "rpm", + "epoch": null, + "version": "1.4.21", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "libpcap", + "source": "rpm", + "epoch": 14, + "version": "1.5.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "cryptsetup-libs", + "source": "rpm", + "epoch": null, + "version": "1.6.3", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "dbus", + "source": "rpm", + "epoch": 1, + "version": "1.6.12", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libgudev1", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "initscripts", + "source": "rpm", + "epoch": null, + "version": "9.49.17", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "polkit", + "source": "rpm", + "epoch": null, + "version": "0.112", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "os-prober", + "source": "rpm", + "epoch": null, + "version": "1.58", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "cronie", + "source": "rpm", + "epoch": null, + "version": "1.4.11", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "grub2-tools", + "source": "rpm", + "epoch": 1, + "version": "2.02", + "release": "0.2.10.el7.centos.1", + "arch": "x86_64" + }, + { + "name": "lvm2-libs", + "source": "rpm", + "epoch": 7, + "version": "2.02.105", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "avahi", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "wpa_supplicant", + "source": "rpm", + "epoch": 1, + "version": "2.0", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "ppp", + "source": "rpm", + "epoch": null, + "version": "2.4.5", + "release": "33.el7", + "arch": "x86_64" + }, + { + "name": "hwdata", + "source": "rpm", + "epoch": null, + "version": "0.252", + "release": "7.3.el7", + "arch": "noarch" + }, + { + "name": "libdrm", + "source": "rpm", + "epoch": null, + "version": "2.4.50", + "release": "1.1.el7", + "arch": "x86_64" + }, + { + "name": "alsa-firmware", + "source": "rpm", + "epoch": null, + "version": "1.0.27", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "kpartx", + "source": "rpm", + "epoch": null, + "version": "0.4.9", + "release": "66.el7", + "arch": "x86_64" + }, + { + "name": "pth", + "source": "rpm", + "epoch": null, + "version": "2.0.7", + "release": "22.el7", + "arch": "x86_64" + }, + { + "name": "rpm-build-libs", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "gpgme", + "source": "rpm", + "epoch": null, + "version": "1.3.2", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "yum-plugin-fastestmirror", + "source": "rpm", + "epoch": null, + "version": "1.1.31", + "release": "24.el7", + 
"arch": "noarch" + }, + { + "name": "kernel-tools-libs", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "dracut", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "plymouth-scripts", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "jansson", + "source": "rpm", + "epoch": null, + "version": "2.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "rsyslog", + "source": "rpm", + "epoch": null, + "version": "7.4.7", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "kexec-tools", + "source": "rpm", + "epoch": null, + "version": "2.0.4", + "release": "32.el7.centos", + "arch": "x86_64" + }, + { + "name": "grub2", + "source": "rpm", + "epoch": 1, + "version": "2.02", + "release": "0.2.10.el7.centos.1", + "arch": "x86_64" + }, + { + "name": "kernel-tools", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "firewalld", + "source": "rpm", + "epoch": null, + "version": "0.3.9", + "release": "7.el7", + "arch": "noarch" + } +] diff --git a/awx/main/tests/functional/services.json b/awx/main/tests/functional/services.json new file mode 100644 index 0000000000..a86bf4a875 --- /dev/null +++ b/awx/main/tests/functional/services.json @@ -0,0 +1,697 @@ +[ + { + "source": "sysv", + "state": "running", + "name": "iprdump" + }, + { + "source": "sysv", + "state": "running", + "name": "iprinit" + }, + { + "source": "sysv", + "state": "running", + "name": "iprupdate" + }, + { + "source": "sysv", + "state": "stopped", + "name": "netconsole" + }, + { + "source": "sysv", + "state": "running", + "name": "network" + }, + { + "source": "systemd", + "state": "stopped", + "name": "arp-ethers.service" + }, + { + "source": "systemd", + "state": "running", + "name": "auditd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "autovt@.service" + }, + { + "source": "systemd", + "state": "running", + "name": "avahi-daemon.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "blk-availability.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "brandbot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "console-getty.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "console-shell.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "cpupower.service" + }, + { + "source": "systemd", + "state": "running", + "name": "crond.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.fedoraproject.FirewallD1.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.Avahi.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.hostname1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.locale1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.login1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.machine1.service" + }, + { + "source": "systemd", + "state": "running", + "name": 
"dbus-org.freedesktop.NetworkManager.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.nm-dispatcher.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.timedate1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "debug-shell.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dhcpd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dhcpd6.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dhcrelay.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dm-event.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dnsmasq.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-cmdline.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-initqueue.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-mount.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-mount.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-pivot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-trigger.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-udev.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-shutdown.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "ebtables.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "emergency.service" + }, + { + "source": "systemd", + "state": "running", + "name": "firewalld.service" + }, + { + "source": "systemd", + "state": "running", + "name": "getty@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "halt-local.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-cleanup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-parse-etc.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-switch-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-udevadm-cleanup-db.service" + }, + { + "source": "systemd", + "state": "running", + "name": "irqbalance.service" + }, + { + "source": "systemd", + "state": "running", + "name": "kdump.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "kmod-static-nodes.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "lvm2-lvmetad.service" + }, + { + "source": "systemd", + "state": "running", + "name": "lvm2-monitor.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "lvm2-pvscan@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "messagebus.service" + }, + { + "source": "systemd", + "state": "running", + "name": "microcode.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "named-setup-rndc.service" + }, + { + "source": "systemd", + "state": "running", + "name": "named.service" + }, + { + "source": "systemd", + "state": "running", + "name": "NetworkManager-dispatcher.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "NetworkManager-wait-online.service" + }, + { + "source": "systemd", + "state": "running", + "name": "NetworkManager.service" + }, + { + "source": "systemd", + "state": "running", + 
"name": "ntpd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "ntpdate.service" + }, + { + "source": "systemd", + "state": "running", + "name": "openvpn@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-halt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-kexec.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-poweroff.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-quit-wait.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-quit.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-read-write.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-reboot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-start.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-switch-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "polkit.service" + }, + { + "source": "systemd", + "state": "running", + "name": "postfix.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "quotaon.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rc-local.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rdisc.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rescue.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-autorelabel-mark.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-autorelabel.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-configure.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-dmesg.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-domainname.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-import-state.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-loadmodules.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-readonly.service" + }, + { + "source": "systemd", + "state": "running", + "name": "rsyslog.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "serial-getty@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "sshd-keygen.service" + }, + { + "source": "systemd", + "state": "running", + "name": "sshd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "sshd@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-console.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-plymouth.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-wall.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-backlight@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-binfmt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-fsck-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-fsck@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-halt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hibernate.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": 
"systemd-hostnamed.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hybrid-sleep.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-initctl.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-journal-flush.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-journald.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-kexec.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-localed.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-logind.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-machined.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-modules-load.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-nspawn@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-poweroff.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-quotacheck.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-random-seed.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-collect.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-readahead-done.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-drop.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-replay.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-reboot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-remount-fs.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-shutdownd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-suspend.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-sysctl.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-timedated.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-clean.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-setup-dev.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-setup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udev-settle.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udev-trigger.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udevd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-update-utmp-runlevel.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-update-utmp.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-user-sessions.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-vconsole-setup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "teamd@.service" + }, + { + "source": "systemd", + "state": "running", + "name": "tuned.service" + }, + { + "source": "systemd", + "state": "running", + "name": "vmtoolsd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "wpa_supplicant.service" + } +] diff --git a/awx/main/tests/functional/test_notifications.py 
b/awx/main/tests/functional/test_notifications.py
new file mode 100644
index 0000000000..ffa6027f73
--- /dev/null
+++ b/awx/main/tests/functional/test_notifications.py
@@ -0,0 +1,115 @@
+import mock
+import pytest
+
+from awx.main.models.notifications import Notifier
+from awx.main.models.inventory import Inventory, Group
+from awx.main.models.jobs import JobTemplate
+
+from django.core.urlresolvers import reverse
+
+@pytest.fixture
+def notifier():
+    return Notifier.objects.create(name="test-notification",
+                                   notification_type="webhook",
+                                   notification_configuration=dict(url="http://localhost",
+                                                                   headers={"Test": "Header"}))
+
+@pytest.mark.django_db
+def test_get_notifier_list(get, user, notifier):
+    url = reverse('api:notifier_list')
+    response = get(url, user('admin', True))
+    assert response.status_code == 200
+    assert len(response.data['results']) == 1
+
+@pytest.mark.django_db
+def test_basic_parameterization(get, post, user, organization):
+    u = user('admin-poster', True)
+    url = reverse('api:notifier_list')
+    response = post(url,
+                    dict(name="test-webhook",
+                         description="test webhook",
+                         organization=1,
+                         notification_type="webhook",
+                         notification_configuration=dict(url="http://localhost",
+                                                         headers={"Test": "Header"})),
+                    u)
+    assert response.status_code == 201
+    url = reverse('api:notifier_detail', args=(response.data['id'],))
+    response = get(url, u)
+    assert 'related' in response.data
+    assert 'organization' in response.data['related']
+    assert 'summary_fields' in response.data
+    assert 'organization' in response.data['summary_fields']
+    assert 'notifications' in response.data['related']
+    assert 'notification_configuration' in response.data
+    assert 'url' in response.data['notification_configuration']
+    assert 'headers' in response.data['notification_configuration']
+
+@pytest.mark.django_db
+def test_encrypted_subfields(get, post, user, organization):
+    def assert_send(self, messages):
+        assert self.account_token == "shouldhide"
+        return 1
+    u = user('admin-poster', True)
+    url = reverse('api:notifier_list')
+    response = post(url,
+                    dict(name="test-twilio",
+                         description="test twilio",
+                         organization=organization.id,
+                         notification_type="twilio",
+                         notification_configuration=dict(account_sid="dummy",
+                                                         account_token="shouldhide",
+                                                         from_number="+19999999999",
+                                                         to_numbers=["9998887777"])),
+                    u)
+    assert response.status_code == 201
+    notifier_actual = Notifier.objects.get(id=response.data['id'])
+    url = reverse('api:notifier_detail', args=(response.data['id'],))
+    response = get(url, u)
+    assert response.data['notification_configuration']['account_token'] == "$encrypted$"
+    with mock.patch.object(notifier_actual.notification_class, "send_messages", assert_send):
+        notifier_actual.send("Test", {'body': "Test"})
+
+@pytest.mark.django_db
+def test_inherited_notifiers(get, post, user, organization, project):
+    u = user('admin-poster', True)
+    url = reverse('api:notifier_list')
+    notifiers = []
+    for nfiers in xrange(3):
+        response = post(url,
+                        dict(name="test-webhook-{}".format(nfiers),
+                             description="test webhook {}".format(nfiers),
+                             organization=1,
+                             notification_type="webhook",
+                             notification_configuration=dict(url="http://localhost",
+                                                             headers={"Test": "Header"})),
+                        u)
+        assert response.status_code == 201
+        notifiers.append(response.data['id'])
+    organization.projects.add(project)
+    i = Inventory.objects.create(name='test', organization=organization)
+    i.save()
+    g = Group.objects.create(name='test', inventory=i)
+    g.save()
+    jt = JobTemplate.objects.create(name='test',
+                                    inventory=i, project=project, playbook='debug.yml')
+    jt.save()
+    url = reverse('api:organization_notifiers_any_list', args=(organization.id,))
+    response = post(url, dict(id=notifiers[0]), u)
+    assert response.status_code == 204
+    url = reverse('api:project_notifiers_any_list', args=(project.id,))
+    response = post(url, dict(id=notifiers[1]), u)
+    assert response.status_code == 204
+    url = reverse('api:job_template_notifiers_any_list', args=(jt.id,))
+    response = post(url, dict(id=notifiers[2]), u)
+    assert response.status_code == 204
+    assert len(jt.notifiers['any']) == 3
+    assert len(project.notifiers['any']) == 2
+    assert len(g.inventory_source.notifiers['any']) == 1
+
+@pytest.mark.django_db
+def test_notifier_merging(get, post, user, organization, project, notifier):
+    user('admin-poster', True)
+    organization.projects.add(project)
+    organization.notifiers_any.add(notifier)
+    project.notifiers_any.add(notifier)
+    assert len(project.notifiers['any']) == 1
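The 3/2/1 assertions in test_inherited_notifiers above pin down the intended inheritance: a job template collects its own "any" notifiers plus its project's and organization's, a project collects its own plus the organization's, and an inventory source picks up the organization's. As a reading aid only, here is a self-contained sketch of that parent-chain union; the Node class and explicit parent links are illustrative assumptions, not the actual Tower models:

# Illustrative only: union of "any" notifiers along an assumed parent chain.
class Node(object):
    def __init__(self, name, notifiers=(), parent=None):
        self.name = name
        self.notifiers_any = list(notifiers)
        self.parent = parent

def effective_notifiers(obj):
    # Walk obj and every ancestor, concatenating their notifier lists.
    collected = []
    node = obj
    while node is not None:
        collected.extend(node.notifiers_any)
        node = node.parent
    return collected

org = Node("org", ["n0"])
project = Node("project", ["n1"], parent=org)
jt = Node("jt", ["n2"], parent=project)
assert len(effective_notifiers(jt)) == 3       # mirrors the job template count
assert len(effective_notifiers(project)) == 2  # mirrors the project count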
diff --git a/awx/main/tests/functional/test_rbac_api.py b/awx/main/tests/functional/test_rbac_api.py
index 0cb3166e7c..c99c49aad3 100644
--- a/awx/main/tests/functional/test_rbac_api.py
+++ b/awx/main/tests/functional/test_rbac_api.py
@@ -2,7 +2,7 @@
 import mock # noqa
 import pytest
 from django.core.urlresolvers import reverse
-from awx.main.models.rbac import Role
+from awx.main.models.rbac import Role, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR
 
 def mock_feature_enabled(feature, bypass_database=None):
     return True
@@ -24,39 +24,55 @@ def test_get_roles_list_admin(organization, get, admin):
     assert roles['count'] > 0
 
 @pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Unimplemented')
-def test_get_roles_list_user(organization, get, user):
+def test_get_roles_list_user(organization, inventory, team, get, user):
     'Users can see all roles they have access to, but not all roles'
-    assert False
+    this_user = user('user-test_get_roles_list_user')
+    organization.member_role.members.add(this_user)
+    custom_role = Role.objects.create(name='custom_role-test_get_roles_list_user')
+    organization.member_role.children.add(custom_role)
+
+    url = reverse('api:role_list')
+    response = get(url, this_user)
+    assert response.status_code == 200
+    roles = response.data
+    assert roles['count'] > 0
+    assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
+
+    role_hash = {}
+
+    for r in roles['results']:
+        role_hash[r['id']] = r
+
+    assert Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).id in role_hash
+    assert organization.admin_role.id in role_hash
+    assert organization.member_role.id in role_hash
+    assert this_user.resource.admin_role.id in role_hash
+    assert custom_role.id in role_hash
+
+    assert inventory.admin_role.id not in role_hash
+    assert team.member_role.id not in role_hash
+
+
 @pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_create_role(post, admin):
-    'Admins can create new roles'
-    #u = user('admin', True)
+def test_cant_create_role(post, admin):
    "Ensure we can't create new roles through the api"
+    # Some day we might want to do this, but until that is specced out, let's
+    # ensure we don't slip up and allow this implicitly through some helper or
+    # another
     response = post(reverse('api:role_list'), {'name': 'New Role'}, admin)
-    assert response.status_code == 201
+    assert response.status_code == 405
 
 @pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_delete_role(post, admin):
-    'Admins can delete a custom role'
-    assert False
-
-
-@pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_user_create_role(organization, get, user):
-    'User can create custom roles'
-    assert False
-
-@pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_user_delete_role(organization, get, user):
-    'User can delete their custom roles, but not any old row'
-    assert False
+def test_cant_delete_role(delete, admin):
+    "Ensure we can't delete roles through the api"
+    # Some day we might want to do this, but until that is specced out, let's
+    # ensure we don't slip up and allow this implicitly through some helper or
+    # another
+    response = delete(reverse('api:role_detail', args=(admin.resource.admin_role.id,)), admin)
+    assert response.status_code == 405
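The two 405 assertions above lean on stock Django REST Framework behavior rather than any custom guard: a view built on generics.ListAPIView implements only GET, so POST falls through to http_method_not_allowed(), and the same holds for PUT and DELETE against a generics.RetrieveAPIView. A minimal sketch of that shape, with a stand-in serializer (not Tower's actual view or serializer classes):

# Sketch only; RoleSerializer here is an illustrative stand-in.
from rest_framework import generics, serializers
from awx.main.models.rbac import Role

class RoleSerializer(serializers.ModelSerializer):
    class Meta:
        model = Role
        fields = ('id', 'name')

class RoleList(generics.ListAPIView):
    # No create mixin: POST is answered with HTTP 405 Method Not Allowed.
    queryset = Role.objects.all()
    serializer_class = RoleSerializer

class RoleDetail(generics.RetrieveAPIView):
    # Read-only detail view: PUT and DELETE likewise return HTTP 405.
    queryset = Role.objects.all()
    serializer_class = RoleSerializer

This is also why test_put_role_access_denied later in this diff tolerates either 403 or 405: depending on which check rejects the request first, an unauthorized PUT can surface as either status.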
{'name': 'Some new name'}, alice) - assert response.status_code == 403 + assert response.status_code == 403 or response.status_code == 405 # @@ -204,6 +267,67 @@ def test_remove_user_to_role(post, admin, role): post(url, {'disassociate': True, 'id': admin.id}, admin) assert role.members.filter(id=admin.id).count() == 0 +@pytest.mark.django_db +def test_org_admin_add_user_to_job_template(post, organization, check_jobtemplate, user): + 'Tests that a user with permissions to assign/revoke membership to a particular role can do so' + org_admin = user('org-admin') + joe = user('joe') + organization.admins.add(org_admin) + + assert check_jobtemplate.accessible_by(org_admin, {'write': True}) is True + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False + + post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'id': joe.id}, org_admin) + + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True + + +@pytest.mark.django_db +def test_org_admin_remove_user_to_job_template(post, organization, check_jobtemplate, user): + 'Tests that a user with permissions to assign/revoke membership to a particular role can do so' + org_admin = user('org-admin') + joe = user('joe') + organization.admins.add(org_admin) + check_jobtemplate.executor_role.members.add(joe) + + assert check_jobtemplate.accessible_by(org_admin, {'write': True}) is True + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True + + post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'disassociate': True, 'id': joe.id}, org_admin) + + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False + +@pytest.mark.django_db +def test_user_fail_to_add_user_to_job_template(post, organization, check_jobtemplate, user): + 'Tests that a user without permissions to assign/revoke membership to a particular role cannot do so' + rando = user('rando') + joe = user('joe') + + assert check_jobtemplate.accessible_by(rando, {'write': True}) is False + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False + + res = post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'id': joe.id}, rando) + assert res.status_code == 403 + + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is False + + +@pytest.mark.django_db +def test_user_fail_to_remove_user_to_job_template(post, organization, check_jobtemplate, user): + 'Tests that a user without permissions to assign/revoke membership to a particular role cannot do so' + rando = user('rando') + joe = user('joe') + check_jobtemplate.executor_role.members.add(joe) + + assert check_jobtemplate.accessible_by(rando, {'write': True}) is False + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True + + res = post(reverse('api:role_users_list', args=(check_jobtemplate.executor_role.id,)), {'disassociate': True, 'id': joe.id}, rando) + assert res.status_code == 403 + + assert check_jobtemplate.accessible_by(joe, {'execute': True}) is True + + # # /roles//teams/ # @@ -252,22 +376,6 @@ def test_role_parents(get, team, admin, role): assert response.data['count'] == 1 assert response.data['results'][0]['id'] == team.member_role.id -@pytest.mark.django_db -@pytest.mark.skipif(True, reason='Waiting on custom role requirements') -def test_role_add_parent(post, team, admin, role): - assert role.parents.count() == 0 - url = reverse('api:role_parents_list', args=(role.id,)) - post(url, {'id': team.member_role.id}, admin) - assert role.parents.count() 
== 1
-
-@pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_role_remove_parent(post, team, admin, role):
-    role.parents.add(team.member_role)
-    assert role.parents.count() == 1
-    url = reverse('api:role_parents_list', args=(role.id,))
-    post(url, {'disassociate': True, 'id': team.member_role.id}, admin)
-    assert role.parents.count() == 0
 #
 # /roles//children/
 #
@@ -282,22 +390,6 @@ def test_role_children(get, team, admin, role):
     assert response.data['count'] == 1
     assert response.data['results'][0]['id'] == role.id
-@pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_role_add_children(post, team, admin, role):
-    assert role.children.count() == 0
-    url = reverse('api:role_children_list', args=(role.id,))
-    post(url, {'id': team.member_role.id}, admin)
-    assert role.children.count() == 1
-
-@pytest.mark.django_db
-@pytest.mark.skipif(True, reason='Waiting on custom role requirements')
-def test_role_remove_children(post, team, admin, role):
-    role.children.add(team.member_role)
-    assert role.children.count() == 1
-    url = reverse('api:role_children_list', args=(role.id,))
-    post(url, {'disassociate': True, 'id': team.member_role.id}, admin)
-    assert role.children.count() == 0
diff --git a/awx/main/tests/functional/test_rbac_core.py b/awx/main/tests/functional/test_rbac_core.py
index 020023f9bd..deae21b3b8 100644
--- a/awx/main/tests/functional/test_rbac_core.py
+++ b/awx/main/tests/functional/test_rbac_core.py
@@ -138,3 +138,32 @@ def test_content_object(user):
     assert org.resource.content_object.id == org.id
     assert org.admin_role.content_object.id == org.id
+@pytest.mark.django_db
+def test_hierarchy_rebuilding():
+    'Tests some subtle cases around role hierarchy rebuilding'
+
+    X = Role.objects.create(name='X')
+    A = Role.objects.create(name='A')
+    B = Role.objects.create(name='B')
+    C = Role.objects.create(name='C')
+    D = Role.objects.create(name='D')
+
+    A.children.add(B)
+    A.children.add(D)
+    B.children.add(C)
+    C.children.add(D)
+
+    assert A.is_ancestor_of(D)
+    assert X.is_ancestor_of(D) is False
+
+    X.children.add(A)
+
+    assert X.is_ancestor_of(D) is True
+
+    X.children.remove(A)
+
+    # This is the tricky case: the rebuilder needs to ensure that D's role
+    # hierarchy is rebuilt only after both A and C have been updated.
+    assert X.is_ancestor_of(D) is False
+
+
diff --git a/awx/main/tests/old/ad_hoc.py b/awx/main/tests/old/ad_hoc.py
index 104c67d1fa..a912f7a89b 100644
--- a/awx/main/tests/old/ad_hoc.py
+++ b/awx/main/tests/old/ad_hoc.py
@@ -128,8 +128,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest):
         self.assertFalse(ad_hoc_command.passwords_needed_to_start)
         self.assertTrue(ad_hoc_command.signal_start())
         ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk)
-        self.check_job_result(ad_hoc_command, 'failed')
-        self.check_ad_hoc_command_events(ad_hoc_command, 'unreachable')
+        self.check_job_result(ad_hoc_command, 'successful')
+        self.check_ad_hoc_command_events(ad_hoc_command, 'skipped')
 
     @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('canceled', 0))
     def test_cancel_ad_hoc_command(self, ignore):
diff --git a/awx/main/tests/old/commands/cleanup_facts.py b/awx/main/tests/old/commands/cleanup_facts.py
deleted file mode 100644
index fc0f049aad..0000000000
--- a/awx/main/tests/old/commands/cleanup_facts.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Copyright (c) 2015 Ansible, Inc.
-# All Rights Reserved - -# Python -from datetime import datetime -from dateutil.relativedelta import relativedelta -import mock - -#Django -from django.core.management.base import CommandError - -# AWX -from awx.main.tests.base import BaseTest -from awx.fact.tests.base import MongoDBRequired, FactScanBuilder, TEST_FACT_PACKAGES, TEST_FACT_ANSIBLE, TEST_FACT_SERVICES -from command_base import BaseCommandMixin -from awx.main.management.commands.cleanup_facts import Command, CleanupFacts -from awx.fact.models.fact import * # noqa - -__all__ = ['CommandTest','CleanupFactsUnitTest', 'CleanupFactsCommandFunctionalTest'] - -class CleanupFactsCommandFunctionalTest(BaseCommandMixin, BaseTest, MongoDBRequired): - def setUp(self): - super(CleanupFactsCommandFunctionalTest, self).setUp() - self.create_test_license_file() - self.builder = FactScanBuilder() - self.builder.add_fact('ansible', TEST_FACT_ANSIBLE) - - def test_invoke_zero_ok(self): - self.builder.set_epoch(datetime(year=2015, day=2, month=1, microsecond=0)) - self.builder.build(scan_count=20, host_count=10) - - result, stdout, stderr = self.run_command('cleanup_facts', granularity='2y', older_than='1d') - self.assertEqual(stdout, 'Deleted %s facts.\n' % ((200 / 2))) - - def test_invoke_zero_deleted(self): - result, stdout, stderr = self.run_command('cleanup_facts', granularity='1w',older_than='5d') - self.assertEqual(stdout, 'Deleted 0 facts.\n') - - def test_invoke_all_deleted(self): - self.builder.build(scan_count=20, host_count=10) - - result, stdout, stderr = self.run_command('cleanup_facts', granularity='0d', older_than='0d') - self.assertEqual(stdout, 'Deleted 200 facts.\n') - - def test_invoke_params_required(self): - result, stdout, stderr = self.run_command('cleanup_facts') - self.assertIsInstance(result, CommandError) - self.assertEqual(str(result), 'Both --granularity and --older_than are required.') - - def test_module(self): - self.builder.add_fact('packages', TEST_FACT_PACKAGES) - self.builder.add_fact('services', TEST_FACT_SERVICES) - self.builder.build(scan_count=5, host_count=5) - - result, stdout, stderr = self.run_command('cleanup_facts', granularity='0d', older_than='0d', module='packages') - self.assertEqual(stdout, 'Deleted 25 facts.\n') - -class CommandTest(BaseTest): - def setUp(self): - super(CommandTest, self).setUp() - self.create_test_license_file() - - @mock.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run') - def test_parameters_ok(self, run): - - kv = { - 'older_than': '1d', - 'granularity': '1d', - 'module': None, - } - cmd = Command() - cmd.handle(None, **kv) - run.assert_called_once_with(relativedelta(days=1), relativedelta(days=1), module=None) - - def test_string_time_to_timestamp_ok(self): - kvs = [ - { - 'time': '2w', - 'timestamp': relativedelta(weeks=2), - 'msg': '2 weeks', - }, - { - 'time': '23d', - 'timestamp': relativedelta(days=23), - 'msg': '23 days', - }, - { - 'time': '11m', - 'timestamp': relativedelta(months=11), - 'msg': '11 months', - }, - { - 'time': '14y', - 'timestamp': relativedelta(years=14), - 'msg': '14 years', - }, - ] - for kv in kvs: - cmd = Command() - res = cmd.string_time_to_timestamp(kv['time']) - self.assertEqual(kv['timestamp'], res, "%s should convert to %s" % (kv['time'], kv['msg'])) - - def test_string_time_to_timestamp_invalid(self): - kvs = [ - { - 'time': '2weeks', - 'msg': 'weeks instead of w', - }, - { - 'time': '2days', - 'msg': 'days instead of d', - }, - { - 'time': '23', - 'msg': 'no unit specified', - }, - { - 'time': None, - 'msg': 'no value 
specified', - }, - { - 'time': 'zigzag', - 'msg': 'random string specified', - }, - ] - for kv in kvs: - cmd = Command() - res = cmd.string_time_to_timestamp(kv['time']) - self.assertIsNone(res, kv['msg']) - - # Mock run() just in case, but it should never get called because an error should be thrown - @mock.patch('awx.main.management.commands.cleanup_facts.CleanupFacts.run') - def test_parameters_fail(self, run): - kvs = [ - { - 'older_than': '1week', - 'granularity': '1d', - 'msg': 'Invalid older_than param value', - }, - { - 'older_than': '1d', - 'granularity': '1year', - 'msg': 'Invalid granularity param value', - } - ] - for kv in kvs: - cmd = Command() - with self.assertRaises(CommandError): - cmd.handle(None, older_than=kv['older_than'], granularity=kv['granularity']) - -class CleanupFactsUnitTest(BaseCommandMixin, BaseTest, MongoDBRequired): - def setUp(self): - super(CleanupFactsUnitTest, self).setUp() - - self.builder = FactScanBuilder() - self.builder.add_fact('ansible', TEST_FACT_ANSIBLE) - self.builder.add_fact('packages', TEST_FACT_PACKAGES) - self.builder.build(scan_count=20, host_count=10) - - ''' - Create 10 hosts with 40 facts each. After cleanup, there should be 20 facts for each host. - Then ensure the correct facts are deleted. - ''' - def test_cleanup_logic(self): - cleanup_facts = CleanupFacts() - fact_oldest = FactVersion.objects.all().order_by('timestamp').first() - granularity = relativedelta(years=2) - - deleted_count = cleanup_facts.cleanup(self.builder.get_timestamp(0), granularity) - self.assertEqual(deleted_count, 2 * (self.builder.get_scan_count() * self.builder.get_host_count()) / 2) - - # Check the number of facts per host - for host in self.builder.get_hosts(): - count = FactVersion.objects.filter(host=host).count() - scan_count = (2 * self.builder.get_scan_count()) / 2 - self.assertEqual(count, scan_count) - - count = Fact.objects.filter(host=host).count() - self.assertEqual(count, scan_count) - - # Ensure that only 2 facts (ansible and packages) exists per granularity time - date_pivot = self.builder.get_timestamp(0) - for host in self.builder.get_hosts(): - while date_pivot > fact_oldest.timestamp: - date_pivot_next = date_pivot - granularity - kv = { - 'timestamp__lte': date_pivot, - 'timestamp__gt': date_pivot_next, - 'host': host, - } - count = FactVersion.objects.filter(**kv).count() - self.assertEqual(count, 2, "should only be 2 FactVersion per the 2 year granularity") - count = Fact.objects.filter(**kv).count() - self.assertEqual(count, 2, "should only be 2 Fact per the 2 year granularity") - date_pivot = date_pivot_next - - ''' - Create 10 hosts with 40 facts each. After cleanup, there should be 30 facts for each host. - Then ensure the correct facts are deleted. 
- ''' - def test_cleanup_module(self): - cleanup_facts = CleanupFacts() - fact_oldest = FactVersion.objects.all().order_by('timestamp').first() - granularity = relativedelta(years=2) - - deleted_count = cleanup_facts.cleanup(self.builder.get_timestamp(0), granularity, module='ansible') - self.assertEqual(deleted_count, (self.builder.get_scan_count() * self.builder.get_host_count()) / 2) - - # Check the number of facts per host - for host in self.builder.get_hosts(): - count = FactVersion.objects.filter(host=host).count() - self.assertEqual(count, 30) - - count = Fact.objects.filter(host=host).count() - self.assertEqual(count, 30) - - # Ensure that only 1 ansible fact exists per granularity time - date_pivot = self.builder.get_timestamp(0) - for host in self.builder.get_hosts(): - while date_pivot > fact_oldest.timestamp: - date_pivot_next = date_pivot - granularity - kv = { - 'timestamp__lte': date_pivot, - 'timestamp__gt': date_pivot_next, - 'host': host, - 'module': 'ansible', - } - count = FactVersion.objects.filter(**kv).count() - self.assertEqual(count, 1) - count = Fact.objects.filter(**kv).count() - self.assertEqual(count, 1) - date_pivot = date_pivot_next - - - - - - diff --git a/awx/main/tests/old/commands/run_fact_cache_receiver.py b/awx/main/tests/old/commands/run_fact_cache_receiver.py deleted file mode 100644 index 7dedf7657a..0000000000 --- a/awx/main/tests/old/commands/run_fact_cache_receiver.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved - -# Python -import time -from datetime import datetime -import mock -import unittest2 as unittest -from copy import deepcopy -from mock import MagicMock - -# AWX -from awx.main.tests.base import BaseTest -from awx.fact.tests.base import MongoDBRequired -from command_base import BaseCommandMixin -from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver -from awx.fact.models.fact import * # noqa - -__all__ = ['RunFactCacheReceiverUnitTest', 'RunFactCacheReceiverFunctionalTest'] - -TEST_MSG_BASE = { - 'host': 'hostname1', - 'date_key': time.mktime(datetime.utcnow().timetuple()), - 'facts' : { }, - 'inventory_id': 1 -} - -TEST_MSG_MODULES = { - 'packages': { - "accountsservice": [ - { - "architecture": "amd64", - "name": "accountsservice", - "source": "apt", - "version": "0.6.35-0ubuntu7.1" - } - ], - "acpid": [ - { - "architecture": "amd64", - "name": "acpid", - "source": "apt", - "version": "1:2.0.21-1ubuntu2" - } - ], - "adduser": [ - { - "architecture": "all", - "name": "adduser", - "source": "apt", - "version": "3.113+nmu3ubuntu3" - } - ], - }, - 'services': [ - { - "name": "acpid", - "source": "sysv", - "state": "running" - }, - { - "name": "apparmor", - "source": "sysv", - "state": "stopped" - }, - { - "name": "atd", - "source": "sysv", - "state": "running" - }, - { - "name": "cron", - "source": "sysv", - "state": "running" - } - ], - 'ansible': { - 'ansible_fact_simple': 'hello world', - 'ansible_fact_complex': { - 'foo': 'bar', - 'hello': [ - 'scooby', - 'dooby', - 'doo' - ] - }, - } -} -# Derived from TEST_MSG_BASE -TEST_MSG = dict(TEST_MSG_BASE) - -TEST_MSG_LARGE = {u'ansible_product_version': u'To Be Filled By O.E.M.', u'ansible_memory_mb': {u'real': {u'total': 32062, u'used': 8079, u'free': 23983}, u'swap': {u'cached': 0, u'total': 0, u'used': 0, u'free': 0}, u'nocache': {u'used': 4339, u'free': 27723}}, u'ansible_user_dir': u'/root', u'ansible_userspace_bits': u'64', u'ansible_distribution_version': u'14.04', u'ansible_virtualization_role': u'guest', 
u'ansible_env': {u'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS': u'False', u'LC_CTYPE': u'en_US.UTF-8', u'JOB_CALLBACK_DEBUG': u'1', u'_MP_FORK_LOGFILE_': u'', u'HOME': u'/', u'REST_API_TOKEN': u'122-5deb0d6fcec85f3bf44fec6ce170600c', u'LANG': u'en_US.UTF-8', u'SHELL': u'/bin/bash', u'_MP_FORK_LOGFORMAT_': u'[%(asctime)s: %(levelname)s/%(processName)s] %(message)s', u'_': u'/usr/bin/make', u'DJANGO_PROJECT_DIR': u'/tower_devel', u'MFLAGS': u'-w', u'JOB_ID': u'122', u'PYTHONPATH': u'/tower_devel/awx/lib/site-packages:', u'_MP_FORK_LOGLEVEL_': u'10', u'ANSIBLE_CACHE_PLUGIN_CONNECTION': u'tcp://127.0.0.1:6564', u'ANSIBLE_LIBRARY': u'/tower_devel/awx/plugins/library', u'CELERY_LOG_LEVEL': u'10', u'HOSTNAME': u'2842b3619fa8', u'MAKELEVEL': u'2', u'TMUX_PANE': u'%1', u'DJANGO_LIVE_TEST_SERVER_ADDRESS': u'localhost:9013-9199', u'CELERY_LOG_REDIRECT': u'1', u'PATH': u'/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin', u'CALLBACK_CONSUMER_PORT': u'tcp://127.0.0.1:5557', u'MAKEFLAGS': u'w', u'ANSIBLE_CALLBACK_PLUGINS': u'/tower_devel/awx/plugins/callback', u'TERM': u'screen', u'TZ': u'America/New_York', u'LANGUAGE': u'en_US:en', u'ANSIBLE_SSH_CONTROL_PATH': u'/tmp/ansible_tower_y3xGdA/cp/ansible-ssh-%%h-%%p-%%r', u'SHLVL': u'1', u'CELERY_LOG_FILE': u'', u'ANSIBLE_HOST_KEY_CHECKING': u'False', u'TMUX': u'/tmp/tmux-0/default,3719,0', u'CELERY_LOADER': u'djcelery.loaders.DjangoLoader', u'LC_ALL': u'en_US.UTF-8', u'ANSIBLE_FORCE_COLOR': u'True', u'REST_API_URL': u'http://127.0.0.1:8013', u'CELERY_LOG_REDIRECT_LEVEL': u'WARNING', u'INVENTORY_HOSTVARS': u'True', u'ANSIBLE_CACHE_PLUGIN': u'tower', u'INVENTORY_ID': u'1', u'PWD': u'/tower_devel/awx/playbooks', u'DJANGO_SETTINGS_MODULE': u'awx.settings.development', u'ANSIBLE_CACHE_PLUGINS': u'/tower_devel/awx/plugins/fact_caching'}, u'ansible_lo': {u'mtu': 65536, u'device': u'lo', u'promisc': False, u'ipv4': {u'netmask': u'255.0.0.0', u'network': u'127.0.0.0', u'address': u'127.0.0.1'}, u'ipv6': [{u'scope': u'host', u'prefix': u'128', u'address': u'::1'}], u'active': True, u'type': u'loopback'}, u'ansible_memtotal_mb': 32062, u'ansible_architecture': u'x86_64', u'ansible_default_ipv4': {u'alias': u'eth0', u'netmask': u'255.255.0.0', u'macaddress': u'02:42:ac:11:00:01', u'network': u'172.17.0.0', u'address': u'172.17.0.1', u'interface': u'eth0', u'type': u'ether', u'gateway': u'172.17.42.1', u'mtu': 1500}, u'ansible_swapfree_mb': 0, u'ansible_default_ipv6': {}, u'ansible_cmdline': {u'nomodeset': True, u'rw': True, u'initrd': u'EFIarchinitramfs-arch.img', u'rootfstype': u'ext4', u'root': u'/dev/sda4', u'systemd.unit': u'graphical.target'}, u'ansible_selinux': False, u'ansible_userspace_architecture': u'x86_64', u'ansible_product_uuid': u'00020003-0004-0005-0006-000700080009', u'ansible_pkg_mgr': u'apt', u'ansible_memfree_mb': 23983, u'ansible_distribution': u'Ubuntu', u'ansible_processor_count': 1, u'ansible_hostname': u'2842b3619fa8', u'ansible_all_ipv6_addresses': [u'fe80::42:acff:fe11:1'], u'ansible_interfaces': [u'lo', u'eth0'], u'ansible_kernel': u'4.0.1-1-ARCH', u'ansible_fqdn': u'2842b3619fa8', u'ansible_mounts': [{u'uuid': u'NA', u'size_total': 10434699264, u'mount': u'/', u'size_available': 4918865920, u'fstype': u'ext4', u'device': u'/dev/mapper/docker-8:4-18219321-2842b3619fa885d19e47302009754a4bfd54c1b32c7f21e98f38c7fe7412d3d0', u'options': u'rw,relatime,discard,stripe=16,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/tower_devel', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', 
u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/resolv.conf', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/hostname', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/hosts', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}], u'ansible_user_shell': u'/bin/bash', u'ansible_nodename': u'2842b3619fa8', u'ansible_product_serial': u'To Be Filled By O.E.M.', u'ansible_form_factor': u'Desktop', u'ansible_fips': False, u'ansible_user_id': u'root', u'ansible_domain': u'', u'ansible_date_time': {u'month': u'05', u'second': u'47', u'iso8601_micro': u'2015-05-01T19:46:47.868456Z', u'year': u'2015', u'date': u'2015-05-01', u'iso8601': u'2015-05-01T19:46:47Z', u'day': u'01', u'minute': u'46', u'tz': u'EDT', u'hour': u'15', u'tz_offset': u'-0400', u'epoch': u'1430509607', u'weekday': u'Friday', u'time': u'15:46:47'}, u'ansible_processor_cores': 4, u'ansible_processor_vcpus': 4, u'ansible_bios_version': u'P1.80', u'ansible_processor': [u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz'], u'ansible_virtualization_type': u'docker', u'ansible_distribution_release': u'trusty', u'ansible_system_vendor': u'To Be Filled By O.E.M.', u'ansible_os_family': u'Debian', u'ansible_user_gid': 0, u'ansible_swaptotal_mb': 0, u'ansible_system': u'Linux', u'ansible_devices': {u'sda': {u'sectorsize': u'4096', u'vendor': u'ATA', u'host': u'', u'support_discard': u'0', u'model': u'ST1000DM003-9YN1', u'size': u'7.28 TB', u'scheduler_mode': u'cfq', u'rotational': u'1', u'sectors': u'1953525168', u'removable': u'0', u'holders': [], u'partitions': {u'sda4': {u'start': u'820979712', u'sectorsize': 512, u'sectors': u'1132545423', u'size': u'540.04 GB'}, u'sda2': {u'start': u'206848', u'sectorsize': 512, u'sectors': u'262144', u'size': u'128.00 MB'}, u'sda3': {u'start': u'468992', u'sectorsize': 512, u'sectors': u'820510720', u'size': u'391.25 GB'}, u'sda1': {u'start': u'2048', u'sectorsize': 512, u'sectors': u'204800', u'size': u'100.00 MB'}}}}, u'ansible_user_uid': 0, u'ansible_distribution_major_version': u'14', u'ansible_lsb': {u'major_release': u'14', u'release': u'14.04', u'codename': u'trusty', u'description': u'Ubuntu 14.04.1 LTS', u'id': u'Ubuntu'}, u'ansible_bios_date': u'12/05/2012', u'ansible_machine': u'x86_64', u'ansible_user_gecos': u'root', u'ansible_processor_threads_per_core': 1, u'ansible_eth0': {u'device': u'eth0', u'promisc': False, u'macaddress': u'02:42:ac:11:00:01', u'ipv4': {u'netmask': u'255.255.0.0', u'network': u'172.17.0.0', u'address': u'172.17.0.1'}, u'ipv6': [{u'scope': u'link', u'prefix': u'64', u'address': u'fe80::42:acff:fe11:1'}], u'active': True, u'type': u'ether', u'mtu': 1500}, u'ansible_product_name': u'To Be Filled By O.E.M.', u'ansible_all_ipv4_addresses': [u'172.17.0.1'], u'ansible_python_version': u'2.7.6'} # noqa - -def copy_only_module(data, module): - data = deepcopy(data) - data['facts'] = {} - if module == 'ansible': - data['facts'] = deepcopy(TEST_MSG_MODULES[module]) - else: - data['facts'][module] = 
deepcopy(TEST_MSG_MODULES[module]) - return data - - -class RunFactCacheReceiverFunctionalTest(BaseCommandMixin, BaseTest, MongoDBRequired): - @unittest.skip('''\ -TODO: run_fact_cache_receiver enters a while True loop that never exists. \ -This differs from most other commands that we test for. More logic and work \ -would be required to invoke this case from the command line with little return \ -in terms of increase coverage and confidence.''') - def test_invoke(self): - result, stdout, stderr = self.run_command('run_fact_cache_receiver') - self.assertEqual(result, None) - -class RunFactCacheReceiverUnitTest(BaseTest, MongoDBRequired): - - # TODO: Check that timestamp and other attributes are as expected - def check_process_fact_message_module(self, data, module): - fact_found = None - facts = Fact.objects.all() - self.assertEqual(len(facts), 1) - for fact in facts: - if fact.module == module: - fact_found = fact - break - self.assertIsNotNone(fact_found) - #self.assertEqual(data['facts'][module], fact_found[module]) - - fact_found = None - fact_versions = FactVersion.objects.all() - self.assertEqual(len(fact_versions), 1) - for fact in fact_versions: - if fact.module == module: - fact_found = fact - break - self.assertIsNotNone(fact_found) - - - # Ensure that the message flows from the socket through to process_fact_message() - @mock.patch('awx.main.socket.Socket.listen') - def test_run_receiver(self, listen_mock): - listen_mock.return_value = [TEST_MSG] - - receiver = FactCacheReceiver() - receiver.process_fact_message = MagicMock(name='process_fact_message') - receiver.run_receiver(use_processing_threads=False) - - receiver.process_fact_message.assert_called_once_with(TEST_MSG) - - def test_process_fact_message_ansible(self): - data = copy_only_module(TEST_MSG, 'ansible') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'ansible') - - def test_process_fact_message_packages(self): - data = copy_only_module(TEST_MSG, 'packages') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'packages') - - def test_process_fact_message_services(self): - data = copy_only_module(TEST_MSG, 'services') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'services') - - - # Ensure that only a single host gets created for multiple invocations with the same hostname - def test_process_fact_message_single_host_created(self): - receiver = FactCacheReceiver() - - data = deepcopy(TEST_MSG) - receiver.process_fact_message(data) - data = deepcopy(TEST_MSG) - data['date_key'] = time.mktime(datetime.utcnow().timetuple()) - receiver.process_fact_message(data) - - fact_hosts = FactHost.objects.all() - self.assertEqual(len(fact_hosts), 1) - - def test_process_facts_message_ansible_overwrite(self): - data = copy_only_module(TEST_MSG, 'ansible') - key = 'ansible.overwrite' - value = 'hello world' - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - fact = Fact.objects.all()[0] - - data = copy_only_module(TEST_MSG, 'ansible') - data['facts'][key] = value - receiver.process_fact_message(data) - - fact = Fact.objects.get(id=fact.id) - self.assertIn(key, fact.fact) - self.assertEqual(fact.fact[key], value) - self.assertEqual(fact.fact, data['facts']) - - def test_large_overwrite(self): - data = deepcopy(TEST_MSG_BASE) - data['facts'] = { - 'ansible': {} - } - - receiver = 
FactCacheReceiver() - receiver.process_fact_message(data) - - fact = Fact.objects.all()[0] - - data['facts']['ansible'] = TEST_MSG_LARGE - receiver.process_fact_message(data) - - fact = Fact.objects.get(id=fact.id) - self.assertEqual(fact.fact, data['facts']['ansible']) diff --git a/awx/main/tests/old/fact/fact_api.py b/awx/main/tests/old/fact/fact_api.py deleted file mode 100644 index d13b17f060..0000000000 --- a/awx/main/tests/old/fact/fact_api.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved - -# Python -import unittest2 as unittest - -# Django -from django.core.urlresolvers import reverse - -# AWX -from awx.main.utils import timestamp_apiformat -from awx.main.models import * # noqa -from awx.main.tests.base import BaseLiveServerTest -from awx.fact.models import * # noqa -from awx.fact.tests.base import BaseFactTestMixin, FactScanBuilder, TEST_FACT_ANSIBLE, TEST_FACT_PACKAGES, TEST_FACT_SERVICES -from awx.main.utils import build_url - -__all__ = ['FactVersionApiTest', 'FactViewApiTest', 'SingleFactApiTest',] - -class FactApiBaseTest(BaseLiveServerTest, BaseFactTestMixin): - def setUp(self): - super(FactApiBaseTest, self).setUp() - self.create_test_license_file() - self.setup_instances() - self.setup_users() - self.organization = self.make_organization(self.super_django_user) - self.organization.admins.add(self.normal_django_user) - self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory') - self.host = self.inventory.hosts.create(name='host.example.com') - self.host2 = self.inventory.hosts.create(name='host2.example.com') - self.host3 = self.inventory.hosts.create(name='host3.example.com') - - def setup_facts(self, scan_count): - self.builder = FactScanBuilder() - self.builder.set_inventory_id(self.inventory.pk) - self.builder.add_fact('ansible', TEST_FACT_ANSIBLE) - self.builder.add_fact('packages', TEST_FACT_PACKAGES) - self.builder.add_fact('services', TEST_FACT_SERVICES) - self.builder.add_hostname('host.example.com') - self.builder.add_hostname('host2.example.com') - self.builder.add_hostname('host3.example.com') - self.builder.build(scan_count=scan_count, host_count=3) - - self.fact_host = FactHost.objects.get(hostname=self.host.name) - -class FactVersionApiTest(FactApiBaseTest): - def check_equal(self, fact_versions, results): - def find(element, set1): - for e in set1: - if all([ e.get(field) == element.get(field) for field in element.keys()]): - return e - return None - - self.assertEqual(len(results), len(fact_versions)) - for v in fact_versions: - v_dict = { - 'timestamp': timestamp_apiformat(v.timestamp), - 'module': v.module - } - e = find(v_dict, results) - self.assertIsNotNone(e, "%s not found in %s" % (v_dict, results)) - - def get_list(self, fact_versions, params=None): - url = build_url('api:host_fact_versions_list', args=(self.host.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - - self.check_equal(fact_versions, response['results']) - return response - - def test_permission_list(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def test_list_empty(self): - url = 
reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertIn('results', response) - self.assertIsInstance(response['results'], list) - self.assertEqual(len(response['results']), 0) - - def test_list_related_fact_view(self): - self.setup_facts(2) - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - for entry in response['results']: - self.assertIn('fact_view', entry['related']) - self.get(entry['related']['fact_view'], expect=200) - - def test_list(self): - self.setup_facts(2) - self.get_list(FactVersion.objects.filter(host=self.fact_host)) - - def test_list_module(self): - self.setup_facts(10) - self.get_list(FactVersion.objects.filter(host=self.fact_host, module='packages'), dict(module='packages')) - - def test_list_time_from(self): - self.setup_facts(10) - - params = { - 'from': timestamp_apiformat(self.builder.get_timestamp(1)), - } - # 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from']) - self.get_list(fact_versions, params) - - def test_list_time_to(self): - self.setup_facts(10) - - params = { - 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - } - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__lte=params['to']) - self.get_list(fact_versions, params) - - def test_list_time_from_to(self): - self.setup_facts(10) - - params = { - 'from': timestamp_apiformat(self.builder.get_timestamp(1)), - 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - } - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'], timestamp__lte=params['to']) - self.get_list(fact_versions, params) - - -class FactViewApiTest(FactApiBaseTest): - def check_equal(self, fact_obj, results): - fact_dict = { - 'timestamp': timestamp_apiformat(fact_obj.timestamp), - 'module': fact_obj.module, - 'host': { - 'hostname': fact_obj.host.hostname, - 'inventory_id': fact_obj.host.inventory_id, - 'id': str(fact_obj.host.id) - }, - 'fact': fact_obj.fact - } - self.assertEqual(fact_dict, results) - - def test_permission_view(self): - url = reverse('api:host_fact_compare_view', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def get_fact(self, fact_obj, params=None): - url = build_url('api:host_fact_compare_view', args=(self.host.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - - self.check_equal(fact_obj, response) - - def test_view(self): - self.setup_facts(2) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible').order_by('-timestamp')[0]) - - def test_view_module_filter(self): - self.setup_facts(2) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='services').order_by('-timestamp')[0], dict(module='services')) - - def test_view_time_filter(self): - self.setup_facts(6) - ts = self.builder.get_timestamp(3) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible', timestamp__lte=ts).order_by('-timestamp')[0], - dict(datetime=ts)) - - 
-@unittest.skip("single fact query needs to be updated to use inventory_id attribute on host document") -class SingleFactApiTest(FactApiBaseTest): - def setUp(self): - super(SingleFactApiTest, self).setUp() - - self.group = self.inventory.groups.create(name='test-group') - self.group.hosts.add(self.host, self.host2, self.host3) - - def test_permission_list(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def _test_related(self, url): - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertTrue(len(response['results']) > 0) - for entry in response['results']: - self.assertIn('single_fact', entry['related']) - # Requires fields - self.get(entry['related']['single_fact'], expect=400) - - def test_related_host_list(self): - self.setup_facts(2) - self._test_related(reverse('api:host_list')) - - def test_related_group_list(self): - self.setup_facts(2) - self._test_related(reverse('api:group_list')) - - def test_related_inventory_list(self): - self.setup_facts(2) - self._test_related(reverse('api:inventory_list')) - - def test_params(self): - self.setup_facts(2) - params = { - 'module': 'packages', - 'fact_key': 'name', - 'fact_value': 'acpid', - } - url = build_url('api:inventory_single_fact_view', args=(self.inventory.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertEqual(len(response['results']), 3) - for entry in response['results']: - self.assertEqual(entry['fact'][0]['name'], 'acpid') diff --git a/awx/main/utils.py b/awx/main/utils.py index 00bfc74608..96f8e2c0ff 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -139,12 +139,13 @@ def get_encryption_key(instance, field_name): h.update(field_name) return h.digest()[:16] - -def encrypt_field(instance, field_name, ask=False): +def encrypt_field(instance, field_name, ask=False, subfield=None): ''' Return content of the given instance and field name encrypted. ''' value = getattr(instance, field_name) + if isinstance(value, dict) and subfield is not None: + value = value[subfield] if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'): return value value = smart_str(value) @@ -157,11 +158,13 @@ def encrypt_field(instance, field_name, ask=False): return '$encrypted$%s$%s' % ('AES', b64data) -def decrypt_field(instance, field_name): +def decrypt_field(instance, field_name, subfield=None): ''' Return content of the given instance and field name decrypted. ''' value = getattr(instance, field_name) + if isinstance(value, dict) and subfield is not None: + value = value[subfield] if not value or not value.startswith('$encrypted$'): return value algo, b64data = value[len('$encrypted$'):].split('$', 1) diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index 99573983b2..ddffcaf974 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -2,10 +2,10 @@ # This file is a utility Ansible plugin that is not part of the AWX or Ansible # packages. It does not import any code from either package, nor does its # license apply to Ansible or AWX. 
-# +# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: -# +# # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # @@ -90,8 +90,12 @@ CENSOR_FIELD_WHITELIST=[ 'skip_reason', ] -def censor(obj): - if obj.get('_ansible_no_log', False): +def censor(obj, no_log=False): + if not isinstance(obj, dict): + if no_log: + return "the output has been hidden due to the fact that 'no_log: true' was specified for this result" + return obj + if obj.get('_ansible_no_log', no_log): new_obj = {} for k in CENSOR_FIELD_WHITELIST: if k in obj: @@ -104,8 +108,12 @@ def censor(obj): new_obj['censored'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result" obj = new_obj if 'results' in obj: - for i in xrange(len(obj['results'])): - obj['results'][i] = censor(obj['results'][i]) + if isinstance(obj['results'], list): + for i in xrange(len(obj['results'])): + obj['results'][i] = censor(obj['results'][i], obj.get('_ansible_no_log', no_log)) + elif obj.get('_ansible_no_log', False): + obj['results'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result" + return obj class TokenAuth(requests.auth.AuthBase): @@ -460,7 +468,7 @@ class JobCallbackModule(BaseCallbackModule): # this from a normal task self._log_event('playbook_on_task_start', task=task, name=task.get_name()) - + def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None): @@ -529,6 +537,7 @@ class AdHocCommandCallbackModule(BaseCallbackModule): def __init__(self): self.ad_hoc_command_id = int(os.getenv('AD_HOC_COMMAND_ID', '0')) self.rest_api_path = '/api/v1/ad_hoc_commands/%d/events/' % self.ad_hoc_command_id + self.skipped_hosts = set() super(AdHocCommandCallbackModule, self).__init__() def _log_event(self, event, **event_data): @@ -539,6 +548,18 @@ class AdHocCommandCallbackModule(BaseCallbackModule): def runner_on_file_diff(self, host, diff): pass # Ignore file diff for ad hoc commands. + def runner_on_ok(self, host, res): + # When running in check mode using a module that does not support check + # mode, Ansible v1.9 will call runner_on_skipped followed by + # runner_on_ok for the same host; only capture the skipped event and + # ignore the ok event. + if host not in self.skipped_hosts: + super(AdHocCommandCallbackModule, self).runner_on_ok(host, res) + + def runner_on_skipped(self, host, item=None): + super(AdHocCommandCallbackModule, self).runner_on_skipped(host, item) + self.skipped_hosts.add(host) + if os.getenv('JOB_ID', ''): CallbackModule = JobCallbackModule diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 85a234314e..76381e5ac0 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -342,6 +342,10 @@ CELERYBEAT_SCHEDULE = { 'task': 'awx.main.tasks.tower_periodic_scheduler', 'schedule': timedelta(seconds=30) }, + 'admin_checks': { + 'task': 'awx.main.tasks.run_administrative_checks', + 'schedule': timedelta(days=30) + }, } # Social Auth configuration. 
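Note: the new 'admin_checks' beat entry above schedules awx.main.tasks.run_administrative_checks, whose body is not part of this diff; it pairs with the TOWER_ADMIN_ALERTS and TOWER_URL_BASE settings added below. A minimal sketch of what such a periodic task could look like, assuming standard Celery task registration — the names and mail wiring here are illustrative assumptions, not the actual implementation:

# Hypothetical sketch only -- the real task lives in awx/main/tasks.py and is not shown in this diff.
from celery import task
from django.conf import settings
from django.contrib.auth.models import User
from django.core.mail import send_mail

@task()
def run_administrative_checks():
    # Honor the TOWER_ADMIN_ALERTS toggle introduced in this changeset.
    if not getattr(settings, 'TOWER_ADMIN_ALERTS', True):
        return
    # Gather whatever system state warrants an alert, then email superusers,
    # linking back to the host via the new TOWER_URL_BASE setting.
    admin_emails = list(User.objects.filter(is_superuser=True).values_list('email', flat=True))
    send_mail('Tower administrative alert',
              'See %s for details.' % getattr(settings, 'TOWER_URL_BASE', ''),
              'tower@localhost', admin_emails, fail_silently=True)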
@@ -677,6 +681,10 @@ FACT_CACHE_PORT = 6564
 ORG_ADMINS_CAN_SEE_ALL_USERS = True
 
+TOWER_ADMIN_ALERTS = True
+
+TOWER_URL_BASE = "https://towerhost"
+
 TOWER_SETTINGS_MANIFEST = {
     "SCHEDULE_MAX_JOBS": {
         "name": "Maximum Scheduled Jobs",
@@ -804,6 +812,20 @@ TOWER_SETTINGS_MANIFEST = {
         "type": "bool",
         "category": "system",
     },
+    "TOWER_ADMIN_ALERTS": {
+        "name": "Enable Tower Administrator Alerts",
+        "description": "Allow Tower to email Admin users for system events that may require attention",
+        "default": TOWER_ADMIN_ALERTS,
+        "type": "bool",
+        "category": "system",
+    },
+    "TOWER_URL_BASE": {
+        "name": "Base URL of the Tower host",
+        "description": "This is used by services like Notifications to render a valid URL to the Tower host",
+        "default": TOWER_URL_BASE,
+        "type": "string",
+        "category": "system",
+    },
     "LICENSE": {
         "name": "Tower License",
         "description": "Controls which features and functionality are enabled in Tower.",
diff --git a/awx/settings/development.py b/awx/settings/development.py
index a214ab4670..46df026e06 100644
--- a/awx/settings/development.py
+++ b/awx/settings/development.py
@@ -13,7 +13,6 @@ from split_settings.tools import optional, include
 # Load default settings.
 from defaults import * # NOQA
-
 MONGO_HOST = '127.0.0.1'
 MONGO_PORT = 27017
 MONGO_USERNAME = None
@@ -66,10 +65,13 @@ PASSWORD_HASHERS = (
 # Configure a default UUID for development only.
 SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
 
-STATSD_CLIENT = 'django_statsd.clients.normal'
-STATSD_HOST = 'graphite'
+STATSD_CLIENT = 'django_statsd.clients.null'
+STATSD_HOST = None
+STATSD_PREFIX = None
+#STATSD_CLIENT = 'django_statsd.clients.normal'
+#STATSD_HOST = 'graphite'
 STATSD_PORT = 8125
-STATSD_PREFIX = 'tower'
+#STATSD_PREFIX = 'tower'
 STATSD_MAXUDPSIZE = 512
 
 # If there is an `/etc/tower/settings.py`, include it.
diff --git a/awx/ui/client/assets/cowsay-about.html b/awx/ui/client/assets/cowsay-about.html
deleted file mode 100644
index b1cbcf9d24..0000000000
--- a/awx/ui/client/assets/cowsay-about.html
+++ /dev/null
@@ -1,29 +0,0 @@
- ________________
-/  Tower Version \
-\/
- ----------------
-        \   ^__^
-         \  (oo)\_______
-            (__)\       )\/\
-                ||----w |
-                ||     ||
-
-Copyright 2015. All rights reserved.
-
-Ansible and Ansible Tower are registered trademarks of Red Hat, Inc.
-
-Visit Ansible.com for more information.
-
diff --git a/awx/ui/client/legacy-styles/ansible-ui.less b/awx/ui/client/legacy-styles/ansible-ui.less index 06d924b141..4adaeb09eb 100644 --- a/awx/ui/client/legacy-styles/ansible-ui.less +++ b/awx/ui/client/legacy-styles/ansible-ui.less @@ -99,7 +99,9 @@ a:focus { color: @blue-dark; text-decoration: none; } - +.btn{ + text-transform: uppercase; +} /* Old style TB default button with grey background */ .btn-grey { color: #333; @@ -917,15 +919,11 @@ input[type="checkbox"].checkbox-no-label { /* Display list actions next to search widget */ .list-actions { - text-align: right; + text-align: right; - button { - margin-left: 4px; - } - - .fa-lg { - vertical-align: -8%; - } + .fa-lg { + vertical-align: -8%; + } } .jqui-accordion { @@ -1950,11 +1948,6 @@ tr td button i { } } -button.dropdown-toggle, -.input-group-btn { - z-index: 1; -} - #login-modal-body { padding-bottom: 5px; } diff --git a/awx/ui/client/legacy-styles/job-details.less b/awx/ui/client/legacy-styles/job-details.less index 621e0267ca..d3cc2bae50 100644 --- a/awx/ui/client/legacy-styles/job-details.less +++ b/awx/ui/client/legacy-styles/job-details.less @@ -166,9 +166,6 @@ .unreachable-hosts-color { color: @unreachable-hosts-color; } - .missing-hosts { - color: transparent; - } .job_well { padding: 8px; @@ -197,9 +194,6 @@ margin-bottom: 0; } - #job-detail-tables { - margin-top: 20px; - } #job_options { height: 100px; @@ -208,7 +202,6 @@ } #job_plays, #job_tasks { - height: 150px; overflow-y: auto; overflow-x: none; } @@ -221,10 +214,7 @@ } #job-detail-container { - position: relative; - padding-left: 15px; - padding-right: 7px; - width: 58.33333333%; + .well { overflow: hidden; } @@ -292,9 +282,6 @@ .row:first-child { border: none; } - .active { - background-color: @active-color; - } .loading-info { padding-top: 5px; padding-left: 3px; @@ -329,10 +316,6 @@ text-overflow: ellipsis; } - #tasks-table-detail { - height: 150px; - } - #play-section { .table-detail { height: 150px; diff --git a/awx/ui/client/legacy-styles/lists.less b/awx/ui/client/legacy-styles/lists.less index 2dcac015c8..ba6adba673 100644 --- a/awx/ui/client/legacy-styles/lists.less +++ b/awx/ui/client/legacy-styles/lists.less @@ -32,6 +32,7 @@ table, tbody { background-color: @list-header-bg; padding-left: 15px; padding-right: 15px; + border-bottom-width:0px!important; } .List-tableHeader:first-of-type { @@ -69,6 +70,7 @@ table, tbody { .List-tableCell { padding-left: 15px; padding-right: 15px; + border-top:0px!important; } .List-actionButtonCell { @@ -141,7 +143,6 @@ table, tbody { .List-header { display: flex; - height: 34px; align-items: center; } @@ -149,7 +150,7 @@ table, tbody { align-items: center; flex: 1 0 auto; display: flex; - margin-top: -2px; + height: 34px; } .List-titleBadge { @@ -170,15 +171,22 @@ table, tbody { text-transform: uppercase; } -.List-actions { +.List-actionHolder { justify-content: flex-end; display: flex; + height: 34px; +} + +.List-actions { margin-top: -10px; +} + +.List-auxAction + .List-actions { margin-left: 10px; } .List-auxAction { - justify-content: flex-end; + align-items: center; display: flex; } @@ -186,6 +194,10 @@ table, tbody { width: 175px; } +.List-action:not(.ng-hide) ~ .List-action:not(.ng-hide) { + margin-left: 10px; +} + .List-buttonSubmit { background-color: @submit-button-bg; color: @submit-button-text; @@ -350,3 +362,25 @@ table, tbody { display: block; font-size: 13px; } + +@media (max-width: 991px) { + .List-searchWidget + .List-searchWidget { + margin-top: 20px; + } +} + +@media (max-width: 600px) { + 
.List-header {
+        flex-direction: column;
+        align-items: stretch;
+    }
+    .List-actionHolder {
+        justify-content: flex-start;
+        align-items: center;
+        flex: 1 0 auto;
+        margin-top: 12px;
+    }
+    .List-well {
+        margin-top: 20px;
+    }
+}
diff --git a/awx/ui/client/legacy-styles/main-layout.less b/awx/ui/client/legacy-styles/main-layout.less
index 5b7f8f1c01..48d6709168 100644
--- a/awx/ui/client/legacy-styles/main-layout.less
+++ b/awx/ui/client/legacy-styles/main-layout.less
@@ -60,7 +60,7 @@ body {
 }
 
 #content-container {
-    margin-top: 40px;
+    padding-bottom: 40px;
 }
 
 .group-breadcrumbs {
diff --git a/awx/ui/client/legacy-styles/stdout.less b/awx/ui/client/legacy-styles/stdout.less
index e8f764dee6..61a29dd706 100644
--- a/awx/ui/client/legacy-styles/stdout.less
+++ b/awx/ui/client/legacy-styles/stdout.less
@@ -32,6 +32,7 @@
 #pre-container {
     overflow-x: scroll;
     overflow-y: auto;
+    padding: 10px;
 }
 }
diff --git a/awx/ui/client/src/about/about.block.less b/awx/ui/client/src/about/about.block.less
index 4e46b24b50..d5453c0cf2 100644
--- a/awx/ui/client/src/about/about.block.less
+++ b/awx/ui/client/src/about/about.block.less
@@ -1,14 +1,42 @@
 /** @define About */
-.About {
-    height: 309px !important;
-}
+@import "awx/ui/client/src/shared/branding/colors.default.less";
 
-.About-cowsay {
-    margin-top: 30px;
+.About-cowsay--container{
+    width: 340px;
+    margin: 0 auto;
 }
-
-.About-redhat {
-    max-width: 100%;
-    margin-top: -61px;
-    margin-bottom: -33px;
+.About-cowsay--code{
+    background-color: @default-bg;
+    padding-left: 30px;
+    border-style: none;
+    max-width: 340px;
 }
+.About .modal-header{
+    border: none;
+    padding-bottom: 0px;
+}
+.About .modal-dialog{
+    max-width: 500px;
+}
+.About .modal-body{
+    padding-top: 0px;
+}
+.About-brand--redhat{
+    max-width: 420px;
+    margin: 0 auto;
+    margin-top: -50px;
+    margin-bottom: -30px;
+}
+.About-brand--ansible{
+    max-width: 120px;
+    margin: 0 auto;
+}
+.About-close{
+    position: absolute;
+    top: 15px;
+    right: 15px;
+}
+.About p{
+    color: @default-interface-txt;
+}
\ No newline at end of file
diff --git a/awx/ui/client/src/about/about.controller.js b/awx/ui/client/src/about/about.controller.js
new file mode 100644
index 0000000000..c35388e8ae
--- /dev/null
+++ b/awx/ui/client/src/about/about.controller.js
@@ -0,0 +1,31 @@
+export default
+    ['$scope', '$state', 'CheckLicense', function($scope, $state, CheckLicense){
+        var processVersion = function(version){
+            // prettify version & calculate padding
+            // e.g. 3.0.0-0.git201602191743 -> 3.0.0
+            var split = version.split('-')[0];
+            var spaces = Math.floor((16-split.length)/2),
+                paddedStr = "";
+            for(var i=0; i<=spaces; i++){
+                paddedStr = paddedStr +" ";
+            }
+            paddedStr = paddedStr + split;
+            for(var j = paddedStr.length; j<16; j++){
+                paddedStr = paddedStr + " ";
+            }
+            return paddedStr;
+        };
+        var init = function(){
+            CheckLicense.get()
+                .then(function(res){
+                    $scope.subscription = res.data.license_info.subscription_name;
+                    $scope.version = processVersion(res.data.version);
+                    $('#about-modal').modal('show');
+                });
+        };
+        var back = function(){
+            $state.go('setup');
+        };
+        init();
+    }
+    ];
\ No newline at end of file
diff --git a/awx/ui/client/src/about/about.partial.html b/awx/ui/client/src/about/about.partial.html
new file mode 100644
index 0000000000..afc66724f4
--- /dev/null
+++ b/awx/ui/client/src/about/about.partial.html
@@ -0,0 +1,32 @@
+