From 7385efef351dfa14e61af9438923a1907ca484b4 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 1 Feb 2016 16:54:34 -0500 Subject: [PATCH 01/38] Adding some early Notifications stubs * A basic NotificationTemplate model class with early notification type definitions * Initial implementations of the Email, Slack, and Twilio Notification backends using the Django email backend system * Some dependencies thereof --- awx/main/models/__init__.py | 1 + awx/main/models/notifications.py | 37 +++++++++++++++++++ awx/main/notifications/__init__.py | 0 awx/main/notifications/email_backend.py | 11 ++++++ awx/main/notifications/slack_backend.py | 46 ++++++++++++++++++++++++ awx/main/notifications/twilio_backend.py | 42 ++++++++++++++++++++++ requirements/requirements.txt | 2 ++ 7 files changed, 139 insertions(+) create mode 100644 awx/main/models/notifications.py create mode 100644 awx/main/notifications/__init__.py create mode 100644 awx/main/notifications/email_backend.py create mode 100644 awx/main/notifications/slack_backend.py create mode 100644 awx/main/notifications/twilio_backend.py diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 23cf591e6b..4e6d45f18f 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -17,6 +17,7 @@ from awx.main.models.schedules import * # noqa from awx.main.models.activity_stream import * # noqa from awx.main.models.ha import * # noqa from awx.main.models.configuration import * # noqa +from awx.main.models.notifications import * # noqa # Monkeypatch Django serializer to ignore django-taggit fields (which break # the dumpdata command; see https://github.com/alex/django-taggit/issues/155). diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py new file mode 100644 index 0000000000..e2539edc4b --- /dev/null +++ b/awx/main/models/notifications.py @@ -0,0 +1,37 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import logging + +from django.db import models +from awx.main.models.base import * # noqa +from awx.main.notifications.email_backend import CustomEmailBackend +from awx.main.notifications.slack_backend import SlackBackend +from awx.main.notifications.twilio_backend import TwilioBackend + +# Django-JSONField +from jsonfield import JSONField + +logger = logging.getLogger('awx.main.models.notifications') + +class NotificationTemplate(CommonModel): + + NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend), + ('slack', _('Slack'), SlackBackend), + ('twilio', _('Twilio'), TwilioBackend)] + NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES] + CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES]) + + class Meta: + app_label = 'main' + + notification_type = models.CharField( + max_length = 32, + choices=NOTIFICATION_TYPE_CHOICES, + ) + + notification_configuration = JSONField(blank=False) + + @property + def notification_class(self): + return CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] diff --git a/awx/main/notifications/__init__.py b/awx/main/notifications/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py new file mode 100644 index 0000000000..47c6c6dd00 --- /dev/null +++ b/awx/main/notifications/email_backend.py @@ -0,0 +1,11 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging + +from django.core.mail.backends.smtp import EmailBackend + +class CustomEmailBackend(EmailBackend): + + init_parameters = ("host", "port", "username", "password", + "use_tls", "use_ssl") diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py new file mode 100644 index 0000000000..07c0f2c4fc --- /dev/null +++ b/awx/main/notifications/slack_backend.py @@ -0,0 +1,46 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import logging +from slackclient import SlackClient + +from django.core.mail.backends.base import BaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.slack_backend') + +class SlackBackend(BaseEmailBackend): + + init_parameters = ('token',) + + def __init__(self, token, fail_silently=False, **kwargs): + super(SlackBackend, self).__init__(fail_silently=fail_silently) + self.token = token + self.connection = None + + def open(self): + if self.connection is not None: + return False + self.connection = SlackClient(self.token) + if not self.connection.rtm_connect(): + if not self.fail_silently: + raise Exception("Slack Notification Token is invalid") + return True + + def close(self): + if self.connection is None: + return + self.connection = None + + def send_messages(self, messages): + if self.connection is None: + self.open() + sent_messages = 0 + for m in messages: + try: + self.connection.rtm_send_message(m.to, m.body) + sent_messages += 1 + except Exception as e: + if not self.fail_silently: + raise + logger.error("Exception sending messages: {}".format(e)) + return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py new file mode 100644 index 0000000000..86d6829c09 --- /dev/null +++ b/awx/main/notifications/twilio_backend.py @@ -0,0 +1,42 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import logging + +from twilio.rest import TwilioRestClient + +from django.core.mail.backends.base import BaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.twilio_backend') + +class TwilioBackend(BaseEmailBackend): + + init_parameters = ('account_sid', 'account_token', 'from_phone',) + + def __init__(self, account_sid, account_token, from_phone, fail_silently=False, **kwargs): + super(TwilioBackend, self).__init__(fail_silently=fail_silently) + self.account_sid = account_sid + self.account_token = account_token + self.from_phone = from_phone + + def send_messages(self, messages): + sent_messages = 0 + try: + connection = TwilioRestClient(self.account_sid, self.account_token) + except Exception as e: + if not self.fail_silently: + raise + logger.error("Exception connecting to Twilio: {}".format(e)) + + for m in messages: + try: + connection.messages.create( + to=m.to, + from_=self.from_phone, + body=m.body) + sent_messages += 1 + except Exception as e: + if not self.fail_silently: + raise + logger.error("Exception sending messages: {}".format(e)) + return sent_messages diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 48857bc6d2..73942d9eec 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -114,9 +114,11 @@ requests==2.5.1 requests-oauthlib==0.5.0 simplejson==3.6.0 six==1.9.0 +slackclient==0.16 statsd==3.2.1 stevedore==1.3.0 suds==0.4 +twilio==4.9.1 warlock==1.1.0 wheel==0.24.0 wsgiref==0.1.2 From 805514990b4eb95065407c80311e10641ab9829b Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 1 Feb 2016 16:55:57 -0500 Subject: [PATCH 02/38] Changes to celery tasks to support success signals Linking in a success callback that will be invoked by our UnifiedJobs in the case they terminate normally. This is where we'll hook in the success notification type. 
--- awx/main/management/commands/run_task_system.py | 7 ++++--- awx/main/models/unified_jobs.py | 6 +++--- awx/main/tasks.py | 9 ++++++++- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index d49dbf1669..5b5dd3bff0 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -15,7 +15,7 @@ from django.core.management.base import NoArgsCommand # AWX from awx.main.models import * # noqa from awx.main.queue import FifoQueue -from awx.main.tasks import handle_work_error +from awx.main.tasks import handle_work_error, handle_work_success from awx.main.utils import get_system_task_capacity # Celery @@ -265,14 +265,15 @@ def process_graph(graph, task_capacity): [{'type': graph.get_node_type(n['node_object']), 'id': n['node_object'].id} for n in node_dependencies] error_handler = handle_work_error.s(subtasks=dependent_nodes) - start_status = node_obj.start(error_callback=error_handler) + success_handler = handle_work_success.s(task_actual={'type': graph.get_node_type(node_obj), + 'id': node_obj.id}) + start_status = node_obj.start(error_callback=error_handler, success_callback=success_handler) if not start_status: node_obj.status = 'failed' if node_obj.job_explanation: node_obj.job_explanation += ' ' node_obj.job_explanation += 'Task failed pre-start check.' 
node_obj.save() - # TODO: Run error handler continue remaining_volume -= impact running_impact += impact diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 86ab0b3143..cd519af726 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -717,7 +717,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique tasks that might preclude creating one''' return [] - def start(self, error_callback, **kwargs): + def start(self, error_callback, success_callback, **kwargs): ''' Start the task running via Celery. ''' @@ -743,7 +743,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # if field not in needed]) if 'extra_vars' in kwargs: self.handle_extra_data(kwargs['extra_vars']) - task_class().apply_async((self.pk,), opts, link_error=error_callback) + task_class().apply_async((self.pk,), opts, link_error=error_callback, link=success_callback) return True def signal_start(self, **kwargs): @@ -765,7 +765,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # Sanity check: If we are running unit tests, then run synchronously. if getattr(settings, 'CELERY_UNIT_TEST', False): - return self.start(None, **kwargs) + return self.start(None, None, **kwargs) # Save the pending status, and inform the SocketIO listener. 
self.update_fields(start_args=json.dumps(kwargs), status='pending') diff --git a/awx/main/tasks.py b/awx/main/tasks.py index acfe2022ae..478bb6275c 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -52,7 +52,8 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, from awx.fact.utils.connection import test_mongo_connection __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', - 'RunAdHocCommand', 'handle_work_error', 'update_inventory_computed_fields'] + 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', + 'update_inventory_computed_fields'] HIDDEN_PASSWORD = '**********' @@ -159,8 +160,14 @@ def mongodb_control(cmd): p = subprocess.Popen('sudo mongod --shutdown -f /etc/mongod.conf', shell=True) p.wait() +@task(bind=True) +def handle_work_success(self, result, task_actual): + # TODO: Perform Notification tasks + pass + @task(bind=True) def handle_work_error(self, task_id, subtasks=None): + # TODO: Perform Notification tasks print('Executing error task id %s, subtasks: %s' % (str(self.request.id), str(subtasks))) first_task = None From 96b0fb168ff4c83477a9247db430610e8b1e10b0 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 4 Feb 2016 11:29:21 -0500 Subject: [PATCH 03/38] Updating makefile migration generator --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index c5735982b9..18e2d951f8 100644 --- a/Makefile +++ b/Makefile @@ -286,7 +286,7 @@ migrate: # Run after making changes to the models to create a new migration. 
dbchange: - $(PYTHON) manage.py schemamigration main v14_changes --auto + $(PYTHON) manage.py makemigrations # access database shell, asks for password dbshell: From 172207cd4ba129a5d1f4f654d1320bb25c9e1f15 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 4 Feb 2016 11:30:40 -0500 Subject: [PATCH 04/38] Notification endpoints and url expositions Also some changes to the footprint of the notification handler classes --- awx/api/urls.py | 6 ++++++ awx/api/views.py | 13 +++++++++++++ awx/main/access.py | 13 +++++++++++++ awx/main/models/notifications.py | 8 ++++++++ awx/main/notifications/email_backend.py | 9 +++++++-- awx/main/notifications/slack_backend.py | 2 +- awx/main/notifications/twilio_backend.py | 4 +++- 7 files changed, 51 insertions(+), 4 deletions(-) diff --git a/awx/api/urls.py b/awx/api/urls.py index 2b3a93d852..8e48250560 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -209,6 +209,11 @@ system_job_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/cancel/$', 'system_job_cancel'), ) +notification_template_urls = patterns('awx.api.views', + url(r'^$', 'notification_template_list'), + url(r'^(?P[0-9]+)/$', 'notification_template_detail'), +) + schedule_urls = patterns('awx.api.views', url(r'^$', 'schedule_list'), url(r'^(?P[0-9]+)/$', 'schedule_detail'), @@ -257,6 +262,7 @@ v1_urls = patterns('awx.api.views', url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), url(r'^system_job_templates/', include(system_job_template_urls)), url(r'^system_jobs/', include(system_job_urls)), + url(r'^notification_templates/', include(notification_template_urls)), url(r'^unified_job_templates/$', 'unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 9a41e779ea..72e1fb606e 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -135,6 +135,7 @@ class ApiV1RootView(APIView): data['system_job_templates'] = 
reverse('api:system_job_template_list') data['system_jobs'] = reverse('api:system_job_list') data['schedules'] = reverse('api:schedule_list') + data['notification_templates'] = reverse('api:notification_template_list') data['unified_job_templates'] = reverse('api:unified_job_template_list') data['unified_jobs'] = reverse('api:unified_job_list') data['activity_stream'] = reverse('api:activity_stream_list') @@ -2919,6 +2920,18 @@ class AdHocCommandStdout(UnifiedJobStdout): model = AdHocCommand new_in_220 = True +class NotificationTemplateList(ListCreateAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + new_in_300 = True + +class NotificationTemplateDetail(RetrieveDestroyAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + new_in_300 = True + class ActivityStreamList(SimpleListAPIView): model = ActivityStream diff --git a/awx/main/access.py b/awx/main/access.py index e17fc59b02..e4ef4653a7 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1484,6 +1484,18 @@ class ScheduleAccess(BaseAccess): else: return False +class NotificationTemplateAccess(BaseAccess): + ''' + I can see/use a notification template if I have permission to + ''' + model = NotificationTemplate + + def get_queryset(self): + qs = self.model.objects.filter(active=True).distinct() + if self.user.is_superuser: + return qs + return qs + class ActivityStreamAccess(BaseAccess): ''' I can see activity stream events only when I have permission on all objects included in the event @@ -1683,3 +1695,4 @@ register_access(UnifiedJob, UnifiedJobAccess) register_access(ActivityStream, ActivityStreamAccess) register_access(CustomInventoryScript, CustomInventoryScriptAccess) register_access(TowerSettings, TowerSettingsAccess) +register_access(NotificationTemplate, NotificationTemplateAccess) diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index e2539edc4b..81c5b31e7f 100644 --- 
a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -4,6 +4,9 @@ import logging from django.db import models +from django.core.urlresolvers import reverse +from django.utils.translation import ugettext_lazy as _ + from awx.main.models.base import * # noqa from awx.main.notifications.email_backend import CustomEmailBackend from awx.main.notifications.slack_backend import SlackBackend @@ -14,6 +17,8 @@ from jsonfield import JSONField logger = logging.getLogger('awx.main.models.notifications') +__all__ = ['NotificationTemplate'] + class NotificationTemplate(CommonModel): NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend), @@ -32,6 +37,9 @@ class NotificationTemplate(CommonModel): notification_configuration = JSONField(blank=False) + def get_absolute_url(self): + return reverse('api:notification_template_detail', args=(self.pk,)) + @property def notification_class(self): return CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py index 47c6c6dd00..db0a8b3c2f 100644 --- a/awx/main/notifications/email_backend.py +++ b/awx/main/notifications/email_backend.py @@ -7,5 +7,10 @@ from django.core.mail.backends.smtp import EmailBackend class CustomEmailBackend(EmailBackend): - init_parameters = ("host", "port", "username", "password", - "use_tls", "use_ssl") + init_parameters = {"host": {"label": "Host", "type": "string"}, + "port": {"label": "Port", "type": "int"}, + "username": {"label": "Username", "type": "string"}, + "password": {"label": "Password", "type": "password"}, + "use_tls": {"label": "Use TLS", "type": "bool"}, + "use_ssl": {"label": "Use SSL", "type": "bool"}} + diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 07c0f2c4fc..84ae60c3cb 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -10,7 +10,7 @@ logger = 
logging.getLogger('awx.main.notifications.slack_backend') class SlackBackend(BaseEmailBackend): - init_parameters = ('token',) + init_parameters = {"token": {"label": "Token", "type": "password"}} def __init__(self, token, fail_silently=False, **kwargs): super(SlackBackend, self).__init__(fail_silently=fail_silently) diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index 86d6829c09..d0d2fbfe76 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -11,7 +11,9 @@ logger = logging.getLogger('awx.main.notifications.twilio_backend') class TwilioBackend(BaseEmailBackend): - init_parameters = ('account_sid', 'account_token', 'from_phone',) + init_parameters = {"account_sid": {"label": "Account SID", "type": "string"}, + "account_token": {"label": "Account Token", "type": "password"}, + "from_phone": {"label": "Source Phone Number", "type": "string"}} def __init__(self, account_sid, account_token, from_phone, fail_silently=False, **kwargs): super(TwilioBackend, self).__init__(fail_silently=fail_silently) From 319deffc180578cd21ccdcdd39aaf19dfb9350f1 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 4 Feb 2016 11:31:34 -0500 Subject: [PATCH 05/38] Implement notification serializer and validations --- awx/api/metadata.py | 8 +++++++- awx/api/serializers.py | 15 +++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/awx/api/metadata.py b/awx/api/metadata.py index 46ea3f36da..b5e6d7043a 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -12,7 +12,7 @@ from rest_framework import serializers from rest_framework.request import clone_request # Ansible Tower -from awx.main.models import InventorySource +from awx.main.models import InventorySource, NotificationTemplate class Metadata(metadata.SimpleMetadata): @@ -55,6 +55,12 @@ class Metadata(metadata.SimpleMetadata): get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp) 
field_info['%s_group_by_choices' % cp] = get_group_by_choices() + # Special handling of notification configuration where the required properties + # are conditional on the type selected. + if field.field_name == 'notification_configuration': + for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES: + field_info[notification_type_name] = notification_type_class.init_parameters + # Update type of fields returned... if field.field_name == 'type': field_info['type'] = 'multiple choice' diff --git a/awx/api/serializers.py b/awx/api/serializers.py index f655b35f4e..b513af2c68 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2035,6 +2035,21 @@ class JobLaunchSerializer(BaseSerializer): attrs = super(JobLaunchSerializer, self).validate(attrs) return attrs +class NotificationTemplateSerializer(BaseSerializer): + + class Meta: + model = NotificationTemplate + fields = ('*', 'notification_type', 'notification_configuration') + + def validate(self, attrs): + notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] + missing_fields = [] + for field in notification_class.init_parameters: + if field not in attrs['notification_configuration']: + missing_fields.append(field) + if missing_fields: + raise serializers.ValidationError("Missing required fields for Notification Configuration: {}".format(missing_fields)) + return attrs class ScheduleSerializer(BaseSerializer): From 8db2f6040534c461bb9ce48662e7b341883f4e23 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 9 Feb 2016 23:12:55 -0500 Subject: [PATCH 06/38] Notification serializers, views, and tasks * Implement concrete Notification model for notification runs * Implement NotificationTemplate and Notification serializers and views * Implement ancillary views * Implement NotificationTemplate trigger m2m fields on all job templates via a fields mixin * Link NotificationTemplates with an org * Link 
notifications with the activity stream * Implement Notification celery tasks * Extend Backend field parameters to identify sender and receiver as parameters needed by the message and not the backend itself * Updates to backends to better fit the django email backend model as it relates to Messages * Implement success job chain task + notifications * Implement notifications in error job chain task --- awx/api/serializers.py | 45 ++++++- awx/api/urls.py | 24 ++++ awx/api/views.py | 157 ++++++++++++++++++++++- awx/main/access.py | 14 ++ awx/main/models/__init__.py | 2 + awx/main/models/activity_stream.py | 2 + awx/main/models/base.py | 25 +++- awx/main/models/inventory.py | 10 ++ awx/main/models/jobs.py | 11 ++ awx/main/models/notifications.py | 93 +++++++++++++- awx/main/models/organization.py | 2 +- awx/main/models/projects.py | 19 +++ awx/main/models/unified_jobs.py | 16 ++- awx/main/notifications/email_backend.py | 6 +- awx/main/notifications/slack_backend.py | 10 +- awx/main/notifications/twilio_backend.py | 7 +- awx/main/signals.py | 2 + awx/main/tasks.py | 77 ++++++++++- 18 files changed, 502 insertions(+), 20 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index b513af2c68..7bc25f532d 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -457,6 +457,8 @@ class BaseSerializer(serializers.ModelSerializer): ret.pop(parent_key, None) return ret +class EmptySerializer(serializers.Serializer): + pass class BaseFactSerializer(DocumentSerializer): @@ -765,7 +767,11 @@ class OrganizationSerializer(BaseSerializer): users = reverse('api:organization_users_list', args=(obj.pk,)), admins = reverse('api:organization_admins_list', args=(obj.pk,)), teams = reverse('api:organization_teams_list', args=(obj.pk,)), - activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)) + activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)), + notifiers = reverse('api:organization_notifiers_list', 
args=(obj.pk,)), + notifiers_any = reverse('api:organization_notifications_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:organization_notifications_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:organization_notifications_error_list', args=(obj.pk,)), )) return res @@ -845,6 +851,9 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): project_updates = reverse('api:project_updates_list', args=(obj.pk,)), schedules = reverse('api:project_schedules_list', args=(obj.pk,)), activity_stream = reverse('api:project_activity_stream_list', args=(obj.pk,)), + notifiers_any = reverse('api:project_notifications_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:project_notifications_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:project_notifications_error_list', args=(obj.pk,)), )) # Backwards compatibility. if obj.current_update: @@ -888,6 +897,7 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer): res.update(dict( project = reverse('api:project_detail', args=(obj.project.pk,)), cancel = reverse('api:project_update_cancel', args=(obj.pk,)), + notifications = reverse('api:project_update_notifications_list', args=(obj.pk,)), )) return res @@ -1288,6 +1298,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt activity_stream = reverse('api:inventory_activity_stream_list', args=(obj.pk,)), hosts = reverse('api:inventory_source_hosts_list', args=(obj.pk,)), groups = reverse('api:inventory_source_groups_list', args=(obj.pk,)), + notifiers_any = reverse('api:inventory_source_notifications_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:inventory_source_notifications_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:inventory_source_notifications_error_list', args=(obj.pk,)), )) if obj.inventory and obj.inventory.active: res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) @@ 
-1332,6 +1345,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri res.update(dict( inventory_source = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,)), cancel = reverse('api:inventory_update_cancel', args=(obj.pk,)), + notifications = reverse('api:inventory_update_notifications_list', args=(obj.pk,)), )) return res @@ -1550,6 +1564,9 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): schedules = reverse('api:job_template_schedules_list', args=(obj.pk,)), activity_stream = reverse('api:job_template_activity_stream_list', args=(obj.pk,)), launch = reverse('api:job_template_launch', args=(obj.pk,)), + notifiers_any = reverse('api:job_template_notifications_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:job_template_notifications_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:job_template_notifications_error_list', args=(obj.pk,)), )) if obj.host_config_key: res['callback'] = reverse('api:job_template_callback', args=(obj.pk,)) @@ -1604,6 +1621,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): job_tasks = reverse('api:job_job_tasks_list', args=(obj.pk,)), job_host_summaries = reverse('api:job_job_host_summaries_list', args=(obj.pk,)), activity_stream = reverse('api:job_activity_stream_list', args=(obj.pk,)), + notifications = reverse('api:job_notifications_list', args=(obj.pk,)), )) if obj.job_template and obj.job_template.active: res['job_template'] = reverse('api:job_template_detail', @@ -2039,7 +2057,15 @@ class NotificationTemplateSerializer(BaseSerializer): class Meta: model = NotificationTemplate - fields = ('*', 'notification_type', 'notification_configuration') + fields = ('*', 'organization', 'notification_type', 'notification_configuration') + + def get_related(self, obj): + res = super(NotificationTemplateSerializer, self).get_related(obj) + res.update(dict( + test = reverse('api:notification_template_test', 
args=(obj.pk,)), + notifications = reverse('api:notification_template_notification_list', args=(obj.pk,)), + )) + return res def validate(self, attrs): notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] @@ -2047,10 +2073,25 @@ class NotificationTemplateSerializer(BaseSerializer): for field in notification_class.init_parameters: if field not in attrs['notification_configuration']: missing_fields.append(field) + # TODO: Type checks if missing_fields: raise serializers.ValidationError("Missing required fields for Notification Configuration: {}".format(missing_fields)) return attrs +class NotificationSerializer(BaseSerializer): + + class Meta: + model = Notification + fields = ('*', '-name', '-description', 'notifier', 'error', 'status', 'notifications_sent', + 'notification_type', 'recipients', 'subject', 'body') + + def get_related(self, obj): + res = super(NotificationSerializer, self).get_related(obj) + res.update(dict( + notification_template = reverse('api:notification_template_detail', args=(obj.notifier.pk,)), + )) + return res + class ScheduleSerializer(BaseSerializer): class Meta: diff --git a/awx/api/urls.py b/awx/api/urls.py index 8e48250560..7e55e46a5c 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -20,6 +20,10 @@ organization_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/projects/$', 'organization_projects_list'), url(r'^(?P[0-9]+)/teams/$', 'organization_teams_list'), url(r'^(?P[0-9]+)/activity_stream/$', 'organization_activity_stream_list'), + url(r'^(?P[0-9]+)/notifiers/$', 'organization_notifiers_list'), + url(r'^(?P[0-9]+)/notifications_any/$', 'organization_notifications_any_list'), + url(r'^(?P[0-9]+)/notifications_error/$', 'organization_notifications_error_list'), + url(r'^(?P[0-9]+)/notifications_success/$', 'organization_notifications_success_list'), ) user_urls = patterns('awx.api.views', @@ -44,12 +48,16 @@ project_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/project_updates/$', 
'project_updates_list'), url(r'^(?P[0-9]+)/activity_stream/$', 'project_activity_stream_list'), url(r'^(?P[0-9]+)/schedules/$', 'project_schedules_list'), + url(r'^(?P[0-9]+)/notifications_any/$', 'project_notifications_any_list'), + url(r'^(?P[0-9]+)/notifications_error/$', 'project_notifications_error_list'), + url(r'^(?P[0-9]+)/notifications_success/$', 'project_notifications_success_list'), ) project_update_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/$', 'project_update_detail'), url(r'^(?P[0-9]+)/cancel/$', 'project_update_cancel'), url(r'^(?P[0-9]+)/stdout/$', 'project_update_stdout'), + url(r'^(?P[0-9]+)/notifications/$', 'project_update_notifications_list'), ) team_urls = patterns('awx.api.views', @@ -120,12 +128,16 @@ inventory_source_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/schedules/$', 'inventory_source_schedules_list'), url(r'^(?P[0-9]+)/groups/$', 'inventory_source_groups_list'), url(r'^(?P[0-9]+)/hosts/$', 'inventory_source_hosts_list'), + url(r'^(?P[0-9]+)/notifications_any/$', 'inventory_source_notifications_any_list'), + url(r'^(?P[0-9]+)/notifications_error/$', 'inventory_source_notifications_error_list'), + url(r'^(?P[0-9]+)/notifications_success/$', 'inventory_source_notifications_success_list'), ) inventory_update_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/$', 'inventory_update_detail'), url(r'^(?P[0-9]+)/cancel/$', 'inventory_update_cancel'), url(r'^(?P[0-9]+)/stdout/$', 'inventory_update_stdout'), + url(r'^(?P[0-9]+)/notifications/$', 'inventory_update_notifications_list'), ) inventory_script_urls = patterns('awx.api.views', @@ -153,6 +165,9 @@ job_template_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/schedules/$', 'job_template_schedules_list'), url(r'^(?P[0-9]+)/survey_spec/$', 'job_template_survey_spec'), url(r'^(?P[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'), + url(r'^(?P[0-9]+)/notifications_any/$', 'job_template_notifications_any_list'), + url(r'^(?P[0-9]+)/notifications_error/$', 
'job_template_notifications_error_list'), + url(r'^(?P[0-9]+)/notifications_success/$', 'job_template_notifications_success_list'), ) job_urls = patterns('awx.api.views', @@ -167,6 +182,7 @@ job_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/job_tasks/$', 'job_job_tasks_list'), url(r'^(?P[0-9]+)/activity_stream/$', 'job_activity_stream_list'), url(r'^(?P[0-9]+)/stdout/$', 'job_stdout'), + url(r'^(?P[0-9]+)/notifications/$', 'job_notifications_list'), ) job_host_summary_urls = patterns('awx.api.views', @@ -212,6 +228,13 @@ system_job_urls = patterns('awx.api.views', notification_template_urls = patterns('awx.api.views', url(r'^$', 'notification_template_list'), url(r'^(?P[0-9]+)/$', 'notification_template_detail'), + url(r'^(?P[0-9]+)/test/$', 'notification_template_test'), + url(r'^(?P[0-9]+)/notifications/$', 'notification_template_notification_list'), +) + +notification_urls = patterns('awx.api.views', + url(r'^$', 'notification_list'), + url(r'^(?P[0-9]+)/$', 'notification_detail'), ) schedule_urls = patterns('awx.api.views', @@ -263,6 +286,7 @@ v1_urls = patterns('awx.api.views', url(r'^system_job_templates/', include(system_job_template_urls)), url(r'^system_jobs/', include(system_job_urls)), url(r'^notification_templates/', include(notification_template_urls)), + url(r'^notifications/', include(notification_urls)), url(r'^unified_job_templates/$', 'unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 72e1fb606e..be1bd3b609 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -56,7 +56,7 @@ from social.backends.utils import load_backends # AWX from awx.main.task_engine import TaskSerializer, TASK_FILE, TEMPORARY_TASK_FILE -from awx.main.tasks import mongodb_control +from awx.main.tasks import mongodb_control, send_notifications from awx.main.access import get_user_queryset from awx.main.ha import is_ha_environment from 
awx.api.authentication import TaskAuthentication, TokenGetAuthentication @@ -136,6 +136,7 @@ class ApiV1RootView(APIView): data['system_jobs'] = reverse('api:system_job_list') data['schedules'] = reverse('api:schedule_list') data['notification_templates'] = reverse('api:notification_template_list') + data['notifications'] = reverse('api:notification_list') data['unified_job_templates'] = reverse('api:unified_job_template_list') data['unified_jobs'] = reverse('api:unified_job_list') data['activity_stream'] = reverse('api:activity_stream_list') @@ -684,6 +685,35 @@ class OrganizationActivityStreamList(SubListAPIView): # Okay, let it through. return super(type(self), self).get(request, *args, **kwargs) +class OrganizationNotifiersList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = Organization + relationship = 'notification_templates' + parent_key = 'organization' + +class OrganizationNotificationsAnyList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = Organization + relationship = 'notification_any' + +class OrganizationNotificationsErrorList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = Organization + relationship = 'notification_erros' + +class OrganizationNotificationsSuccessList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = Organization + relationship = 'notification_success' + class TeamList(ListCreateAPIView): model = Team @@ -849,6 +879,26 @@ class ProjectActivityStreamList(SubListAPIView): return qs.filter(project=parent) return qs.filter(Q(project=parent) | Q(credential__in=parent.credential)) +class ProjectNotificationsAnyList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + 
serializer_class = NotificationTemplateSerializer + parent_model = Project + relationship = 'notification_any' + +class ProjectNotificationsErrorList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = Project + relationship = 'notification_errors' + +class ProjectNotificationsSuccessList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = Project + relationship = 'notification_success' class ProjectUpdatesList(SubListAPIView): @@ -899,6 +949,12 @@ class ProjectUpdateCancel(RetrieveAPIView): else: return self.http_method_not_allowed(request, *args, **kwargs) +class ProjectUpdateNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = Project + relationship = 'notifications' class UserList(ListCreateAPIView): @@ -1725,6 +1781,27 @@ class InventorySourceActivityStreamList(SubListAPIView): # Okay, let it through. 
return super(type(self), self).get(request, *args, **kwargs) +class InventorySourceNotificationsAnyList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = InventorySource + relationship = 'notification_any' + +class InventorySourceNotificationsErrorList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = InventorySource + relationship = 'notification_errors' + +class InventorySourceNotificationsSuccessList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = InventorySource + relationship = 'notification_success' + class InventorySourceHostsList(SubListAPIView): model = Host @@ -1789,6 +1866,13 @@ class InventoryUpdateCancel(RetrieveAPIView): else: return self.http_method_not_allowed(request, *args, **kwargs) +class InventoryUpdateNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = InventoryUpdate + relationship = 'notifications' + class JobTemplateList(ListCreateAPIView): model = JobTemplate @@ -1943,6 +2027,27 @@ class JobTemplateActivityStreamList(SubListAPIView): # Okay, let it through. 
return super(type(self), self).get(request, *args, **kwargs) +class JobTemplateNotificationsAnyList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = JobTemplate + relationship = 'notification_any' + +class JobTemplateNotificationsErrorList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = JobTemplate + relationship = 'notification_errors' + +class JobTemplateNotificationsSuccessList(SubListCreateAttachDetachAPIView): + + model = NotificationTemplate + serializer_class = NotificationTemplateSerializer + parent_model = JobTemplate + relationship = 'notification_success' + class JobTemplateCallback(GenericAPIView): model = JobTemplate @@ -2129,7 +2234,7 @@ class SystemJobTemplateDetail(RetrieveAPIView): class SystemJobTemplateLaunch(GenericAPIView): model = SystemJobTemplate - # FIXME: Add serializer class to define fields in OPTIONS request! 
+ serializer_class = EmptySerializer def get(self, request, *args, **kwargs): return Response({}) @@ -2276,6 +2381,13 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView): headers = {'Location': new_job.get_absolute_url()} return Response(data, status=status.HTTP_201_CREATED, headers=headers) +class JobNotificationsList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = Job + relationship = 'notifications' + class BaseJobHostSummariesList(SubListAPIView): model = JobHostSummary @@ -2926,12 +3038,51 @@ class NotificationTemplateList(ListCreateAPIView): serializer_class = NotificationTemplateSerializer new_in_300 = True -class NotificationTemplateDetail(RetrieveDestroyAPIView): +class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): model = NotificationTemplate serializer_class = NotificationTemplateSerializer new_in_300 = True +class NotificationTemplateTest(GenericAPIView): + + view_name = 'Notification Template Test' + model = NotificationTemplate + serializer_class = EmptySerializer + new_in_300 = True + + def post(self, request, *args, **kwargs): + obj = self.get_object() + notification = obj.generate_notification("Tower Notification Test", "Ansible Tower Test Notification") + if not notification: + return Response({}, status=status.HTTP_400_BAD_REQUEST) + else: + send_notifications.delay([notification.id]) + headers = {'Location': notification.get_absolute_url()} + return Response({"notification": notification.id}, + headers=headers, + status=status.HTTP_202_ACCEPTED) + +class NotificationTemplateNotificationList(SubListAPIView): + + model = Notification + serializer_class = NotificationSerializer + parent_model = NotificationTemplate + relationship = 'notifications' + parent_key = 'notifier' + +class NotificationList(ListAPIView): + + model = Notification + serializer_class = NotificationSerializer + new_in_300 = True + +class NotificationDetail(RetrieveAPIView): + + model = NotificationTemplate + 
serializer_class = NotificationSerializer + new_in_300 = True + class ActivityStreamList(SimpleListAPIView): model = ActivityStream diff --git a/awx/main/access.py b/awx/main/access.py index e4ef4653a7..3ffbaf7f85 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1496,6 +1496,19 @@ class NotificationTemplateAccess(BaseAccess): return qs return qs +class NotificationAccess(BaseAccess): + ''' + I can see/use a notification if I have permission to + ''' + model = Notification + + def get_queryset(self): + qs = self.model.objects.distinct() + if self.user.is_superuser: + return qs + return qs + + class ActivityStreamAccess(BaseAccess): ''' I can see activity stream events only when I have permission on all objects included in the event @@ -1696,3 +1709,4 @@ register_access(ActivityStream, ActivityStreamAccess) register_access(CustomInventoryScript, CustomInventoryScriptAccess) register_access(TowerSettings, TowerSettingsAccess) register_access(NotificationTemplate, NotificationTemplateAccess) +register_access(Notification, NotificationAccess) diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 4e6d45f18f..2397b6137b 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -61,3 +61,5 @@ activity_stream_registrar.connect(AdHocCommand) activity_stream_registrar.connect(Schedule) activity_stream_registrar.connect(CustomInventoryScript) activity_stream_registrar.connect(TowerSettings) +activity_stream_registrar.connect(NotificationTemplate) +activity_stream_registrar.connect(Notification) diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index b695831ada..12a54c7af2 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -53,6 +53,8 @@ class ActivityStream(models.Model): ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) schedule = models.ManyToManyField("Schedule", blank=True) custom_inventory_script = 
models.ManyToManyField("CustomInventoryScript", blank=True) + notification_template = models.ManyToManyField("NotificationTemplate", blank=True) + notification = models.ManyToManyField("Notification", blank=True) def get_absolute_url(self): return reverse('api:activity_stream_detail', args=(self.pk,)) diff --git a/awx/main/models/base.py b/awx/main/models/base.py index 61515d7d18..f3f158855c 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -25,7 +25,7 @@ from awx.main.utils import encrypt_field __all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel', 'PasswordFieldsModel', 'PrimordialModel', 'CommonModel', - 'CommonModelNameNotUnique', + 'CommonModelNameNotUnique', 'NotificationFieldsModel', 'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ', 'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN', 'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES', @@ -337,3 +337,26 @@ class CommonModelNameNotUnique(PrimordialModel): max_length=512, unique=False, ) + +class NotificationFieldsModel(BaseModel): + + class Meta: + abstract = True + + notification_errors = models.ManyToManyField( + "NotificationTemplate", + blank=True, + related_name='%(class)s_notifications_for_errors' + ) + + notification_success = models.ManyToManyField( + "NotificationTemplate", + blank=True, + related_name='%(class)s_notifications_for_success' + ) + + notification_any = models.ManyToManyField( + "NotificationTemplate", + blank=True, + related_name='%(class)s_notifications_for_any' + ) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 37b1dafc4b..febf010f20 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -23,6 +23,7 @@ from awx.main.managers import HostManager from awx.main.models.base import * # noqa from awx.main.models.jobs import Job from awx.main.models.unified_jobs import * # noqa +from awx.main.models.notifications import NotificationTemplate from awx.main.utils import 
ignore_inventory_computed_fields, _inventory_updates __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript'] @@ -1180,6 +1181,15 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions): return True return False + @property + def notifiers(self): + # Return all notifiers defined on the Project, and on the Organization for each trigger type + base_notifiers = NotificationTemplate.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(organization_notifications_for_errors__in=self)) + success_notifiers = list(base_notifiers.filter(organization_notifications_for_success__in=self)) + any_notifiers = list(base_notifiers.filter(organization_notifications_for_any__in=self)) + return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) + def clean_source(self): source = self.source if source and self.group: diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 833d20a9b4..42f7ccf676 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -22,6 +22,7 @@ from jsonfield import JSONField from awx.main.constants import CLOUD_PROVIDERS from awx.main.models.base import * # noqa from awx.main.models.unified_jobs import * # noqa +from awx.main.models.notifications import NotificationTemplate from awx.main.utils import decrypt_field, ignore_inventory_computed_fields from awx.main.utils import emit_websocket_notification from awx.main.redact import PlainTextCleaner @@ -330,6 +331,16 @@ class JobTemplate(UnifiedJobTemplate, JobOptions): def _can_update(self): return self.can_start_without_user_input() + @property + def notifiers(self): + # Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type + # TODO: Currently there is no org fk on project so this will need to be added once that is + # available after the rbac pr + base_notifiers = NotificationTemplate.objects.filter(active=True) + error_notifiers 
= list(base_notifiers.filter(unifiedjobtemplate_notifications_for_errors__in=[self, self.project])) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_success__in=[self, self.project])) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_any__in=[self, self.project])) + return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) class Job(UnifiedJob, JobOptions): ''' diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 81c5b31e7f..a89f460e64 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -5,7 +5,9 @@ import logging from django.db import models from django.core.urlresolvers import reverse +from django.core.mail.message import EmailMessage from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str from awx.main.models.base import * # noqa from awx.main.notifications.email_backend import CustomEmailBackend @@ -17,7 +19,7 @@ from jsonfield import JSONField logger = logging.getLogger('awx.main.models.notifications') -__all__ = ['NotificationTemplate'] +__all__ = ['NotificationTemplate', 'Notification'] class NotificationTemplate(CommonModel): @@ -30,6 +32,14 @@ class NotificationTemplate(CommonModel): class Meta: app_label = 'main' + organization = models.ForeignKey( + 'Organization', + blank=False, + null=True, + on_delete=models.SET_NULL, + related_name='notification_templates', + ) + notification_type = models.CharField( max_length = 32, choices=NOTIFICATION_TYPE_CHOICES, @@ -42,4 +52,83 @@ class NotificationTemplate(CommonModel): @property def notification_class(self): - return CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] + return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] + + @property + def recipients(self): + return self.notification_configuration[self.notification_class.recipient_parameter] + + def generate_notification(self, subject, 
message): + notification = Notification(notifier=self, + notification_type=self.notification_type, + recipients=smart_str(self.recipients), + subject=subject, + body=message) + notification.save() + return notification + + def send(self, subject, body): + recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter) + sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None) + backend_obj = self.notification_class(**self.notification_configuration) + notification_obj = EmailMessage(subject, body, sender, recipients) + return backend_obj.send_messages([notification_obj]) + +class Notification(CreatedModifiedModel): + ''' + A notification event emitted when a Notifier is run + ''' + + NOTIFICATION_STATE_CHOICES = [ + ('pending', _('Pending')), + ('successful', _('Successful')), + ('failed', _('Failed')), + ] + + class Meta: + app_label = 'main' + ordering = ('pk',) + + notifier = models.ForeignKey( + 'NotificationTemplate', + related_name='notifications', + on_delete=models.CASCADE, + editable=False + ) + status = models.CharField( + max_length=20, + choices=NOTIFICATION_STATE_CHOICES, + default='pending', + editable=False, + ) + error = models.TextField( + blank=True, + default='', + editable=False, + ) + notifications_sent = models.IntegerField( + default=0, + editable=False, + ) + notification_type = models.CharField( + max_length = 32, + choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES, + ) + recipients = models.TextField( + blank=True, + default='', + editable=False, + ) + subject = models.TextField( + blank=True, + default='', + editable=False, + ) + body = models.TextField( + blank=True, + default='', + editable=False, + ) + + def get_absolute_url(self): + return reverse('api:notification_detail', args=(self.pk,)) diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index c22b907082..58f563735b 100644 --- a/awx/main/models/organization.py +++ 
b/awx/main/models/organization.py @@ -23,7 +23,7 @@ from awx.main.conf import tower_settings __all__ = ['Organization', 'Team', 'Permission', 'Profile', 'AuthToken'] -class Organization(CommonModel): +class Organization(CommonModel, NotificationFieldsModel): ''' An organization is the basic unit of multi-tenancy divisions ''' diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 2fa6512ca0..730604d3e4 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -10,6 +10,7 @@ import urlparse # Django from django.conf import settings from django.db import models +from django.db.models import Q from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import smart_str, smart_text from django.core.exceptions import ValidationError @@ -20,6 +21,7 @@ from django.utils.timezone import now, make_aware, get_default_timezone from awx.lib.compat import slugify from awx.main.models.base import * # noqa from awx.main.models.jobs import Job +from awx.main.models.notifications import NotificationTemplate from awx.main.models.unified_jobs import * # noqa from awx.main.utils import update_scm_url @@ -309,6 +311,23 @@ class Project(UnifiedJobTemplate, ProjectOptions): return True return False + @property + def notifiers(self): + # Return all notifiers defined on the Project, and on the Organization for each trigger type + # TODO: Currently there is no org fk on project so this will need to be added back once that is + # available after the rbac pr + base_notifiers = NotificationTemplate.objects.filter(active=True) + # error_notifiers = list(base_notifiers.filter(Q(project_notifications_for_errors__in=self) | + # Q(organization_notifications_for_errors__in=self.organization))) + # success_notifiers = list(base_notifiers.filter(Q(project_notifications_for_success__in=self) | + # Q(organization_notifications_for_success__in=self.organization))) + # any_notifiers = 
list(base_notifiers.filter(Q(project_notifications_for_any__in=self) | + # Q(organization_notifications_for_any__in=self.organization))) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_errors=self)) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_success=self)) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_any=self)) + return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) + def get_absolute_url(self): return reverse('api:project_detail', args=(self.pk,)) diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index cd519af726..c6ea2b082b 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -30,6 +30,7 @@ from djcelery.models import TaskMeta # AWX from awx.main.models.base import * # noqa from awx.main.models.schedules import Schedule +from awx.main.models.notifications import Notification from awx.main.utils import decrypt_field, emit_websocket_notification, _inventory_updates from awx.main.redact import UriCleaner @@ -40,7 +41,7 @@ logger = logging.getLogger('awx.main.models.unified_jobs') CAN_CANCEL = ('new', 'pending', 'waiting', 'running') -class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique): +class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel): ''' Concrete base class for unified job templates. ''' @@ -297,6 +298,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique): ''' return kwargs # Override if needed in subclass. + @property + def notifiers(self): + ''' + Return notifiers relevant to this Unified Job Template + ''' + # NOTE: Derived classes should implement + return NotificationTemplate.objects.none() + def create_unified_job(self, **kwargs): ''' Create a new unified job based on this unified job template. 
@@ -385,6 +394,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique editable=False, related_name='%(class)s_blocked_jobs+', ) + notifications = models.ManyToManyField( + 'Notification', + editable=False, + related_name='%(class)s_notifications', + ) cancel_flag = models.BooleanField( blank=True, default=False, diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py index db0a8b3c2f..271f585d5c 100644 --- a/awx/main/notifications/email_backend.py +++ b/awx/main/notifications/email_backend.py @@ -12,5 +12,9 @@ class CustomEmailBackend(EmailBackend): "username": {"label": "Username", "type": "string"}, "password": {"label": "Password", "type": "password"}, "use_tls": {"label": "Use TLS", "type": "bool"}, - "use_ssl": {"label": "Use SSL", "type": "bool"}} + "use_ssl": {"label": "Use SSL", "type": "bool"}, + "sender": {"label": "Sender Email", "type": "string"}, + "recipients": {"label": "Recipient List", "type": "list"}} + recipient_parameter = "recipients" + sender_parameter = "sender" diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 84ae60c3cb..950d5c2c6e 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -10,7 +10,10 @@ logger = logging.getLogger('awx.main.notifications.slack_backend') class SlackBackend(BaseEmailBackend): - init_parameters = {"token": {"label": "Token", "type": "password"}} + init_parameters = {"token": {"label": "Token", "type": "password"}, + "channels": {"label": "Destination Channels", "type": "list"}} + recipient_parameter = "channels" + sender_parameter = None def __init__(self, token, fail_silently=False, **kwargs): super(SlackBackend, self).__init__(fail_silently=fail_silently) @@ -37,8 +40,9 @@ class SlackBackend(BaseEmailBackend): sent_messages = 0 for m in messages: try: - self.connection.rtm_send_message(m.to, m.body) - sent_messages += 1 + for r in 
m.recipients(): + self.connection.rtm_send_message(r, m.body) + sent_messages += 1 except Exception as e: if not self.fail_silently: raise diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index d0d2fbfe76..cf2ced368b 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -13,7 +13,10 @@ class TwilioBackend(BaseEmailBackend): init_parameters = {"account_sid": {"label": "Account SID", "type": "string"}, "account_token": {"label": "Account Token", "type": "password"}, - "from_phone": {"label": "Source Phone Number", "type": "string"}} + "from_number": {"label": "Source Phone Number", "type": "string"}, + "to_numbers": {"label": "Destination SMS Numbers", "type": "list"}} + recipient_parameter = "to_numbers" + sender_parameter = "from_number" def __init__(self, account_sid, account_token, from_phone, fail_silently=False, **kwargs): super(TwilioBackend, self).__init__(fail_silently=fail_silently) @@ -34,7 +37,7 @@ class TwilioBackend(BaseEmailBackend): try: connection.messages.create( to=m.to, - from_=self.from_phone, + from_=m.from_email, body=m.body) sent_messages += 1 except Exception as e: diff --git a/awx/main/signals.py b/awx/main/signals.py index 8b0c22ec9d..f4d0014905 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -307,6 +307,8 @@ model_serializer_mapping = { Job: JobSerializer, AdHocCommand: AdHocCommandSerializer, TowerSettings: TowerSettingsSerializer, + NotificationTemplate: NotificationTemplateSerializer, + Notification: NotificationSerializer, } def activity_stream_create(sender, instance, created, **kwargs): diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 478bb6275c..aff9a6a585 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -53,7 +53,7 @@ from awx.fact.utils.connection import test_mongo_connection __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', 'RunAdHocCommand', 'handle_work_error', 
'handle_work_success', - 'update_inventory_computed_fields'] + 'update_inventory_computed_fields', 'send_notifications'] HIDDEN_PASSWORD = '**********' @@ -65,6 +65,26 @@ Try upgrading OpenSSH or providing your private key in an different format. \ logger = logging.getLogger('awx.main.tasks') +@task() +def send_notifications(notification_list, job_id=None): + if not isinstance(notification_list, list): + raise TypeError("notification_list should be of type list") + for notification_id in notification_list: + notification = Notification.objects.get(id=notification_id) + try: + sent = notification.notifier.send(notification.subject, notification.body) + notification.status = "successful" + notification.notifications_sent = sent + except Exception as e: + logger.error("Send Notification Failed {}".format(e)) + notification.status = "failed" + notification.error = str(e) + finally: + notification.save() + if job_id is not None: + j = UnifiedJob.objects.get(id=job_id) + j.notifications.add(notification) + @task() def bulk_inventory_element_delete(inventory, hosts=[], groups=[]): from awx.main.signals import disable_activity_stream @@ -162,12 +182,41 @@ def mongodb_control(cmd): @task(bind=True) def handle_work_success(self, result, task_actual): - # TODO: Perform Notification tasks - pass + if task_actual['type'] == 'project_update': + instance = ProjectUpdate.objects.get(id=task_actual['id']) + instance_name = instance.name + notifiers = instance.project.notifiers + friendly_name = "Project Update" + elif task_actual['type'] == 'inventory_update': + instance = InventoryUpdate.objects.get(id=task_actual['id']) + instance_name = instance.name + notifiers = instance.inventory_source.notifiers + friendly_name = "Inventory Update" + elif task_actual['type'] == 'job': + instance = Job.objects.get(id=task_actual['id']) + instance_name = instance.job_template.name + notifiers = instance.job_template.notifiers + friendly_name = "Job" + elif task_actual['type'] == 
'ad_hoc_command': + instance = AdHocCommand.objects.get(id=task_actual['id']) + instance_name = instance.module_name + notifiers = [] # TODO: Ad-hoc commands need to notify someone + friendly_name = "AdHoc Command" + else: + return + notification_subject = "{} #{} '{}' succeeded on Ansible Tower".format(friendly_name, + task_actual['id'], + instance_name) + notification_body = "{} #{} '{}' succeeded on Ansible Tower\nTo view the output: {}".format(friendly_name, + task_actual['id'], + instance_name, + instance.get_absolute_url()) + send_notifications.delay([n.generate_notification(notification_subject, notification_body) + for n in notifiers.get('success', []) + notifiers.get('any', [])], + job_id=task_actual['id']) @task(bind=True) def handle_work_error(self, task_id, subtasks=None): - # TODO: Perform Notification tasks print('Executing error task id %s, subtasks: %s' % (str(self.request.id), str(subtasks))) first_task = None @@ -180,15 +229,23 @@ def handle_work_error(self, task_id, subtasks=None): if each_task['type'] == 'project_update': instance = ProjectUpdate.objects.get(id=each_task['id']) instance_name = instance.name + notifiers = instance.project.notifiers + friendly_name = "Project Update" elif each_task['type'] == 'inventory_update': instance = InventoryUpdate.objects.get(id=each_task['id']) instance_name = instance.name + notifiers = instance.inventory_source.notifiers + friendly_name = "Inventory Update" elif each_task['type'] == 'job': instance = Job.objects.get(id=each_task['id']) instance_name = instance.job_template.name + notifiers = instance.job_template.notifiers + friendly_name = "Job" elif each_task['type'] == 'ad_hoc_command': instance = AdHocCommand.objects.get(id=each_task['id']) instance_name = instance.module_name + notifiers = [] + friendly_name = "AdHoc Command" else: # Unknown task type break @@ -197,6 +254,7 @@ def handle_work_error(self, task_id, subtasks=None): first_task_id = instance.id first_task_type = each_task['type'] 
first_task_name = instance_name + first_task_friendly_name = friendly_name if instance.celery_task_id != task_id: instance.status = 'failed' instance.failed = True @@ -204,6 +262,17 @@ def handle_work_error(self, task_id, subtasks=None): (first_task_type, first_task_name, first_task_id) instance.save() instance.socketio_emit_status("failed") + notification_subject = "{} #{} '{}' failed on Ansible Tower".format(first_task_friendly_name, + first_task_id, + first_task_name) + notification_body = "{} #{} '{}' failed on Ansible Tower\nTo view the output: {}".format(first_task_friendly_name, + first_task_id, + first_task_name, + first_task.get_absolute_url()) + send_notifications.delay([n.generate_notification(notification_subject, notification_body).id + for n in notifiers.get('error', []) + notifiers.get('any', [])], + job_id=first_task_id) + @task() def update_inventory_computed_fields(inventory_id, should_update_hosts=True): From 34ebe0a8484fe5eeb897d36d5f8deee6d5779f2f Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 10 Feb 2016 15:17:11 -0500 Subject: [PATCH 07/38] Pagerduty and Hipchat backends plus some cleanup --- awx/main/models/notifications.py | 8 +++- awx/main/notifications/hipchat_backend.py | 46 +++++++++++++++++++++ awx/main/notifications/pagerduty_backend.py | 44 ++++++++++++++++++++ awx/main/notifications/twilio_backend.py | 2 +- requirements/requirements.txt | 1 + 5 files changed, 99 insertions(+), 2 deletions(-) create mode 100644 awx/main/notifications/hipchat_backend.py create mode 100644 awx/main/notifications/pagerduty_backend.py diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index a89f460e64..3d4dd9252e 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -13,6 +13,8 @@ from awx.main.models.base import * # noqa from awx.main.notifications.email_backend import CustomEmailBackend from awx.main.notifications.slack_backend import SlackBackend from 
awx.main.notifications.twilio_backend import TwilioBackend +from awx.main.notifications.pagerduty_backend import PagerDutyBackend +from awx.main.notifications.hipchat_backend import HipChatBackend # Django-JSONField from jsonfield import JSONField @@ -25,7 +27,9 @@ class NotificationTemplate(CommonModel): NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend), ('slack', _('Slack'), SlackBackend), - ('twilio', _('Twilio'), TwilioBackend)] + ('twilio', _('Twilio'), TwilioBackend), + ('pagerduty', _('Pagerduty'), PagerDutyBackend), + ('hipchat', _('HipChat'), HipChatBackend)] NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES] CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES]) @@ -69,6 +73,8 @@ class NotificationTemplate(CommonModel): def send(self, subject, body): recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter) + if not isinstance(recipients, list): + recipients = [recipients] sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None) backend_obj = self.notification_class(**self.notification_configuration) notification_obj = EmailMessage(subject, body, sender, recipients) diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py new file mode 100644 index 0000000000..2bb91b3919 --- /dev/null +++ b/awx/main/notifications/hipchat_backend.py @@ -0,0 +1,46 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import logging + +import requests + +from django.core.mail.backends.base import BaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.hipchat_backend') + +class HipChatBackend(BaseEmailBackend): + + init_parameters = {"token": {"label": "Token", "type": "password"}, + "channels": {"label": "Destination Channels", "type": "list"}, + "color": {"label": "Notification Color", "type": "string"}, + "api_url": {"label": "API Url (e.g: https://mycompany.hipchat.com)", "type": "string"}, + "notify": {"label": "Notify channel", "type": "bool"}, + "message_from": {"label": "Label to be shown with notification", "type": "string"}} + recipient_parameter = "channels" + sender_parameter = "message_from" + + def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs): + super(HipChatBackend, self).__init__(fail_silently=fail_silently) + self.token = token + self.color = color + self.api_url = api_url + self.notify = notify + + def send_messages(self, messages): + sent_messages = 0 + + for m in messages: + for rcp in m.recipients(): + r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp), + params={"auth_token": self.token}, + json={"color": self.color, + "message": m.body, + "notify": self.notify, + "from": m.from_email, + "message_format": "text"}) + if r.status_code != 204 and not self.fail_silently: + logger.error("Error sending messages: {}".format(r.text)) + raise Exception("Error sending message to hipchat: {}".format(r.text)) + sent_messages += 1 + return sent_messages diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py new file mode 100644 index 0000000000..161bb822bc --- /dev/null +++ b/awx/main/notifications/pagerduty_backend.py @@ -0,0 +1,44 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import logging +import pygerduty + +from django.core.mail.backends.base import BaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.pagerduty_backend') + +class PagerDutyBackend(BaseEmailBackend): + + init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"}, + "token": {"label": "API Token", "type": "password"}, + "service_key": {"label": "API Service/Integration Key", "type": "string"}, + "client_name": {"label": "Client Identifier", "type": "string"}} + recipient_parameter = "service_key" + sender_parameter = "client_name" + + def __init__(self, subdomain, token, fail_silently=False, **kwargs): + super(PagerDutyBackend, self).__init__(fail_silently=fail_silently) + self.subdomain = subdomain + self.token = token + + def send_messages(self, messages): + sent_messages = 0 + + try: + pager = pygerduty.PagerDuty(self.subdomain, self.token) + except Exception as e: + if not self.fail_silently: + raise + logger.error("Exception connecting to PagerDuty: {}".format(e)) + for m in messages: + try: + pager.trigger_incident(m.recipients()[0], + description=m.subject, + details=m.body, + client=m.from_email) + except Exception as e: + logger.error("Exception sending messages: {}".format(e)) + if not self.fail_silently: + raise + return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index cf2ced368b..d9c4cc43b6 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -41,7 +41,7 @@ class TwilioBackend(BaseEmailBackend): body=m.body) sent_messages += 1 except Exception as e: + logger.error("Exception sending messages: {}".format(e)) if not self.fail_silently: raise - logger.error("Exception sending messages: {}".format(e)) return sent_messages diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 73942d9eec..0f42f235a3 100644 --- a/requirements/requirements.txt +++ 
b/requirements/requirements.txt @@ -82,6 +82,7 @@ psycopg2 pyasn1==0.1.8 pycparser==2.14 pycrypto==2.6.1 +pygerduty==0.32.1 PyJWT==1.4.0 pymongo==2.8 pyOpenSSL==0.15.1 From 7abcb6e30694a2ef9a4c3553856eef0a0e70bbe2 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 10 Feb 2016 17:03:57 -0500 Subject: [PATCH 08/38] Add webhook notification backend --- awx/main/models/notifications.py | 4 ++- awx/main/notifications/hipchat_backend.py | 7 ++--- awx/main/notifications/webhook_backend.py | 32 +++++++++++++++++++++++ 3 files changed, 39 insertions(+), 4 deletions(-) create mode 100644 awx/main/notifications/webhook_backend.py diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 3d4dd9252e..ede9e42795 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -15,6 +15,7 @@ from awx.main.notifications.slack_backend import SlackBackend from awx.main.notifications.twilio_backend import TwilioBackend from awx.main.notifications.pagerduty_backend import PagerDutyBackend from awx.main.notifications.hipchat_backend import HipChatBackend +from awx.main.notifications.webhook_backend import WebhookBackend # Django-JSONField from jsonfield import JSONField @@ -29,7 +30,8 @@ class NotificationTemplate(CommonModel): ('slack', _('Slack'), SlackBackend), ('twilio', _('Twilio'), TwilioBackend), ('pagerduty', _('Pagerduty'), PagerDutyBackend), - ('hipchat', _('HipChat'), HipChatBackend)] + ('hipchat', _('HipChat'), HipChatBackend), + ('webhook', _('Webhook'), WebhookBackend)] NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES] CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES]) diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py index 2bb91b3919..a5b7f561b6 100644 --- a/awx/main/notifications/hipchat_backend.py +++ b/awx/main/notifications/hipchat_backend.py @@ -39,8 +39,9 @@ class HipChatBackend(BaseEmailBackend): "notify": 
self.notify, "from": m.from_email, "message_format": "text"}) - if r.status_code != 204 and not self.fail_silently: + if r.status_code != 204: logger.error("Error sending messages: {}".format(r.text)) - raise Exception("Error sending message to hipchat: {}".format(r.text)) - sent_messages += 1 + if not self.fail_silently: + raise Exception("Error sending message to hipchat: {}".format(r.text)) + sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py new file mode 100644 index 0000000000..ffef641198 --- /dev/null +++ b/awx/main/notifications/webhook_backend.py @@ -0,0 +1,32 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import logging + +import requests + +from django.core.mail.backends.base import BaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.webhook_backend') + +class WebhookBackend(BaseEmailBackend): + + init_parameters = {"url": {"label": "Target URL", "type": "string"}, + "headers": {"label": "HTTP Headers", "type": "object"}} + recipient_parameter = "url" + sender_parameter = None + + def __init__(self, headers, fail_silently=False, **kwargs): + super(WebhookBackend, self).__init__(fail_silently=fail_silently) + + def send_messages(self, messages): + sent_messages = 0 + for m in messages: + r = requests.post("{}".format(m.recipients()[0]), + headers=self.headers) + if r.status_code >= 400: + logger.error("Error sending notification webhook: {}".format(r.text)) + if not self.fail_silently: + raise Exception("Error sending notification webhook: {}".format(r.text)) + sent_messages += 1 + return sent_messages From 9d6739045ab23e7a966faea3caf1e653f273d70b Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 11 Feb 2016 15:34:27 -0500 Subject: [PATCH 09/38] Implement irc notification backend --- awx/main/models/notifications.py | 4 +- awx/main/notifications/irc_backend.py | 93 +++++++++++++++++++++++++ 
awx/main/notifications/slack_backend.py | 2 +- requirements/requirements.txt | 1 + 4 files changed, 98 insertions(+), 2 deletions(-) create mode 100644 awx/main/notifications/irc_backend.py diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index ede9e42795..4fc005256b 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -16,6 +16,7 @@ from awx.main.notifications.twilio_backend import TwilioBackend from awx.main.notifications.pagerduty_backend import PagerDutyBackend from awx.main.notifications.hipchat_backend import HipChatBackend from awx.main.notifications.webhook_backend import WebhookBackend +from awx.main.notifications.irc_backend import IrcBackend # Django-JSONField from jsonfield import JSONField @@ -31,7 +32,8 @@ class NotificationTemplate(CommonModel): ('twilio', _('Twilio'), TwilioBackend), ('pagerduty', _('Pagerduty'), PagerDutyBackend), ('hipchat', _('HipChat'), HipChatBackend), - ('webhook', _('Webhook'), WebhookBackend)] + ('webhook', _('Webhook'), WebhookBackend), + ('irc', _('IRC'), IrcBackend)] NOTIFICATION_TYPE_CHOICES = [(x[0], x[1]) for x in NOTIFICATION_TYPES] CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES]) diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py new file mode 100644 index 0000000000..2b0944b74a --- /dev/null +++ b/awx/main/notifications/irc_backend.py @@ -0,0 +1,93 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. 
+ +import time +import ssl +import logging + +import irc.client + +from django.core.mail.backends.base import BaseEmailBackend + +logger = logging.getLogger('awx.main.notifications.irc_backend') + +class IrcBackend(BaseEmailBackend): + + init_parameters = {"server": {"label": "IRC Server Address", "type": "string"}, + "port": {"label": "IRC Server Port", "type": "int"}, + "nickname": {"label": "IRC Nick", "type": "string"}, + "password": {"label": "IRC Server Password", "type": "password"}, + "use_ssl": {"label": "SSL Connection", "type": "bool"}, + "targets": {"label": "Destination Channels or Users", "type": "list"}} + recipient_parameter = "targets" + sender_parameter = None + + def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs): + super(IrcBackend, self).__init__(fail_silently=fail_silently) + self.server = server + self.port = port + self.nickname = nickname + self.password = password if password != "" else None + self.use_ssl = use_ssl + self.connection = None + + def open(self): + if self.connection is not None: + return False + if self.use_ssl: + connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket) + else: + connection_factory = irc.connection.Factory() + try: + self.reactor = irc.client.Reactor() + self.connection = self.reactor.server().connect( + self.server, + self.port, + self.nickname, + password=self.password, + connect_factory=connection_factory, + ) + except irc.client.ServerConnectionError as e: + logger.error("Exception connecting to irc server: {}".format(e)) + if not self.fail_silently: + raise + return True + + def close(self): + if self.connection is None: + return + self.connection = None + + def on_connect(self, connection, event): + for c in self.channels: + if irc.client.is_channel(c): + connection.join(c) + else: + for m in self.channels[c]: + connection.privmsg(c, m.subject) + self.channels_sent += 1 + + def on_join(self, connection, event): + for m in 
self.channels[event.target]: + connection.privmsg(event.target, m.subject) + self.channels_sent += 1 + + def send_messages(self, messages): + if self.connection is None: + self.open() + self.channels = {} + self.channels_sent = 0 + for m in messages: + for r in m.recipients(): + if r not in self.channels: + self.channels[r] = [] + self.channels[r].append(m) + self.connection.add_global_handler("welcome", self.on_connect) + self.connection.add_global_handler("join", self.on_join) + start_time = time.time() + process_time = time.time() + while self.channels_sent < len(self.channels) and (process_time-start_time) < 60: + self.reactor.process_once(0.1) + process_time = time.time() + self.reactor.disconnect_all() + return self.channels_sent diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 950d5c2c6e..3bf4f32114 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -44,7 +44,7 @@ class SlackBackend(BaseEmailBackend): self.connection.rtm_send_message(r, m.body) sent_messages += 1 except Exception as e: + logger.error("Exception sending messages: {}".format(e)) if not self.fail_silently: raise - logger.error("Exception sending messages: {}".format(e)) return sent_messages diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 0f42f235a3..e8c62488f5 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -47,6 +47,7 @@ httplib2==0.9 idna==2.0 importlib==1.0.3 ipaddress==1.0.14 +irc==13.3.1 iso8601==0.1.10 isodate==0.5.1 jsonpatch==1.11 From 56b0da30f1861d9d0510ff7e05215374c92817cf Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 9 Feb 2016 16:20:44 -0500 Subject: [PATCH 10/38] adds fact model --- .../migrations/0003_auto_20160209_1615.py | 31 ++++++++++++++++++ awx/main/models/__init__.py | 1 + awx/main/models/fact.py | 32 +++++++++++++++++++ docs/licenses/django-jsonbfield.txt | 27 ++++++++++++++++ 
requirements/requirements.txt | 1 + 5 files changed, 92 insertions(+) create mode 100644 awx/main/migrations/0003_auto_20160209_1615.py create mode 100644 awx/main/models/fact.py create mode 100644 docs/licenses/django-jsonbfield.txt diff --git a/awx/main/migrations/0003_auto_20160209_1615.py b/awx/main/migrations/0003_auto_20160209_1615.py new file mode 100644 index 0000000000..c489c1d830 --- /dev/null +++ b/awx/main/migrations/0003_auto_20160209_1615.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import jsonbfield.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0002_v300_changes'), + ] + + operations = [ + migrations.CreateModel( + name='Fact', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('timestamp', models.DateTimeField(default=None, editable=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('modified', models.DateTimeField(auto_now=True)), + ('module', models.CharField(max_length=128)), + ('facts', jsonbfield.fields.JSONField(default={}, blank=True)), + ('host', models.ForeignKey(related_name='facts', to='main.Host')), + ], + ), + migrations.AlterIndexTogether( + name='fact', + index_together=set([('timestamp', 'module', 'host')]), + ), + ] diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 23cf591e6b..9b00bbc14c 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -17,6 +17,7 @@ from awx.main.models.schedules import * # noqa from awx.main.models.activity_stream import * # noqa from awx.main.models.ha import * # noqa from awx.main.models.configuration import * # noqa +from awx.main.models.fact import * # noqa # Monkeypatch Django serializer to ignore django-taggit fields (which break # the dumpdata command; see https://github.com/alex/django-taggit/issues/155). 
diff --git a/awx/main/models/fact.py b/awx/main/models/fact.py new file mode 100644 index 0000000000..35288c489c --- /dev/null +++ b/awx/main/models/fact.py @@ -0,0 +1,32 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +from django.db import models +from jsonbfield.fields import JSONField + +from awx.main.models import Host + +__all__ = ('Fact', ) + +class Fact(models.Model): + """A model representing a fact returned from Ansible. + Facts are stored as JSON dictionaries. + """ + host = models.ForeignKey( + Host, + related_name='facts', + db_index=True, + on_delete=models.CASCADE, + ) + timestamp = models.DateTimeField(default=None, editable=False) + created = models.DateTimeField(editable=False, auto_now_add=True) + modified = models.DateTimeField(editable=False, auto_now=True) + module = models.CharField(max_length=128) + facts = JSONField(blank=True, default={}) + + class Meta: + app_label = 'main' + index_together = [ + ["timestamp", "module", "host"], + ] + diff --git a/docs/licenses/django-jsonbfield.txt b/docs/licenses/django-jsonbfield.txt new file mode 100644 index 0000000000..5f4f225dd2 --- /dev/null +++ b/docs/licenses/django-jsonbfield.txt @@ -0,0 +1,27 @@ +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 48857bc6d2..8f889704fb 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -49,6 +49,7 @@ importlib==1.0.3 ipaddress==1.0.14 iso8601==0.1.10 isodate==0.5.1 +git+https://github.com/chrismeyersfsu/django-jsonbfield@master#egg=django-jsonbfield jsonpatch==1.11 jsonpointer==1.9 jsonschema==2.5.1 From dde70dafec20097bfdef7263c5ce2cc592195231 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 17 Feb 2016 15:18:18 +0000 Subject: [PATCH 11/38] Refactor NotificationTemplate to Notifier --- awx/api/metadata.py | 4 +- awx/api/serializers.py | 40 +++---- awx/api/urls.py | 36 +++--- awx/api/views.py | 128 +++++++++++----------- awx/main/access.py | 8 +- awx/main/models/__init__.py | 2 +- awx/main/models/activity_stream.py | 2 +- awx/main/models/base.py | 18 +-- awx/main/models/inventory.py | 10 +- awx/main/models/jobs.py | 10 +- awx/main/models/notifications.py | 12 +- awx/main/models/projects.py | 10 +- awx/main/models/unified_jobs.py | 2 +- awx/main/notifications/webhook_backend.py | 2 + awx/main/signals.py | 2 +- 15 files changed, 145 insertions(+), 141 deletions(-) diff --git a/awx/api/metadata.py 
b/awx/api/metadata.py index b5e6d7043a..05bd11a4c7 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -12,7 +12,7 @@ from rest_framework import serializers from rest_framework.request import clone_request # Ansible Tower -from awx.main.models import InventorySource, NotificationTemplate +from awx.main.models import InventorySource, Notifier class Metadata(metadata.SimpleMetadata): @@ -58,7 +58,7 @@ class Metadata(metadata.SimpleMetadata): # Special handling of notification configuration where the required properties # are conditional on the type selected. if field.field_name == 'notification_configuration': - for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES: + for (notification_type_name, notification_tr_name, notification_type_class) in Notifier.NOTIFICATION_TYPES: field_info[notification_type_name] = notification_type_class.init_parameters # Update type of fields returned... diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 7bc25f532d..3a324ce21f 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -769,9 +769,9 @@ class OrganizationSerializer(BaseSerializer): teams = reverse('api:organization_teams_list', args=(obj.pk,)), activity_stream = reverse('api:organization_activity_stream_list', args=(obj.pk,)), notifiers = reverse('api:organization_notifiers_list', args=(obj.pk,)), - notifiers_any = reverse('api:organization_notifications_any_list', args=(obj.pk,)), - notifiers_success = reverse('api:organization_notifications_success_list', args=(obj.pk,)), - notifiers_error = reverse('api:organization_notifications_error_list', args=(obj.pk,)), + notifiers_any = reverse('api:organization_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:organization_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:organization_notifiers_error_list', args=(obj.pk,)), )) return res @@ -851,9 +851,9 @@ class 
ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): project_updates = reverse('api:project_updates_list', args=(obj.pk,)), schedules = reverse('api:project_schedules_list', args=(obj.pk,)), activity_stream = reverse('api:project_activity_stream_list', args=(obj.pk,)), - notifiers_any = reverse('api:project_notifications_any_list', args=(obj.pk,)), - notifiers_success = reverse('api:project_notifications_success_list', args=(obj.pk,)), - notifiers_error = reverse('api:project_notifications_error_list', args=(obj.pk,)), + notifiers_any = reverse('api:project_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:project_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:project_notifiers_error_list', args=(obj.pk,)), )) # Backwards compatibility. if obj.current_update: @@ -1298,9 +1298,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt activity_stream = reverse('api:inventory_activity_stream_list', args=(obj.pk,)), hosts = reverse('api:inventory_source_hosts_list', args=(obj.pk,)), groups = reverse('api:inventory_source_groups_list', args=(obj.pk,)), - notifiers_any = reverse('api:inventory_source_notifications_any_list', args=(obj.pk,)), - notifiers_success = reverse('api:inventory_source_notifications_success_list', args=(obj.pk,)), - notifiers_error = reverse('api:inventory_source_notifications_error_list', args=(obj.pk,)), + notifiers_any = reverse('api:inventory_source_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:inventory_source_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:inventory_source_notifiers_error_list', args=(obj.pk,)), )) if obj.inventory and obj.inventory.active: res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) @@ -1564,9 +1564,9 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): schedules = reverse('api:job_template_schedules_list', 
args=(obj.pk,)), activity_stream = reverse('api:job_template_activity_stream_list', args=(obj.pk,)), launch = reverse('api:job_template_launch', args=(obj.pk,)), - notifiers_any = reverse('api:job_template_notifications_any_list', args=(obj.pk,)), - notifiers_success = reverse('api:job_template_notifications_success_list', args=(obj.pk,)), - notifiers_error = reverse('api:job_template_notifications_error_list', args=(obj.pk,)), + notifiers_any = reverse('api:job_template_notifiers_any_list', args=(obj.pk,)), + notifiers_success = reverse('api:job_template_notifiers_success_list', args=(obj.pk,)), + notifiers_error = reverse('api:job_template_notifiers_error_list', args=(obj.pk,)), )) if obj.host_config_key: res['callback'] = reverse('api:job_template_callback', args=(obj.pk,)) @@ -2053,22 +2053,24 @@ class JobLaunchSerializer(BaseSerializer): attrs = super(JobLaunchSerializer, self).validate(attrs) return attrs -class NotificationTemplateSerializer(BaseSerializer): +class NotifierSerializer(BaseSerializer): class Meta: - model = NotificationTemplate + model = Notifier fields = ('*', 'organization', 'notification_type', 'notification_configuration') def get_related(self, obj): - res = super(NotificationTemplateSerializer, self).get_related(obj) + res = super(NotifierSerializer, self).get_related(obj) res.update(dict( - test = reverse('api:notification_template_test', args=(obj.pk,)), - notifications = reverse('api:notification_template_notification_list', args=(obj.pk,)), + test = reverse('api:notifier_test', args=(obj.pk,)), + notifications = reverse('api:notifier_notification_list', args=(obj.pk,)), )) + if obj.organization and obj.organization.active: + res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,)) return res def validate(self, attrs): - notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] + notification_class = Notifier.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] 
missing_fields = [] for field in notification_class.init_parameters: if field not in attrs['notification_configuration']: @@ -2088,7 +2090,7 @@ class NotificationSerializer(BaseSerializer): def get_related(self, obj): res = super(NotificationSerializer, self).get_related(obj) res.update(dict( - notification_template = reverse('api:notification_template_detail', args=(obj.notifier.pk,)), + notifier = reverse('api:notifier_detail', args=(obj.notifier.pk,)), )) return res diff --git a/awx/api/urls.py b/awx/api/urls.py index 7e55e46a5c..d9e0fca5cb 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -21,9 +21,9 @@ organization_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/teams/$', 'organization_teams_list'), url(r'^(?P[0-9]+)/activity_stream/$', 'organization_activity_stream_list'), url(r'^(?P[0-9]+)/notifiers/$', 'organization_notifiers_list'), - url(r'^(?P[0-9]+)/notifications_any/$', 'organization_notifications_any_list'), - url(r'^(?P[0-9]+)/notifications_error/$', 'organization_notifications_error_list'), - url(r'^(?P[0-9]+)/notifications_success/$', 'organization_notifications_success_list'), + url(r'^(?P[0-9]+)/notifiers_any/$', 'organization_notifiers_any_list'), + url(r'^(?P[0-9]+)/notifiers_error/$', 'organization_notifiers_error_list'), + url(r'^(?P[0-9]+)/notifiers_success/$', 'organization_notifiers_success_list'), ) user_urls = patterns('awx.api.views', @@ -48,9 +48,9 @@ project_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/project_updates/$', 'project_updates_list'), url(r'^(?P[0-9]+)/activity_stream/$', 'project_activity_stream_list'), url(r'^(?P[0-9]+)/schedules/$', 'project_schedules_list'), - url(r'^(?P[0-9]+)/notifications_any/$', 'project_notifications_any_list'), - url(r'^(?P[0-9]+)/notifications_error/$', 'project_notifications_error_list'), - url(r'^(?P[0-9]+)/notifications_success/$', 'project_notifications_success_list'), + url(r'^(?P[0-9]+)/notifiers_any/$', 'project_notifiers_any_list'), + url(r'^(?P[0-9]+)/notifiers_error/$', 
'project_notifiers_error_list'), + url(r'^(?P[0-9]+)/notifiers_success/$', 'project_notifiers_success_list'), ) project_update_urls = patterns('awx.api.views', @@ -128,9 +128,9 @@ inventory_source_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/schedules/$', 'inventory_source_schedules_list'), url(r'^(?P[0-9]+)/groups/$', 'inventory_source_groups_list'), url(r'^(?P[0-9]+)/hosts/$', 'inventory_source_hosts_list'), - url(r'^(?P[0-9]+)/notifications_any/$', 'inventory_source_notifications_any_list'), - url(r'^(?P[0-9]+)/notifications_error/$', 'inventory_source_notifications_error_list'), - url(r'^(?P[0-9]+)/notifications_success/$', 'inventory_source_notifications_success_list'), + url(r'^(?P[0-9]+)/notifiers_any/$', 'inventory_source_notifiers_any_list'), + url(r'^(?P[0-9]+)/notifiers_error/$', 'inventory_source_notifiers_error_list'), + url(r'^(?P[0-9]+)/notifiers_success/$', 'inventory_source_notifiers_success_list'), ) inventory_update_urls = patterns('awx.api.views', @@ -165,9 +165,9 @@ job_template_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/schedules/$', 'job_template_schedules_list'), url(r'^(?P[0-9]+)/survey_spec/$', 'job_template_survey_spec'), url(r'^(?P[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'), - url(r'^(?P[0-9]+)/notifications_any/$', 'job_template_notifications_any_list'), - url(r'^(?P[0-9]+)/notifications_error/$', 'job_template_notifications_error_list'), - url(r'^(?P[0-9]+)/notifications_success/$', 'job_template_notifications_success_list'), + url(r'^(?P[0-9]+)/notifiers_any/$', 'job_template_notifiers_any_list'), + url(r'^(?P[0-9]+)/notifiers_error/$', 'job_template_notifiers_error_list'), + url(r'^(?P[0-9]+)/notifiers_success/$', 'job_template_notifiers_success_list'), ) job_urls = patterns('awx.api.views', @@ -225,11 +225,11 @@ system_job_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/cancel/$', 'system_job_cancel'), ) -notification_template_urls = patterns('awx.api.views', - url(r'^$', 
'notification_template_list'), - url(r'^(?P[0-9]+)/$', 'notification_template_detail'), - url(r'^(?P[0-9]+)/test/$', 'notification_template_test'), - url(r'^(?P[0-9]+)/notifications/$', 'notification_template_notification_list'), +notifier_urls = patterns('awx.api.views', + url(r'^$', 'notifier_list'), + url(r'^(?P[0-9]+)/$', 'notifier_detail'), + url(r'^(?P[0-9]+)/test/$', 'notifier_test'), + url(r'^(?P[0-9]+)/notifications/$', 'notifier_notification_list'), ) notification_urls = patterns('awx.api.views', @@ -285,7 +285,7 @@ v1_urls = patterns('awx.api.views', url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), url(r'^system_job_templates/', include(system_job_template_urls)), url(r'^system_jobs/', include(system_job_urls)), - url(r'^notification_templates/', include(notification_template_urls)), + url(r'^notifiers/', include(notifier_urls)), url(r'^notifications/', include(notification_urls)), url(r'^unified_job_templates/$', 'unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), diff --git a/awx/api/views.py b/awx/api/views.py index be1bd3b609..439de3f845 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -135,7 +135,7 @@ class ApiV1RootView(APIView): data['system_job_templates'] = reverse('api:system_job_template_list') data['system_jobs'] = reverse('api:system_job_list') data['schedules'] = reverse('api:schedule_list') - data['notification_templates'] = reverse('api:notification_template_list') + data['notifiers'] = reverse('api:notifier_list') data['notifications'] = reverse('api:notification_list') data['unified_job_templates'] = reverse('api:unified_job_template_list') data['unified_jobs'] = reverse('api:unified_job_list') @@ -687,32 +687,32 @@ class OrganizationActivityStreamList(SubListAPIView): class OrganizationNotifiersList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model 
= Organization - relationship = 'notification_templates' + relationship = 'notifiers' parent_key = 'organization' -class OrganizationNotificationsAnyList(SubListCreateAttachDetachAPIView): +class OrganizationNotifiersAnyList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = Organization - relationship = 'notification_any' + relationship = 'notifiers_any' -class OrganizationNotificationsErrorList(SubListCreateAttachDetachAPIView): +class OrganizationNotifiersErrorList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = Organization - relationship = 'notification_erros' + relationship = 'notifiers_error' -class OrganizationNotificationsSuccessList(SubListCreateAttachDetachAPIView): +class OrganizationNotifiersSuccessList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = Organization - relationship = 'notification_success' + relationship = 'notifiers_success' class TeamList(ListCreateAPIView): @@ -879,26 +879,26 @@ class ProjectActivityStreamList(SubListAPIView): return qs.filter(project=parent) return qs.filter(Q(project=parent) | Q(credential__in=parent.credential)) -class ProjectNotificationsAnyList(SubListCreateAttachDetachAPIView): +class ProjectNotifiersAnyList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = Project - relationship = 'notification_any' + relationship = 'notifiers_any' -class ProjectNotificationsErrorList(SubListCreateAttachDetachAPIView): +class 
ProjectNotifiersErrorList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = Project - relationship = 'notification_errors' + relationship = 'notifiers_error' -class ProjectNotificationsSuccessList(SubListCreateAttachDetachAPIView): +class ProjectNotifiersSuccessList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = Project - relationship = 'notification_success' + relationship = 'notifiers_success' class ProjectUpdatesList(SubListAPIView): @@ -1781,26 +1781,26 @@ class InventorySourceActivityStreamList(SubListAPIView): # Okay, let it through. return super(type(self), self).get(request, *args, **kwargs) -class InventorySourceNotificationsAnyList(SubListCreateAttachDetachAPIView): +class InventorySourceNotifiersAnyList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = InventorySource - relationship = 'notification_any' + relationship = 'notifiers_any' -class InventorySourceNotificationsErrorList(SubListCreateAttachDetachAPIView): +class InventorySourceNotifiersErrorList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = InventorySource - relationship = 'notification_errors' + relationship = 'notifiers_error' -class InventorySourceNotificationsSuccessList(SubListCreateAttachDetachAPIView): +class InventorySourceNotifiersSuccessList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer 
parent_model = InventorySource - relationship = 'notification_success' + relationship = 'notifiers_success' class InventorySourceHostsList(SubListAPIView): @@ -2027,26 +2027,26 @@ class JobTemplateActivityStreamList(SubListAPIView): # Okay, let it through. return super(type(self), self).get(request, *args, **kwargs) -class JobTemplateNotificationsAnyList(SubListCreateAttachDetachAPIView): +class JobTemplateNotifiersAnyList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = JobTemplate - relationship = 'notification_any' + relationship = 'notifiers_any' -class JobTemplateNotificationsErrorList(SubListCreateAttachDetachAPIView): +class JobTemplateNotifiersErrorList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = JobTemplate - relationship = 'notification_errors' + relationship = 'notifiers_error' -class JobTemplateNotificationsSuccessList(SubListCreateAttachDetachAPIView): +class JobTemplateNotifiersSuccessList(SubListCreateAttachDetachAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer parent_model = JobTemplate - relationship = 'notification_success' + relationship = 'notifiers_success' class JobTemplateCallback(GenericAPIView): @@ -3032,22 +3032,22 @@ class AdHocCommandStdout(UnifiedJobStdout): model = AdHocCommand new_in_220 = True -class NotificationTemplateList(ListCreateAPIView): +class NotifierList(ListCreateAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer new_in_300 = True -class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): +class 
NotifierDetail(RetrieveUpdateDestroyAPIView): - model = NotificationTemplate - serializer_class = NotificationTemplateSerializer + model = Notifier + serializer_class = NotifierSerializer new_in_300 = True -class NotificationTemplateTest(GenericAPIView): +class NotifierTest(GenericAPIView): - view_name = 'Notification Template Test' - model = NotificationTemplate + view_name = 'Notifier Test' + model = Notifier serializer_class = EmptySerializer new_in_300 = True @@ -3063,11 +3063,11 @@ class NotificationTemplateTest(GenericAPIView): headers=headers, status=status.HTTP_202_ACCEPTED) -class NotificationTemplateNotificationList(SubListAPIView): +class NotifierNotificationList(SubListAPIView): model = Notification serializer_class = NotificationSerializer - parent_model = NotificationTemplate + parent_model = Notifier relationship = 'notifications' parent_key = 'notifier' @@ -3079,7 +3079,7 @@ class NotificationList(ListAPIView): class NotificationDetail(RetrieveAPIView): - model = NotificationTemplate + model = Notification serializer_class = NotificationSerializer new_in_300 = True diff --git a/awx/main/access.py b/awx/main/access.py index 3ffbaf7f85..fb89dd30d8 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1484,11 +1484,11 @@ class ScheduleAccess(BaseAccess): else: return False -class NotificationTemplateAccess(BaseAccess): +class NotifierAccess(BaseAccess): ''' - I can see/use a notification template if I have permission to + I can see/use a notifier if I have permission to ''' - model = NotificationTemplate + model = Notifier def get_queryset(self): qs = self.model.objects.filter(active=True).distinct() @@ -1708,5 +1708,5 @@ register_access(UnifiedJob, UnifiedJobAccess) register_access(ActivityStream, ActivityStreamAccess) register_access(CustomInventoryScript, CustomInventoryScriptAccess) register_access(TowerSettings, TowerSettingsAccess) -register_access(NotificationTemplate, NotificationTemplateAccess) +register_access(Notifier, NotifierAccess) 
register_access(Notification, NotificationAccess) diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 2397b6137b..cf8235ba18 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -61,5 +61,5 @@ activity_stream_registrar.connect(AdHocCommand) activity_stream_registrar.connect(Schedule) activity_stream_registrar.connect(CustomInventoryScript) activity_stream_registrar.connect(TowerSettings) -activity_stream_registrar.connect(NotificationTemplate) +activity_stream_registrar.connect(Notifier) activity_stream_registrar.connect(Notification) diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 12a54c7af2..dfada31484 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -53,7 +53,7 @@ class ActivityStream(models.Model): ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) schedule = models.ManyToManyField("Schedule", blank=True) custom_inventory_script = models.ManyToManyField("CustomInventoryScript", blank=True) - notification_template = models.ManyToManyField("NotificationTemplate", blank=True) + notifier = models.ManyToManyField("Notifier", blank=True) notification = models.ManyToManyField("Notification", blank=True) def get_absolute_url(self): diff --git a/awx/main/models/base.py b/awx/main/models/base.py index f3f158855c..c4edfbd8ba 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -343,20 +343,20 @@ class NotificationFieldsModel(BaseModel): class Meta: abstract = True - notification_errors = models.ManyToManyField( - "NotificationTemplate", + notifiers_error = models.ManyToManyField( + "Notifier", blank=True, - related_name='%(class)s_notifications_for_errors' + related_name='%(class)s_notifiers_for_errors' ) - notification_success = models.ManyToManyField( - "NotificationTemplate", + notifiers_success = models.ManyToManyField( + "Notifier", blank=True, - related_name='%(class)s_notifications_for_success' + 
related_name='%(class)s_notifiers_for_success' ) - notification_any = models.ManyToManyField( - "NotificationTemplate", + notifiers_any = models.ManyToManyField( + "Notifier", blank=True, - related_name='%(class)s_notifications_for_any' + related_name='%(class)s_notifiers_for_any' ) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index febf010f20..b0acf0a90b 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -23,7 +23,7 @@ from awx.main.managers import HostManager from awx.main.models.base import * # noqa from awx.main.models.jobs import Job from awx.main.models.unified_jobs import * # noqa -from awx.main.models.notifications import NotificationTemplate +from awx.main.models.notifications import Notifier from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript'] @@ -1184,10 +1184,10 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions): @property def notifiers(self): # Return all notifiers defined on the Project, and on the Organization for each trigger type - base_notifiers = NotificationTemplate.objects.filter(active=True) - error_notifiers = list(base_notifiers.filter(organization_notifications_for_errors__in=self)) - success_notifiers = list(base_notifiers.filter(organization_notifications_for_success__in=self)) - any_notifiers = list(base_notifiers.filter(organization_notifications_for_any__in=self)) + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors__in=self)) + success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success__in=self)) + any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any__in=self)) return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) def clean_source(self): diff --git a/awx/main/models/jobs.py 
b/awx/main/models/jobs.py index 42f7ccf676..dd772d695d 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -22,7 +22,7 @@ from jsonfield import JSONField from awx.main.constants import CLOUD_PROVIDERS from awx.main.models.base import * # noqa from awx.main.models.unified_jobs import * # noqa -from awx.main.models.notifications import NotificationTemplate +from awx.main.models.notifications import Notifier from awx.main.utils import decrypt_field, ignore_inventory_computed_fields from awx.main.utils import emit_websocket_notification from awx.main.redact import PlainTextCleaner @@ -336,10 +336,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions): # Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type # TODO: Currently there is no org fk on project so this will need to be added once that is # available after the rbac pr - base_notifiers = NotificationTemplate.objects.filter(active=True) - error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_errors__in=[self, self.project])) - success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_success__in=[self, self.project])) - any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_any__in=[self, self.project])) + base_notifiers = Notifier.objects.filter(active=True) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project])) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project])) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project])) return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) class Job(UnifiedJob, JobOptions): diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 4fc005256b..2e1bb9df2f 100644 --- a/awx/main/models/notifications.py 
+++ b/awx/main/models/notifications.py @@ -23,9 +23,9 @@ from jsonfield import JSONField logger = logging.getLogger('awx.main.models.notifications') -__all__ = ['NotificationTemplate', 'Notification'] +__all__ = ['Notifier', 'Notification'] -class NotificationTemplate(CommonModel): +class Notifier(CommonModel): NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend), ('slack', _('Slack'), SlackBackend), @@ -45,7 +45,7 @@ class NotificationTemplate(CommonModel): blank=False, null=True, on_delete=models.SET_NULL, - related_name='notification_templates', + related_name='notifiers', ) notification_type = models.CharField( @@ -56,7 +56,7 @@ class NotificationTemplate(CommonModel): notification_configuration = JSONField(blank=False) def get_absolute_url(self): - return reverse('api:notification_template_detail', args=(self.pk,)) + return reverse('api:notifier_detail', args=(self.pk,)) @property def notification_class(self): @@ -100,7 +100,7 @@ class Notification(CreatedModifiedModel): ordering = ('pk',) notifier = models.ForeignKey( - 'NotificationTemplate', + 'Notifier', related_name='notifications', on_delete=models.CASCADE, editable=False @@ -122,7 +122,7 @@ class Notification(CreatedModifiedModel): ) notification_type = models.CharField( max_length = 32, - choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES, + choices=Notifier.NOTIFICATION_TYPE_CHOICES, ) recipients = models.TextField( blank=True, diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 730604d3e4..8a320e3cfc 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -21,7 +21,7 @@ from django.utils.timezone import now, make_aware, get_default_timezone from awx.lib.compat import slugify from awx.main.models.base import * # noqa from awx.main.models.jobs import Job -from awx.main.models.notifications import NotificationTemplate +from awx.main.models.notifications import Notifier from awx.main.models.unified_jobs import * # noqa from awx.main.utils 
import update_scm_url @@ -316,16 +316,16 @@ class Project(UnifiedJobTemplate, ProjectOptions): # Return all notifiers defined on the Project, and on the Organization for each trigger type # TODO: Currently there is no org fk on project so this will need to be added back once that is # available after the rbac pr - base_notifiers = NotificationTemplate.objects.filter(active=True) + base_notifiers = Notifier.objects.filter(active=True) # error_notifiers = list(base_notifiers.filter(Q(project_notifications_for_errors__in=self) | # Q(organization_notifications_for_errors__in=self.organization))) # success_notifiers = list(base_notifiers.filter(Q(project_notifications_for_success__in=self) | # Q(organization_notifications_for_success__in=self.organization))) # any_notifiers = list(base_notifiers.filter(Q(project_notifications_for_any__in=self) | # Q(organization_notifications_for_any__in=self.organization))) - error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_errors=self)) - success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_success=self)) - any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifications_for_any=self)) + error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self)) + success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self)) + any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self)) return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) def get_absolute_url(self): diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index c6ea2b082b..986be923fb 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -304,7 +304,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio Return notifiers relevant to this Unified Job Template ''' # NOTE: Derived classes should implement - return 
NotificationTemplate.objects.none() + return Notifier.objects.none() def create_unified_job(self, **kwargs): ''' diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index ffef641198..5bdbff0e02 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -17,12 +17,14 @@ class WebhookBackend(BaseEmailBackend): sender_parameter = None def __init__(self, headers, fail_silently=False, **kwargs): + self.headers = headers super(WebhookBackend, self).__init__(fail_silently=fail_silently) def send_messages(self, messages): sent_messages = 0 for m in messages: r = requests.post("{}".format(m.recipients()[0]), + data=m.body, headers=self.headers) if r.status_code >= 400: logger.error("Error sending notification webhook: {}".format(r.text)) diff --git a/awx/main/signals.py b/awx/main/signals.py index f4d0014905..29c5c7d016 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -307,7 +307,7 @@ model_serializer_mapping = { Job: JobSerializer, AdHocCommand: AdHocCommandSerializer, TowerSettings: TowerSettingsSerializer, - NotificationTemplate: NotificationTemplateSerializer, + Notifier: NotifierSerializer, Notification: NotificationSerializer, } From 52974648dfc8783b095fd14861360f5b2859628f Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 17 Feb 2016 16:01:54 +0000 Subject: [PATCH 12/38] Notification configuration type checking --- awx/api/serializers.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 3a324ce21f..d901184ee0 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2059,6 +2059,9 @@ class NotifierSerializer(BaseSerializer): model = Notifier fields = ('*', 'organization', 'notification_type', 'notification_configuration') + type_map = {"string": str, "int": int, "bool": bool, "list": list, + "password": str, "object": dict} + def get_related(self, obj): res 
= super(NotifierSerializer, self).get_related(obj) res.update(dict( @@ -2072,12 +2075,22 @@ class NotifierSerializer(BaseSerializer): def validate(self, attrs): notification_class = Notifier.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] missing_fields = [] + incorrect_type_fields = [] for field in notification_class.init_parameters: if field not in attrs['notification_configuration']: missing_fields.append(field) - # TODO: Type checks + continue + field_val = attrs['notification_configuration'][field] + field_type = notification_class.init_parameters[field]['type'] + expected_type = self.type_map[field_type] + if not isinstance(field_val, expected_type): + incorrect_type_fields.append((field, field_type)) if missing_fields: - raise serializers.ValidationError("Missing required fields for Notification Configuration: {}".format(missing_fields)) + error_list = ["Missing required fields for Notification Configuration: {}".format(missing_fields)] + for type_field_error in incorrect_type_fields: + error_list.append("Configuration field {} incorrect type, expected {}".format(type_field_error[0], + type_field_error[1])) + raise serializers.ValidationError(error_list) return attrs class NotificationSerializer(BaseSerializer): From eb3d663d18f904aa263278aa99298435d6078767 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Sun, 21 Feb 2016 23:11:17 -0500 Subject: [PATCH 13/38] Support notification password field encryption Modify encrypt_field and decrypt_field to support sub-fields under a dictionary object. It still uses the parent key when encrypting. 
--- awx/api/serializers.py | 32 ++++++++++++++++++++++++++------ awx/main/models/notifications.py | 31 +++++++++++++++++++++++++++++++ awx/main/utils.py | 9 ++++++--- 3 files changed, 63 insertions(+), 9 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index d901184ee0..04f5e241c3 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2059,8 +2059,20 @@ class NotifierSerializer(BaseSerializer): model = Notifier fields = ('*', 'organization', 'notification_type', 'notification_configuration') - type_map = {"string": str, "int": int, "bool": bool, "list": list, - "password": str, "object": dict} + type_map = {"string": (str, unicode), + "int": (int,), + "bool": (bool,), + "list": (list,), + "password": (str, unicode), + "object": (dict,)} + + def to_representation(self, obj): + ret = super(NotifierSerializer, self).to_representation(obj) + for field in obj.notification_class.init_parameters: + if field in ret['notification_configuration'] and \ + force_text(ret['notification_configuration'][field]).startswith('$encrypted$'): + ret['notification_configuration'][field] = '$encrypted$' + return ret def get_related(self, obj): res = super(NotifierSerializer, self).get_related(obj) @@ -2076,20 +2088,28 @@ class NotifierSerializer(BaseSerializer): notification_class = Notifier.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']] missing_fields = [] incorrect_type_fields = [] + if 'notification_configuration' not in attrs: + return attrs for field in notification_class.init_parameters: if field not in attrs['notification_configuration']: missing_fields.append(field) continue field_val = attrs['notification_configuration'][field] field_type = notification_class.init_parameters[field]['type'] - expected_type = self.type_map[field_type] - if not isinstance(field_val, expected_type): + expected_types = self.type_map[field_type] + if not type(field_val) in expected_types: incorrect_type_fields.append((field, field_type)) + continue + if 
field_type == "password" and field_val.startswith('$encrypted$'): + missing_fields.append(field) + error_list = [] if missing_fields: - error_list = ["Missing required fields for Notification Configuration: {}".format(missing_fields)] + error_list.append("Missing required fields for Notification Configuration: {}".format(missing_fields)) + if incorrect_type_fields: for type_field_error in incorrect_type_fields: - error_list.append("Configuration field {} incorrect type, expected {}".format(type_field_error[0], + error_list.append("Configuration field '{}' incorrect type, expected {}".format(type_field_error[0], type_field_error[1])) + if error_list: raise serializers.ValidationError(error_list) return attrs diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 2e1bb9df2f..d6fc9d31b8 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -10,6 +10,7 @@ from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import smart_str from awx.main.models.base import * # noqa +from awx.main.utils import encrypt_field, decrypt_field from awx.main.notifications.email_backend import CustomEmailBackend from awx.main.notifications.slack_backend import SlackBackend from awx.main.notifications.twilio_backend import TwilioBackend @@ -62,6 +63,31 @@ class Notifier(CommonModel): def notification_class(self): return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type] + def save(self, *args, **kwargs): + new_instance = not bool(self.pk) + update_fields = kwargs.get('update_fields', []) + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + if new_instance: + value = getattr(self.notification_configuration, field, '') + setattr(self, '_saved_{}'.format(field), value) + self.notification_configuration[field] = '' + else: + encrypted = encrypt_field(self, 'notification_configuration', subfield=field) + 
self.notification_configuration[field] = encrypted + if 'notification_configuration' not in update_fields: + update_fields.append('notification_configuration') + super(Notifier, self).save(*args, **kwargs) + if new_instance: + update_fields = [] + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + saved_value = getattr(self, '_saved_{}'.format(field), '') + setattr(self.notification_configuration, field, saved_value) + if 'notification_configuration' not in update_fields: + update_fields.append('notification_configuration') + self.save(update_fields=update_fields) + @property def recipients(self): return self.notification_configuration[self.notification_class.recipient_parameter] @@ -76,6 +102,11 @@ class Notifier(CommonModel): return notification def send(self, subject, body): + for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", + self.notification_class.init_parameters): + self.notification_configuration[field] = decrypt_field(self, + 'notification_configuration', + subfield=field) recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter) if not isinstance(recipients, list): recipients = [recipients] diff --git a/awx/main/utils.py b/awx/main/utils.py index 5bd00c2da6..a561648f95 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -139,12 +139,13 @@ def get_encryption_key(instance, field_name): h.update(field_name) return h.digest()[:16] - -def encrypt_field(instance, field_name, ask=False): +def encrypt_field(instance, field_name, ask=False, subfield=None): ''' Return content of the given instance and field name encrypted. 
''' value = getattr(instance, field_name) + if isinstance(value, dict) and subfield is not None: + value = value[subfield] if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'): return value value = smart_str(value) @@ -157,11 +158,13 @@ def encrypt_field(instance, field_name, ask=False): return '$encrypted$%s$%s' % ('AES', b64data) -def decrypt_field(instance, field_name): +def decrypt_field(instance, field_name, subfield=None): ''' Return content of the given instance and field name decrypted. ''' value = getattr(instance, field_name) + if isinstance(value, dict) and subfield is not None: + value = value[subfield] if not value or not value.startswith('$encrypted$'): return value algo, b64data = value[len('$encrypted$'):].split('$', 1) From 00a41ef2b95109302fdf95db2f3aa522e5caddbd Mon Sep 17 00:00:00 2001 From: Michael Abashian Date: Mon, 22 Feb 2016 15:23:41 -0500 Subject: [PATCH 14/38] Removed a lot of LogViewer related logic since stdout is no longer a modal. Moved the LogViewerHelper to StandardOutHelper. 
--- awx/ui/client/src/app.js | 2 +- awx/ui/client/src/controllers/Home.js | 71 +--- awx/ui/client/src/controllers/Inventories.js | 26 +- awx/ui/client/src/controllers/Projects.js | 33 +- awx/ui/client/src/helpers.js | 4 +- awx/ui/client/src/helpers/EventViewer.js | 4 +- awx/ui/client/src/helpers/Groups.js | 21 +- awx/ui/client/src/helpers/Hosts.js | 13 +- awx/ui/client/src/helpers/Jobs.js | 6 +- awx/ui/client/src/helpers/LogViewer.js | 390 ------------------ awx/ui/client/src/helpers/StandardOut.js | 40 ++ .../standard-out/standard-out.controller.js | 4 +- 12 files changed, 92 insertions(+), 522 deletions(-) delete mode 100644 awx/ui/client/src/helpers/LogViewer.js create mode 100644 awx/ui/client/src/helpers/StandardOut.js diff --git a/awx/ui/client/src/app.js b/awx/ui/client/src/app.js index 43ad6627fb..47031f3bc0 100644 --- a/awx/ui/client/src/app.js +++ b/awx/ui/client/src/app.js @@ -171,7 +171,7 @@ var tower = angular.module('Tower', [ 'SchedulesHelper', 'JobsListDefinition', 'LogViewerStatusDefinition', - 'LogViewerHelper', + 'StandardOutHelper', 'LogViewerOptionsDefinition', 'EventViewerHelper', 'HostEventsViewerHelper', diff --git a/awx/ui/client/src/controllers/Home.js b/awx/ui/client/src/controllers/Home.js index d857330d4b..3d8adebad2 100644 --- a/awx/ui/client/src/controllers/Home.js +++ b/awx/ui/client/src/controllers/Home.js @@ -149,7 +149,7 @@ Home.$inject = ['$scope', '$compile', '$stateParams', '$rootScope', '$location', * @description This controls the 'home/groups' page that is loaded from the dashboard * */ -export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $stateParams, LogViewer, HomeGroupList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope, +export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $location, $stateParams, HomeGroupList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope, GetBasePath, SearchInit, PaginateInit, FormatDate, GetHostsStatusMsg, GetSyncStatusMsg, 
ViewUpdateStatus, GroupsEdit, Wait, Alert, Rest, Empty, InventoryUpdate, Find, GroupsCancelUpdate, Store) { @@ -461,58 +461,6 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio attachElem(event, html, title); }); - if (scope.removeGroupSummaryReady) { - scope.removeGroupSummaryReady(); - } - scope.removeGroupSummaryReady = scope.$on('GroupSummaryReady', function(e, event, inventory, data) { - var html, title; - - Wait('stop'); - - // Build the html for our popover - html = "\n"; - html += "\n"; - html += ""; - html += ""; - html += ""; - html += ""; - html += ""; - html += "\n"; - html += "\n"; - data.results.forEach( function(row) { - html += ""; - html += ""; - html += ""; - html += ""; - html += "\n"; - }); - html += "\n"; - html += "
StatusLast SyncGroup
" + ($filter('longDate')(row.last_updated)).replace(/ /,'
') + "
" + ellipsis(row.summary_fields.group.name) + "
\n"; - title = "Sync Status"; - attachElem(event, html, title); - }); - - scope.showGroupSummary = function(event, id) { - var group, status; - if (!Empty(id)) { - group = Find({ list: scope.home_groups, key: 'id', val: id }); - status = group.summary_fields.inventory_source.status; - if (status === 'running' || status === 'failed' || status === 'error' || status === 'successful') { - Wait('start'); - Rest.setUrl(group.related.inventory_sources + '?or__source=ec2&or__source=rax&order_by=-last_job_run&page_size=5'); - Rest.get() - .success(function(data) { - scope.$emit('GroupSummaryReady', event, group, data); - }) - .error(function(data, status) { - ProcessErrors( scope, data, status, null, { hdr: 'Error!', - msg: 'Call to ' + group.related.inventory_sources + ' failed. GET returned status: ' + status - }); - }); - } - } - }; - scope.showHostSummary = function(event, id) { var url, jobs = []; if (!Empty(id)) { @@ -549,13 +497,6 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio } }; - scope.viewJob = function(url) { - LogViewer({ - scope: modal_scope, - url: url - }); - }; - scope.cancelUpdate = function(id) { var group = Find({ list: scope.home_groups, key: 'id', val: id }); GroupsCancelUpdate({ scope: scope, group: group }); @@ -564,7 +505,7 @@ export function HomeGroups($rootScope, $log, $scope, $filter, $compile, $locatio } -HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$stateParams', 'LogViewer', 'HomeGroupList', 'generateList', 'ProcessErrors', 'ReturnToCaller', +HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$location', '$stateParams', 'HomeGroupList', 'generateList', 'ProcessErrors', 'ReturnToCaller', 'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'GetHostsStatusMsg', 'GetSyncStatusMsg', 'ViewUpdateStatus', 'GroupsEdit', 'Wait', 'Alert', 'Rest', 'Empty', 'InventoryUpdate', 'Find', 'GroupsCancelUpdate', 'Store', 'Socket' ]; 
@@ -578,7 +519,7 @@ HomeGroups.$inject = ['$rootScope', '$log', '$scope', '$filter', '$compile', '$l */ export function HomeHosts($scope, $location, $stateParams, HomeHostList, GenerateList, ProcessErrors, ReturnToCaller, ClearScope, - GetBasePath, SearchInit, PaginateInit, FormatDate, SetStatus, ToggleHostEnabled, HostsEdit, Find, ShowJobSummary, ViewJob) { + GetBasePath, SearchInit, PaginateInit, FormatDate, SetStatus, ToggleHostEnabled, HostsEdit, Find, ShowJobSummary) { ClearScope('htmlTemplate'); //Garbage collection. Don't leave behind any listeners/watchers from the prior //scope. @@ -647,10 +588,6 @@ export function HomeHosts($scope, $location, $stateParams, HomeHostList, Generat $scope.search(list.iterator); }; - $scope.viewJob = function(id) { - ViewJob({ scope: $scope, id: id }); - }; - $scope.toggleHostEnabled = function (id, sources) { ToggleHostEnabled({ host_id: id, @@ -687,5 +624,5 @@ export function HomeHosts($scope, $location, $stateParams, HomeHostList, Generat HomeHosts.$inject = ['$scope', '$location', '$stateParams', 'HomeHostList', 'generateList', 'ProcessErrors', 'ReturnToCaller', 'ClearScope', 'GetBasePath', 'SearchInit', 'PaginateInit', 'FormatDate', 'SetStatus', 'ToggleHostEnabled', 'HostsEdit', - 'Find', 'ShowJobSummary', 'ViewJob' + 'Find', 'ShowJobSummary' ]; diff --git a/awx/ui/client/src/controllers/Inventories.js b/awx/ui/client/src/controllers/Inventories.js index a506d78ead..234747fdb7 100644 --- a/awx/ui/client/src/controllers/Inventories.js +++ b/awx/ui/client/src/controllers/Inventories.js @@ -16,7 +16,7 @@ export function InventoriesList($scope, $rootScope, $location, $log, $stateParams, $compile, $filter, sanitizeFilter, Rest, Alert, InventoryList, generateList, Prompt, SearchInit, PaginateInit, ReturnToCaller, ClearScope, ProcessErrors, GetBasePath, Wait, - EditInventoryProperties, Find, Empty, LogViewer, $state) { + EditInventoryProperties, Find, Empty, $state) { var list = InventoryList, defaultUrl = 
GetBasePath('inventory'), @@ -295,10 +295,12 @@ export function InventoriesList($scope, $rootScope, $location, $log, }; $scope.viewJob = function(url) { - LogViewer({ - scope: $scope, - url: url - }); + + // Pull the id out of the URL + var id = url.replace(/^\//, '').split('/')[3]; + + $state.go('inventorySyncStdout', {id: id}); + }; $scope.editInventoryProperties = function (inventory_id) { @@ -364,7 +366,7 @@ export function InventoriesList($scope, $rootScope, $location, $log, InventoriesList.$inject = ['$scope', '$rootScope', '$location', '$log', '$stateParams', '$compile', '$filter', 'sanitizeFilter', 'Rest', 'Alert', 'InventoryList', 'generateList', 'Prompt', 'SearchInit', 'PaginateInit', 'ReturnToCaller', 'ClearScope', 'ProcessErrors', - 'GetBasePath', 'Wait', 'EditInventoryProperties', 'Find', 'Empty', 'LogViewer', '$state' + 'GetBasePath', 'Wait', 'EditInventoryProperties', 'Find', 'Empty', '$state' ]; @@ -781,7 +783,7 @@ export function InventoriesManage ($log, $scope, $rootScope, $location, GetHostsStatusMsg, GroupsEdit, InventoryUpdate, GroupsCancelUpdate, ViewUpdateStatus, GroupsDelete, Store, HostsEdit, HostsDelete, EditInventoryProperties, ToggleHostEnabled, ShowJobSummary, - InventoryGroupsHelp, HelpDialog, ViewJob, + InventoryGroupsHelp, HelpDialog, GroupsCopy, HostsCopy, $stateParams) { var PreviousSearchParams, @@ -1254,12 +1256,8 @@ export function InventoriesManage ($log, $scope, $rootScope, $location, opts.autoShow = params.autoShow || false; } HelpDialog(opts); - }; - - $scope.viewJob = function(id) { - ViewJob({ scope: $scope, id: id }); - }; - + } +; $scope.showHosts = function (group_id, show_failures) { // Clicked on group if (group_id !== null) { @@ -1293,6 +1291,6 @@ InventoriesManage.$inject = ['$log', '$scope', '$rootScope', '$location', 'GroupsEdit', 'InventoryUpdate', 'GroupsCancelUpdate', 'ViewUpdateStatus', 'GroupsDelete', 'Store', 'HostsEdit', 'HostsDelete', 'EditInventoryProperties', 'ToggleHostEnabled', 'ShowJobSummary', - 
'InventoryGroupsHelp', 'HelpDialog', 'ViewJob', 'GroupsCopy', + 'InventoryGroupsHelp', 'HelpDialog', 'GroupsCopy', 'HostsCopy', '$stateParams' ]; diff --git a/awx/ui/client/src/controllers/Projects.js b/awx/ui/client/src/controllers/Projects.js index d7f5786b6d..5650500088 100644 --- a/awx/ui/client/src/controllers/Projects.js +++ b/awx/ui/client/src/controllers/Projects.js @@ -15,7 +15,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, Rest, Alert, ProjectList, GenerateList, Prompt, SearchInit, PaginateInit, ReturnToCaller, ClearScope, ProcessErrors, GetBasePath, SelectionInit, ProjectUpdate, Refresh, Wait, GetChoices, Empty, - Find, LogViewer, GetProjectIcon, GetProjectToolTip, $filter, $state) { + Find, GetProjectIcon, GetProjectToolTip, $filter, $state) { ClearScope(); @@ -200,24 +200,19 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, $state.transitionTo('projects.edit', {id: id}); }; - if ($scope.removeShowLogViewer) { - $scope.removeShowLogViewer(); + if ($scope.removeGoToJobDetails) { + $scope.removeGoToJobDetails(); } - $scope.removeShowLogViewer = $scope.$on('ShowLogViewer', function(e, data) { - if (data.related.current_update) { + $scope.removeGoToJobDetails = $scope.$on('GoToJobDetails', function(e, data) { + if (data.related.current_update || data.related.last_update) { + Wait('start'); - LogViewer({ - scope: $scope, - url: data.related.current_update, - getIcon: GetProjectIcon - }); - } else if (data.related.last_update) { - Wait('start'); - LogViewer({ - scope: $scope, - url: data.related.last_update, - getIcon: GetProjectIcon - }); + + // Pull the id out of the URL + var id = (data.related.current_update) ? data.related.current_update.replace(/^\//, '').split('/')[3] : data.related.last_update.replace(/^\//, '').split('/')[3]; + + $state.go('scmUpdateStdout', {id: id}); + } else { Alert('No Updates Available', 'There is no SCM update information available for this project. 
An update has not yet been ' + ' completed. If you have not already done so, start an update for this project.', 'alert-info'); @@ -235,7 +230,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, Rest.setUrl(project.url); Rest.get() .success(function(data) { - $scope.$emit('ShowLogViewer', data); + $scope.$emit('GoToJobDetails', data); }) .error(function(data, status) { ProcessErrors($scope, data, status, null, { hdr: 'Error!', @@ -374,7 +369,7 @@ ProjectsList.$inject = ['$scope', '$rootScope', '$location', '$log', 'SearchInit', 'PaginateInit', 'ReturnToCaller', 'ClearScope', 'ProcessErrors', 'GetBasePath', 'SelectionInit', 'ProjectUpdate', 'Refresh', 'Wait', 'GetChoices', 'Empty', 'Find', - 'LogViewer', 'GetProjectIcon', 'GetProjectToolTip', '$filter', '$state' + 'GetProjectIcon', 'GetProjectToolTip', '$filter', '$state' ]; diff --git a/awx/ui/client/src/helpers.js b/awx/ui/client/src/helpers.js index 4a277970da..94acbd1382 100644 --- a/awx/ui/client/src/helpers.js +++ b/awx/ui/client/src/helpers.js @@ -21,7 +21,7 @@ import JobTemplates from "./helpers/JobTemplates"; import Jobs from "./helpers/Jobs"; import License from "./helpers/License"; import LoadConfig from "./helpers/LoadConfig"; -import LogViewer from "./helpers/LogViewer"; +import StandardOut from "./helpers/StandardOut"; import Lookup from "./helpers/Lookup"; import PaginationHelpers from "./helpers/PaginationHelpers"; import Parse from "./helpers/Parse"; @@ -59,7 +59,7 @@ export Jobs, License, LoadConfig, - LogViewer, + StandardOut, Lookup, PaginationHelpers, Parse, diff --git a/awx/ui/client/src/helpers/EventViewer.js b/awx/ui/client/src/helpers/EventViewer.js index c56d7f05a4..cb075fa5e9 100644 --- a/awx/ui/client/src/helpers/EventViewer.js +++ b/awx/ui/client/src/helpers/EventViewer.js @@ -13,8 +13,8 @@ export default angular.module('EventViewerHelper', ['ModalDialog', 'Utilities', 'EventsViewerFormDefinition', 'HostsHelper']) - .factory('EventViewer', ['$compile', 
'CreateDialog', 'GetEvent', 'Wait', 'EventAddTable', 'GetBasePath', 'LookUpName', 'Empty', 'EventAddPreFormattedText', - function($compile, CreateDialog, GetEvent, Wait, EventAddTable, GetBasePath, LookUpName, Empty, EventAddPreFormattedText) { + .factory('EventViewer', ['$compile', 'CreateDialog', 'GetEvent', 'Wait', 'EventAddTable', 'GetBasePath', 'Empty', 'EventAddPreFormattedText', + function($compile, CreateDialog, GetEvent, Wait, EventAddTable, GetBasePath, Empty, EventAddPreFormattedText) { return function(params) { var parent_scope = params.scope, url = params.url, diff --git a/awx/ui/client/src/helpers/Groups.js b/awx/ui/client/src/helpers/Groups.js index 920c4641bc..b366a02a50 100644 --- a/awx/ui/client/src/helpers/Groups.js +++ b/awx/ui/client/src/helpers/Groups.js @@ -18,7 +18,7 @@ export default angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name, 'GroupListDefinition', 'SearchHelper', 'PaginationHelpers', listGenerator.name, 'GroupsHelper', 'InventoryHelper', 'SelectionHelper', 'JobSubmissionHelper', 'RefreshHelper', 'PromptDialog', 'CredentialsListDefinition', 'InventoryTree', - 'InventoryStatusDefinition', 'VariablesHelper', 'SchedulesListDefinition', 'SourceFormDefinition', 'LogViewerHelper', + 'InventoryStatusDefinition', 'VariablesHelper', 'SchedulesListDefinition', 'SourceFormDefinition', 'StandardOutHelper', 'SchedulesHelper' ]) @@ -65,8 +65,8 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name * TODO: Document * */ -.factory('ViewUpdateStatus', ['Rest', 'ProcessErrors', 'GetBasePath', 'Alert', 'Wait', 'Empty', 'Find', 'LogViewer', - function (Rest, ProcessErrors, GetBasePath, Alert, Wait, Empty, Find, LogViewer) { +.factory('ViewUpdateStatus', ['$state', 'Rest', 'ProcessErrors', 'GetBasePath', 'Alert', 'Wait', 'Empty', 'Find', + function ($state, Rest, ProcessErrors, GetBasePath, Alert, Wait, Empty, Find) { return function (params) { var scope = params.scope, @@ -76,11 +76,13 @@ 
angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name if (scope.removeSourceReady) { scope.removeSourceReady(); } - scope.removeSourceReady = scope.$on('SourceReady', function(e, url) { - LogViewer({ - scope: scope, - url: url - }); + scope.removeSourceReady = scope.$on('SourceReady', function(e, source) { + + // Pull the ID out of related.current_update / related.last_update + var update_id = (source.current_update) ? source.related.current_update.replace(/^\//, '').split('/')[3] : source.related.last_update.replace(/^\//, '').split('/')[3]; + + $state.go('inventorySyncStdout', {id: update_id}); + }); if (group) { @@ -94,8 +96,7 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name Rest.setUrl(group.related.inventory_source); Rest.get() .success(function (data) { - var url = (data.related.current_update) ? data.related.current_update : data.related.last_update; - scope.$emit('SourceReady', url); + scope.$emit('SourceReady', data); }) .error(function (data, status) { ProcessErrors(scope, data, status, null, { hdr: 'Error!', diff --git a/awx/ui/client/src/helpers/Hosts.js b/awx/ui/client/src/helpers/Hosts.js index 517fee8a52..b9bde5fe2c 100644 --- a/awx/ui/client/src/helpers/Hosts.js +++ b/awx/ui/client/src/helpers/Hosts.js @@ -20,7 +20,7 @@ export default angular.module('HostsHelper', [ 'RestServices', 'Utilities', listGenerator.name, 'HostListDefinition', 'SearchHelper', 'PaginationHelpers', listGenerator.name, 'HostsHelper', 'InventoryHelper', 'RelatedSearchHelper', 'InventoryFormDefinition', 'SelectionHelper', - 'HostGroupsFormDefinition', 'VariablesHelper', 'ModalDialog', 'LogViewerHelper', + 'HostGroupsFormDefinition', 'VariablesHelper', 'ModalDialog', 'StandardOutHelper', 'GroupListDefinition' ]) @@ -159,17 +159,6 @@ angular.module('HostsHelper', [ 'RestServices', 'Utilities', listGenerator.name, }; }]) -.factory('ViewJob', ['LogViewer', 'GetBasePath', function(LogViewer, GetBasePath) { - return 
function(params) { - var scope = params.scope, - id = params.id; - LogViewer({ - scope: scope, - url: GetBasePath('jobs') + id + '/' - }); - }; -}]) - .factory('HostsReload', [ '$stateParams', 'Empty', 'InventoryHosts', 'GetBasePath', 'SearchInit', 'PaginateInit', 'Wait', 'SetHostStatus', 'SetStatus', 'ApplyEllipsis', function($stateParams, Empty, InventoryHosts, GetBasePath, SearchInit, PaginateInit, Wait, SetHostStatus, SetStatus, diff --git a/awx/ui/client/src/helpers/Jobs.js b/awx/ui/client/src/helpers/Jobs.js index 6320259ade..dd7e83d57f 100644 --- a/awx/ui/client/src/helpers/Jobs.js +++ b/awx/ui/client/src/helpers/Jobs.js @@ -14,7 +14,7 @@ import listGenerator from '../shared/list-generator/main'; export default angular.module('JobsHelper', ['Utilities', 'RestServices', 'FormGenerator', 'JobSummaryDefinition', 'InventoryHelper', 'GeneratorHelpers', - 'JobSubmissionHelper', 'LogViewerHelper', 'SearchHelper', 'PaginationHelpers', 'AdhocHelper', listGenerator.name]) + 'JobSubmissionHelper', 'StandardOutHelper', 'SearchHelper', 'PaginationHelpers', 'AdhocHelper', listGenerator.name]) /** * JobsControllerInit({ scope: $scope }); @@ -22,8 +22,8 @@ export default * Initialize calling scope with all the bits required to support a jobs list * */ - .factory('JobsControllerInit', ['$state', 'Find', 'DeleteJob', 'RelaunchJob', 'LogViewer', '$window', - function($state, Find, DeleteJob, RelaunchJob, LogViewer, $window) { + .factory('JobsControllerInit', ['$state', 'Find', 'DeleteJob', 'RelaunchJob', '$window', + function($state, Find, DeleteJob, RelaunchJob, $window) { return function(params) { var scope = params.scope, iterator = (params.iterator) ? 
params.iterator : scope.iterator; diff --git a/awx/ui/client/src/helpers/LogViewer.js b/awx/ui/client/src/helpers/LogViewer.js deleted file mode 100644 index 90e9a3f407..0000000000 --- a/awx/ui/client/src/helpers/LogViewer.js +++ /dev/null @@ -1,390 +0,0 @@ -/************************************************* - * Copyright (c) 2015 Ansible, Inc. - * - * All Rights Reserved - *************************************************/ - - /** - * @ngdoc function - * @name helpers.function:LogViewer - * @description logviewer -*/ - -export default - angular.module('LogViewerHelper', ['ModalDialog', 'Utilities', 'FormGenerator', 'VariablesHelper']) - - .factory('LogViewer', ['$location', '$compile', 'CreateDialog', 'GetJob', 'Wait', 'GenerateForm', 'LogViewerStatusForm', 'AddTable', 'AddTextarea', - 'LogViewerOptionsForm', 'EnvTable', 'GetBasePath', 'LookUpName', 'Empty', 'AddPreFormattedText', 'ParseVariableString', 'GetChoices', - function($location, $compile, CreateDialog, GetJob, Wait, GenerateForm, LogViewerStatusForm, AddTable, AddTextarea, LogViewerOptionsForm, EnvTable, - GetBasePath, LookUpName, Empty, AddPreFormattedText, ParseVariableString, GetChoices) { - return function(params) { - var parent_scope = params.scope, - url = params.url, - getIcon = params.getIcon, - scope = parent_scope.$new(true), - base = $location.path().replace(/^\//, '').split('/')[0], - pieces; - - if (scope.removeModalReady) { - scope.removeModalReady(); - } - scope.removeModalReady = scope.$on('ModalReady', function() { - Wait('stop'); - $('#logviewer-modal-dialog').dialog('open'); - }); - - if (scope.removeJobReady) { - scope.removeJobReady(); - } - scope.removeJobReady = scope.$on('JobReady', function(e, data) { - var key, resizeText, elem; - $('#status-form-container').empty(); - $('#options-form-container').empty(); - $('#stdout-form-container').empty(); - $('#traceback-form-container').empty(); - $('#variables-container').empty(); - $('#source-container').empty(); - $('#logview-tabs 
li:eq(1)').hide(); - $('#logview-tabs li:eq(2)').hide(); - $('#logview-tabs li:eq(4)').hide(); - $('#logview-tabs li:eq(5)').hide(); - - // Make sure subsequenct scope references don't bubble up to the parent - for (key in LogViewerStatusForm.fields) { - scope[key] = ''; - } - for (key in LogViewerOptionsForm.fields) { - scope[key] = ''; - } - - for (key in data) { - scope[key] = data[key]; - } - scope.created_by = ''; - scope.job_template = ''; - - if (data.related.created_by) { - pieces = data.related.created_by.replace(/^\//,'').replace(/\/$/,'').split('/'); - scope.created_by = parseInt(pieces[pieces.length - 1],10); - LookUpName({ - scope: scope, - scope_var: 'created_by', - url: GetBasePath('users') + scope.created_by + '/' - }); - } - - // For jobs link the name to the job parent - if (base === 'jobs') { - if (data.type === 'job') { - scope.name_link = "job_template"; - scope.job_template = data.unified_job_template; - scope.job_template_name = (data.summary_fields.job_template) ? data.summary_fields.job_template.name : data.name; - scope.name_id = data.unified_job_template; - } - if (data.type === 'project_update') { - scope.name_link = "project"; - scope.name_id = data.unified_job_template; - } - if (data.type === 'inventory_update') { - scope.name_link = "inventory_source"; - scope.name_id = scope.group; - } - } - - AddTable({ scope: scope, form: LogViewerStatusForm, id: 'status-form-container', getIcon: getIcon }); - AddTable({ scope: scope, form: LogViewerOptionsForm, id: 'options-form-container', getIcon: getIcon }); - - if (data.result_stdout) { - $('#logview-tabs li:eq(1)').show(); - var showStandardOut = (data.type !== "system_job") ? 
true : false; - AddPreFormattedText({ - id: 'stdout-form-container', - val: data.result_stdout, - standardOut: showStandardOut, - jobUrl: data.url - }); - } - - if (data.result_traceback) { - $('#logview-tabs li:eq(2)').show(); - AddPreFormattedText({ - id: 'traceback-form-container', - val: data.result_traceback - }); - } - - /*if (data.job_env) { - EnvTable({ - id: 'env-form-container', - vars: data.job_env - }); - }*/ - - if (data.extra_vars) { - $('#logview-tabs li:eq(4)').show(); - AddTextarea({ - container_id: 'variables-container', - fld_id: 'variables', - val: ParseVariableString(data.extra_vars) - }); - } - - if (data.source_vars) { - $('#logview-tabs li:eq(5)').show(); - AddTextarea({ - container_id: 'source-container', - fld_id: 'source-variables', - val: ParseVariableString(data.source_vars) - }); - } - - if (!Empty(scope.source)) { - if (scope.removeChoicesReady) { - scope.removeChoicesReady(); - } - scope.removeChoicesReady = scope.$on('ChoicesReady', function() { - scope.source_choices.every(function(e) { - if (e.value === scope.source) { - scope.source = e.label; - return false; - } - return true; - }); - }); - GetChoices({ - scope: scope, - url: GetBasePath('inventory_sources'), - field: 'source', - variable: 'source_choices', - choice_name: 'choices', - callback: 'ChoicesReady' - }); - } - - if (!Empty(scope.credential)) { - LookUpName({ - scope: scope, - scope_var: 'credential', - url: GetBasePath('credentials') + scope.credential + '/' - }); - } - - if (!Empty(scope.inventory)) { - LookUpName({ - scope: scope, - scope_var: 'inventory', - url: GetBasePath('inventory') + scope.inventory + '/' - }); - } - - if (!Empty(scope.project)) { - LookUpName({ - scope: scope, - scope_var: 'project', - url: GetBasePath('projects') + scope.project + '/' - }); - } - - if (!Empty(scope.cloud_credential)) { - LookUpName({ - scope: scope, - scope_var: 'cloud_credential', - url: GetBasePath('credentials') + scope.cloud_credential + '/' - }); - } - - if 
(!Empty(scope.inventory_source)) { - LookUpName({ - scope: scope, - scope_var: 'inventory_source', - url: GetBasePath('inventory_sources') + scope.inventory_source + '/' - }); - } - - resizeText = function() { - var u = $('#logview-tabs').outerHeight() + 25, - h = $('#logviewer-modal-dialog').innerHeight(), - rows = Math.floor((h - u) / 20); - rows -= 3; - rows = (rows < 6) ? 6 : rows; - $('#logviewer-modal-dialog #variables').attr({ rows: rows }); - $('#logviewer-modal-dialog #source-variables').attr({ rows: rows }); - }; - - elem = angular.element(document.getElementById('logviewer-modal-dialog')); - $compile(elem)(scope); - - CreateDialog({ - scope: scope, - width: 600, - height: 550, - minWidth: 450, - callback: 'ModalReady', - id: 'logviewer-modal-dialog', - onResizeStop: resizeText, - title: 'Job Results', - onOpen: function() { - $('#logview-tabs a:first').tab('show'); - $('#dialog-ok-button').focus(); - resizeText(); - } - }); - }); - - GetJob({ - url: url, - scope: scope - }); - - scope.modalOK = function() { - $('#logviewer-modal-dialog').dialog('close'); - scope.$destroy(); - }; - }; - }]) - - .factory('GetJob', ['Rest', 'ProcessErrors', function(Rest, ProcessErrors) { - return function(params) { - var url = params.url, - scope = params.scope; - Rest.setUrl(url); - Rest.get() - .success(function(data){ - scope.$emit('JobReady', data); - }) - .error(function(data, status) { - ProcessErrors(scope, data, status, null, { hdr: 'Error!', - msg: 'Failed to retrieve ' + url + '. 
GET returned: ' + status }); - }); - }; - }]) - - .factory('LookUpName', ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) { - return function(params) { - var url = params.url, - scope_var = params.scope_var, - scope = params.scope; - Rest.setUrl(url); - Rest.get() - .success(function(data) { - if (scope_var === 'inventory_source') { - scope[scope_var + '_name'] = data.summary_fields.group.name; - } - else if (!Empty(data.name)) { - scope[scope_var + '_name'] = data.name; - } - if (!Empty(data.group)) { - // Used for inventory_source - scope.group = data.group; - } - }) - .error(function(data, status) { - ProcessErrors(scope, data, status, null, { hdr: 'Error!', - msg: 'Failed to retrieve ' + url + '. GET returned: ' + status }); - }); - }; - }]) - - .factory('AddTable', ['$compile', 'Empty', 'Find', function($compile, Empty, Find) { - return function(params) { - var form = params.form, - id = params.id, - scope = params.scope, - getIcon = params.getIcon, - fld, html, url, e, - urls = [ - { "variable": "credential", "url": "/#/credentials/" }, - { "variable": "project", "url": "/#/projects/" }, - { "variable": "inventory", "url": "/#/inventories/" }, - { "variable": "cloud_credential", "url": "/#/credentials/" }, - { "variable": "inventory_source", "url": "/#/home/groups/?id={{ group }}" }, - { "variable": "job_template", "url": "/#/job_templates/" }, - { "variable": "created_by", "url": "/#/users/" } - ]; - html = "\n"; - for (fld in form.fields) { - if (!Empty(scope[fld])) { - html += "" + - "\n"; - } - } - html += "
" + form.fields[fld].label + ""; - url = Find({ list: urls, key: "variable", val: fld }); - if (url) { - html += "{{ " + fld + '_name' + " }}"; - } - else if (fld === 'name' && scope.name_link) { - url = Find({ list: urls, key: "variable", val: scope.name_link }); - html += "{{ " + - ( (scope.name_link === 'inventory_source') ? 'inventory_source_name' : fld ) + " }}"; - } - else if (fld === 'elapsed') { - html += scope[fld] + " seconds"; - } - else if (fld === 'status') { - if (getIcon) { - html += " " + scope[fld]; - } - else { - html += " " + scope[fld]; - } - if (scope.job_explanation) { - html += "

" + scope.job_explanation + "

"; - } - } - else { - html += "{{ " + fld ; - html += (form.fields[fld].filter) ? " | " + form.fields[fld].filter : "" ; - html += " }}"; - } - html += "
\n"; - e = angular.element(document.getElementById(id)); - e.empty().html(html); - $compile(e)(scope); - }; - }]) - - .factory('AddTextarea', [ function() { - return function(params) { - var container_id = params.container_id, - val = params.val, - fld_id = params.fld_id, - html; - html = "
\n" + - "" + - "
\n"; - $('#' + container_id).empty().html(html); - }; - }]) - - .factory('AddPreFormattedText', ['$rootScope', function($rootScope) { - return function(params) { - var id = params.id, - val = params.val, - html = ""; - if (params.standardOut) { - html += 'Download'; - html += "
" + val + "
\n"; - } else { - html += "
" + val + "
\n"; - } - $('#' + id).empty().html(html); - }; - }]) - - .factory('EnvTable', [ function() { - return function(params) { - var id = params.id, - vars = params.vars, - key, html; - html = "\n"; - for (key in vars) { - html += "" + - "\n"; - } - html += "
" + key + "" + vars[key] + "
\n"; - $('#' + id).empty().html(html); - }; - }]); diff --git a/awx/ui/client/src/helpers/StandardOut.js b/awx/ui/client/src/helpers/StandardOut.js new file mode 100644 index 0000000000..a739bef764 --- /dev/null +++ b/awx/ui/client/src/helpers/StandardOut.js @@ -0,0 +1,40 @@ +/************************************************* + * Copyright (c) 2016 Ansible, Inc. + * + * All Rights Reserved + *************************************************/ + + /** + * @ngdoc function + * @name helpers.function:StandardOut + * @description Helpers for the standard out views +*/ + +export default + angular.module('StandardOutHelper', []) + + .factory('LookUpName', ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) { + return function(params) { + var url = params.url, + scope_var = params.scope_var, + scope = params.scope; + Rest.setUrl(url); + Rest.get() + .success(function(data) { + if (scope_var === 'inventory_source') { + scope[scope_var + '_name'] = data.summary_fields.group.name; + } + else if (!Empty(data.name)) { + scope[scope_var + '_name'] = data.name; + } + if (!Empty(data.group)) { + // Used for inventory_source + scope.group = data.group; + } + }) + .error(function(data, status) { + ProcessErrors(scope, data, status, null, { hdr: 'Error!', + msg: 'Failed to retrieve ' + url + '. GET returned: ' + status }); + }); + }; + }]) diff --git a/awx/ui/client/src/standard-out/standard-out.controller.js b/awx/ui/client/src/standard-out/standard-out.controller.js index f95eaab1fb..7c6d196aa6 100644 --- a/awx/ui/client/src/standard-out/standard-out.controller.js +++ b/awx/ui/client/src/standard-out/standard-out.controller.js @@ -197,7 +197,7 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi return true; }); }); - // GetChoices can be found in the helper: LogViewer.js + // GetChoices can be found in the helper: StandardOut.js // It attaches the source choices to $scope.source_choices. 
// Then, when the callback is fired, $scope.source is bound // to the corresponding label. @@ -211,7 +211,7 @@ export function JobStdoutController ($location, $log, $rootScope, $scope, $compi }); } - // LookUpName can be found in the helper: LogViewer.js + // LookUpName can be found in the helper: StandardOut.js // It attaches the name that it gets (based on the url) // to the $scope variable defined by the attribute scope_var. if (!Empty(data.credential)) { From ab3669efa9735eea8d532e1c0789453e4493765e Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 22 Feb 2016 17:09:36 -0500 Subject: [PATCH 15/38] Refactor message generator * Job object can now control the output and generate K:V output for notification types that can support it * Notifications store the body as json/dict now to encode more information * Notification Type can further compose the message based on what is sensible for the notification type * This will also allow customizing the message template in the future * All notification types use sane defaults for the level of detail now --- awx/api/serializers.py | 2 +- awx/api/views.py | 3 ++- awx/main/models/jobs.py | 20 ++++++++++++++++++++ awx/main/models/notifications.py | 17 +++++++---------- awx/main/models/unified_jobs.py | 10 ++++++++++ awx/main/notifications/email_backend.py | 7 +++++++ awx/main/notifications/hipchat_backend.py | 6 +++--- awx/main/notifications/irc_backend.py | 4 ++-- awx/main/notifications/pagerduty_backend.py | 7 +++++-- awx/main/notifications/slack_backend.py | 6 +++--- awx/main/notifications/twilio_backend.py | 6 +++--- awx/main/notifications/webhook_backend.py | 13 +++++++++---- awx/main/tasks.py | 12 ++++-------- 13 files changed, 76 insertions(+), 37 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 04f5e241c3..a680e5b00c 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2118,7 +2118,7 @@ class NotificationSerializer(BaseSerializer): class Meta: model = Notification 
fields = ('*', '-name', '-description', 'notifier', 'error', 'status', 'notifications_sent', - 'notification_type', 'recipients', 'subject', 'body') + 'notification_type', 'recipients', 'subject') def get_related(self, obj): res = super(NotificationSerializer, self).get_related(obj) diff --git a/awx/api/views.py b/awx/api/views.py index 439de3f845..70532f026c 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -3053,7 +3053,8 @@ class NotifierTest(GenericAPIView): def post(self, request, *args, **kwargs): obj = self.get_object() - notification = obj.generate_notification("Tower Notification Test", "Ansible Tower Test Notification") + notification = obj.generate_notification("Tower Notification Test {}".format(obj.id), + {"body": "Ansible Tower Test Notification {}".format(obj.id)}) if not notification: return Response({}, status=status.HTTP_400_BAD_REQUEST) else: diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index dd772d695d..2d2dc991a9 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -496,6 +496,26 @@ class Job(UnifiedJob, JobOptions): dependencies.append(source.create_inventory_update(launch_type='dependency')) return dependencies + def notification_data(self): + data = super(Job, self).notification_data() + all_hosts = {} + for h in self.job_host_summaries.all(): + all_hosts[h.host.name] = dict(failed=h.failed, + changed=h.changed, + dark=h.dark, + failures=h.failures, + ok=h.ok, + processed=h.processed, + skipped=h.skipped) + data.update(dict(inventory=self.inventory.name, + project=self.project.name, + playbook=self.playbook, + credential=self.credential.name, + limit=self.limit, + extra_vars=self.extra_vars, + hosts=all_hosts)) + return data + def handle_extra_data(self, extra_data): extra_vars = {} if type(extra_data) == dict: diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index d6fc9d31b8..04bd5b0e53 100644 --- a/awx/main/models/notifications.py +++ 
b/awx/main/models/notifications.py @@ -69,8 +69,8 @@ class Notifier(CommonModel): for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", self.notification_class.init_parameters): if new_instance: - value = getattr(self.notification_configuration, field, '') - setattr(self, '_saved_{}'.format(field), value) + value = self.notification_configuration[field] + setattr(self, '_saved_{}_{}'.format("config", field), value) self.notification_configuration[field] = '' else: encrypted = encrypt_field(self, 'notification_configuration', subfield=field) @@ -82,8 +82,9 @@ class Notifier(CommonModel): update_fields = [] for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", self.notification_class.init_parameters): - saved_value = getattr(self, '_saved_{}'.format(field), '') - setattr(self.notification_configuration, field, saved_value) + saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '') + self.notification_configuration[field] = saved_value + #setattr(self.notification_configuration, field, saved_value) if 'notification_configuration' not in update_fields: update_fields.append('notification_configuration') self.save(update_fields=update_fields) @@ -112,7 +113,7 @@ class Notifier(CommonModel): recipients = [recipients] sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None) backend_obj = self.notification_class(**self.notification_configuration) - notification_obj = EmailMessage(subject, body, sender, recipients) + notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients) return backend_obj.send_messages([notification_obj]) class Notification(CreatedModifiedModel): @@ -165,11 +166,7 @@ class Notification(CreatedModifiedModel): default='', editable=False, ) - body = models.TextField( - blank=True, - default='', - editable=False, - ) + body = JSONField(blank=True) def get_absolute_url(self): return 
reverse('api:notification_detail', args=(self.pk,)) diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 986be923fb..ed34653048 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -731,6 +731,16 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique tasks that might preclude creating one''' return [] + def notification_data(self): + return dict(id=self.id, + name=self.name, + url=self.get_absolute_url(), #TODO: Need to replace with UI job view + created_by=str(self.created_by), + started=self.started.isoformat(), + finished=self.finished.isoformat(), + status=self.status, + traceback=self.result_traceback) + def start(self, error_callback, success_callback, **kwargs): ''' Start the task running via Celery. diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py index 271f585d5c..484a61f12d 100644 --- a/awx/main/notifications/email_backend.py +++ b/awx/main/notifications/email_backend.py @@ -18,3 +18,10 @@ class CustomEmailBackend(EmailBackend): recipient_parameter = "recipients" sender_parameter = "sender" + def format_body(self, body): + body_actual = "{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url']) + body_actual += pprint.pformat(body, indent=4) + return body_actual diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py index a5b7f561b6..5d58792591 100644 --- a/awx/main/notifications/hipchat_backend.py +++ b/awx/main/notifications/hipchat_backend.py @@ -5,11 +5,11 @@ import logging import requests -from django.core.mail.backends.base import BaseEmailBackend +from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.hipchat_backend') -class HipChatBackend(BaseEmailBackend): +class HipChatBackend(TowerBaseEmailBackend): init_parameters = 
{"token": {"label": "Token", "type": "password"}, "channels": {"label": "Destination Channels", "type": "list"}, @@ -35,7 +35,7 @@ class HipChatBackend(BaseEmailBackend): r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp), params={"auth_token": self.token}, json={"color": self.color, - "message": m.body, + "message": m.subject, "notify": self.notify, "from": m.from_email, "message_format": "text"}) diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index 2b0944b74a..b3e92a12b3 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -7,11 +7,11 @@ import logging import irc.client -from django.core.mail.backends.base import BaseEmailBackend +from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.irc_backend') -class IrcBackend(BaseEmailBackend): +class IrcBackend(TowerBaseEmailBackend): init_parameters = {"server": {"label": "IRC Server Address", "type": "string"}, "port": {"label": "IRC Server Port", "type": "int"}, diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py index 161bb822bc..fd7661ba86 100644 --- a/awx/main/notifications/pagerduty_backend.py +++ b/awx/main/notifications/pagerduty_backend.py @@ -4,11 +4,11 @@ import logging import pygerduty -from django.core.mail.backends.base import BaseEmailBackend +from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.pagerduty_backend') -class PagerDutyBackend(BaseEmailBackend): +class PagerDutyBackend(TowerBaseEmailBackend): init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"}, "token": {"label": "API Token", "type": "password"}, @@ -22,6 +22,9 @@ class PagerDutyBackend(BaseEmailBackend): self.subdomain = subdomain self.token = token + def format_body(self, body): + return body + def send_messages(self, messages): 
sent_messages = 0 diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 3bf4f32114..91e4cd4fd3 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -4,11 +4,11 @@ import logging from slackclient import SlackClient -from django.core.mail.backends.base import BaseEmailBackend +from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.slack_backend') -class SlackBackend(BaseEmailBackend): +class SlackBackend(TowerBaseEmailBackend): init_parameters = {"token": {"label": "Token", "type": "password"}, "channels": {"label": "Destination Channels", "type": "list"}} @@ -41,7 +41,7 @@ class SlackBackend(BaseEmailBackend): for m in messages: try: for r in m.recipients(): - self.connection.rtm_send_message(r, m.body) + self.connection.rtm_send_message(r, m.subject) sent_messages += 1 except Exception as e: logger.error("Exception sending messages: {}".format(e)) diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index d9c4cc43b6..847ebb9f2f 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -5,11 +5,11 @@ import logging from twilio.rest import TwilioRestClient -from django.core.mail.backends.base import BaseEmailBackend +from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.twilio_backend') -class TwilioBackend(BaseEmailBackend): +class TwilioBackend(TowerBaseEmailBackend): init_parameters = {"account_sid": {"label": "Account SID", "type": "string"}, "account_token": {"label": "Account Token", "type": "password"}, @@ -38,7 +38,7 @@ class TwilioBackend(BaseEmailBackend): connection.messages.create( to=m.to, from_=m.from_email, - body=m.body) + body=m.subject) sent_messages += 1 except Exception as e: logger.error("Exception sending messages: {}".format(e)) diff --git 
a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index 5bdbff0e02..15cd950923 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -4,12 +4,12 @@ import logging import requests - -from django.core.mail.backends.base import BaseEmailBackend +import json +from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.webhook_backend') -class WebhookBackend(BaseEmailBackend): +class WebhookBackend(TowerBaseEmailBackend): init_parameters = {"url": {"label": "Target URL", "type": "string"}, "headers": {"label": "HTTP Headers", "type": "object"}} @@ -20,11 +20,16 @@ class WebhookBackend(BaseEmailBackend): self.headers = headers super(WebhookBackend, self).__init__(fail_silently=fail_silently) + def format_body(self, body): + logger.error("Generating body from {}".format(str(body))) + return body + def send_messages(self, messages): sent_messages = 0 for m in messages: + logger.error("BODY: " + str(m.body)) r = requests.post("{}".format(m.recipients()[0]), - data=m.body, + data=json.dumps(m.body), headers=self.headers) if r.status_code >= 400: logger.error("Error sending notification webhook: {}".format(r.text)) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index aff9a6a585..7db56d78f5 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -207,10 +207,8 @@ def handle_work_success(self, result, task_actual): notification_subject = "{} #{} '{}' succeeded on Ansible Tower".format(friendly_name, task_actual['id'], instance_name) - notification_body = "{} #{} '{}' succeeded on Ansible Tower\nTo view the output: {}".format(friendly_name, - task_actual['id'], - instance_name, - instance.get_absolute_url()) + notification_body = instance.notification_data() + notification_body['friendly_name'] = friendly_name send_notifications.delay([n.generate_notification(notification_subject, notification_body) for n in notifiers.get('success', 
[]) + notifiers.get('any', [])], job_id=task_actual['id']) @@ -265,10 +263,8 @@ def handle_work_error(self, task_id, subtasks=None): notification_subject = "{} #{} '{}' failed on Ansible Tower".format(first_task_friendly_name, first_task_id, first_task_name) - notification_body = "{} #{} '{}' failed on Ansible Tower\nTo view the output: {}".format(first_task_friendly_name, - first_task_id, - first_task_name, - first_task.get_absolute_url()) + notification_body = first_task.notification_data() + notification_body['friendly_name'] = first_task_friendly_name send_notifications.delay([n.generate_notification(notification_subject, notification_body).id for n in notifiers.get('error', []) + notifiers.get('any', [])], job_id=first_task_id) From 4928badd3fa3d5ccea6a1c7487ffcb1a22b82587 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 10:59:08 -0500 Subject: [PATCH 16/38] Add a periodic administrative notification --- awx/main/notifications/webhook_backend.py | 2 -- awx/main/tasks.py | 30 ++++++++++++++++++++--- awx/settings/defaults.py | 13 ++++++++++ 3 files changed, 39 insertions(+), 6 deletions(-) diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index 15cd950923..e10b6869e3 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -21,13 +21,11 @@ class WebhookBackend(TowerBaseEmailBackend): super(WebhookBackend, self).__init__(fail_silently=fail_silently) def format_body(self, body): - logger.error("Generating body from {}".format(str(body))) return body def send_messages(self, messages): sent_messages = 0 for m in messages: - logger.error("BODY: " + str(m.body)) r = requests.post("{}".format(m.recipients()[0]), data=json.dumps(m.body), headers=self.headers) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 7db56d78f5..4b51893a98 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -39,6 +39,8 @@ from celery import Task, task from django.conf 
import settings from django.db import transaction, DatabaseError from django.utils.timezone import now +from django.core.mail import send_mail +from django.contrib.auth.models import User # AWX from awx.lib.metrics import task_timer @@ -46,6 +48,7 @@ from awx.main.constants import CLOUD_PROVIDERS from awx.main.models import * # noqa from awx.main.queue import FifoQueue from awx.main.conf import tower_settings +from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url, ignore_inventory_computed_fields, emit_websocket_notification, check_proot_installed, build_proot_temp_dir, wrap_args_with_proot) @@ -53,7 +56,7 @@ from awx.fact.utils.connection import test_mongo_connection __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', - 'update_inventory_computed_fields', 'send_notifications'] + 'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks'] HIDDEN_PASSWORD = '**********' @@ -69,6 +72,8 @@ logger = logging.getLogger('awx.main.tasks') def send_notifications(notification_list, job_id=None): if not isinstance(notification_list, list): raise TypeError("notification_list should be of type list") + if job_id is not None: + job_actual = UnifiedJob.objects.get(id=job_id) for notification_id in notification_list: notification = Notification.objects.get(id=notification_id) try: @@ -82,8 +87,26 @@ def send_notifications(notification_list, job_id=None): finally: notification.save() if job_id is not None: - j = UnifiedJob.objects.get(id=job_id) - j.notifications.add(notification) + job_actual.notifications.add(notification) + +@task(bind=True) +def run_administrative_checks(self): + if not tower_settings.TOWER_ADMIN_ALERTS: + return + reader = TaskSerializer() + validation_info = reader.from_database() + used_percentage = 
validation_info.get('current_instances',0) / validation_info.get('instance_count', 100) + tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True) + if (used_percentage * 100) > 90: + send_mail("Ansible Tower host usage over 90%", + "Ansible Tower host usage over 90%", + tower_admin_emails, + fail_silently=True) + if validation_info.get('time_remaining', 0) < TASK_TIMEOUT_INTERVAL: + send_mail("Ansible Tower license will expire soon", + "Ansible Tower license will expire soon", + tower_admin_emails, + fail_silently=True) @task() def bulk_inventory_element_delete(inventory, hosts=[], groups=[]): @@ -155,7 +178,6 @@ def notify_task_runner(metadata_dict): queue = FifoQueue('tower_task_manager') queue.push(metadata_dict) - @task() def mongodb_control(cmd): # Sanity check: Do not send arbitrary commands. diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index d8d1fbc9fc..76ba79df61 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -341,6 +341,10 @@ CELERYBEAT_SCHEDULE = { 'task': 'awx.main.tasks.tower_periodic_scheduler', 'schedule': timedelta(seconds=30) }, + 'admin_checks': { + 'task': 'awx.main.tasks.run_administrative_checks', + 'schedule': timedelta(days=30) + }, } # Social Auth configuration. 
@@ -679,6 +683,8 @@ FACT_CACHE_PORT = 6564 ORG_ADMINS_CAN_SEE_ALL_USERS = True +TOWER_ADMIN_ALERTS = True + TOWER_SETTINGS_MANIFEST = { "SCHEDULE_MAX_JOBS": { "name": "Maximum Scheduled Jobs", @@ -806,6 +812,13 @@ TOWER_SETTINGS_MANIFEST = { "type": "bool", "category": "system", }, + "TOWER_ADMIN_ALERTS": { + "name": "Enable Tower Administrator Alerts", + "description": "Allow Tower to email Admin users for system events that may require attention", + "default": TOWER_ADMIN_ALERTS, + "type": "bool", + "category": "system", + }, "LICENSE": { "name": "Tower License", "description": "Controls what features and functionality is enabled in Tower.", From 39c942e98bc9e936a952c9c8c876e6fda1207b13 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 11:20:08 -0500 Subject: [PATCH 17/38] Adding migration and base notification type --- awx/main/migrations/0003_v300_changes.py | 105 +++++++++++++++++++++++ awx/main/notifications/base.py | 18 ++++ 2 files changed, 123 insertions(+) create mode 100644 awx/main/migrations/0003_v300_changes.py create mode 100644 awx/main/notifications/base.py diff --git a/awx/main/migrations/0003_v300_changes.py b/awx/main/migrations/0003_v300_changes.py new file mode 100644 index 0000000000..83b8b4b3ab --- /dev/null +++ b/awx/main/migrations/0003_v300_changes.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import jsonfield.fields +import django.db.models.deletion +from django.conf import settings +import taggit.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0002_auto_20150616_2121'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('main', '0002_v300_changes'), + ] + + operations = [ + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, 
editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])), + ('error', models.TextField(default=b'', editable=False, blank=True)), + ('notifications_sent', models.IntegerField(default=0, editable=False)), + ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])), + ('recipients', models.TextField(default=b'', editable=False, blank=True)), + ('subject', models.TextField(default=b'', editable=False, blank=True)), + ('body', jsonfield.fields.JSONField(default=dict, blank=True)), + ], + options={ + 'ordering': ('pk',), + }, + ), + migrations.CreateModel( + name='Notifier', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('description', models.TextField(default=b'', blank=True)), + ('active', models.BooleanField(default=True, editable=False)), + ('name', models.CharField(unique=True, max_length=512)), + ('notification_type', models.CharField(max_length=32, choices=[(b'email', 'Email'), (b'slack', 'Slack'), (b'twilio', 'Twilio'), (b'pagerduty', 'Pagerduty'), (b'hipchat', 'HipChat'), (b'webhook', 'Webhook'), (b'irc', 'IRC')])), + ('notification_configuration', jsonfield.fields.JSONField(default=dict)), + ('created_by', models.ForeignKey(related_name="{u'class': 'notifier', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), + ('modified_by', models.ForeignKey(related_name="{u'class': 'notifier', 
u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), + ('organization', models.ForeignKey(related_name='notifiers', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)), + ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')), + ], + ), + migrations.AddField( + model_name='notification', + name='notifier', + field=models.ForeignKey(related_name='notifications', editable=False, to='main.Notifier'), + ), + migrations.AddField( + model_name='activitystream', + name='notification', + field=models.ManyToManyField(to='main.Notification', blank=True), + ), + migrations.AddField( + model_name='activitystream', + name='notifier', + field=models.ManyToManyField(to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='organization', + name='notifiers_any', + field=models.ManyToManyField(related_name='organization_notifiers_for_any', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='organization', + name='notifiers_error', + field=models.ManyToManyField(related_name='organization_notifiers_for_errors', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='organization', + name='notifiers_success', + field=models.ManyToManyField(related_name='organization_notifiers_for_success', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='unifiedjob', + name='notifications', + field=models.ManyToManyField(related_name='unifiedjob_notifications', editable=False, to='main.Notification'), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + name='notifiers_any', + field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_any', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + 
name='notifiers_error', + field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_errors', to='main.Notifier', blank=True), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + name='notifiers_success', + field=models.ManyToManyField(related_name='unifiedjobtemplate_notifiers_for_success', to='main.Notifier', blank=True), + ), + ] diff --git a/awx/main/notifications/base.py b/awx/main/notifications/base.py new file mode 100644 index 0000000000..e574a07df3 --- /dev/null +++ b/awx/main/notifications/base.py @@ -0,0 +1,18 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +import pprint +from django.core.mail.backends.base import BaseEmailBackend + +class TowerBaseEmailBackend(BaseEmailBackend): + + def format_body(self, body): + if "body" in body: + body_actual = body['body'] + else: + body_actual = "{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url']) + body_actual += pprint.pformat(body, indent=4) + return body_actual From 050ed8a200a1ea5d53eb0e9f18c833d5e008b26e Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 12:18:31 -0500 Subject: [PATCH 18/38] Proper type for in check Still an __in for when we need to add another trigger obj --- awx/main/models/inventory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index b0acf0a90b..fd6dfbff76 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -1185,9 +1185,9 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions): def notifiers(self): # Return all notifiers defined on the Project, and on the Organization for each trigger type base_notifiers = Notifier.objects.filter(active=True) - error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors__in=self)) - success_notifiers = 
list(base_notifiers.filter(organization_notifiers_for_success__in=self)) - any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any__in=self)) + error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors__in=[self])) + success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success__in=[self])) + any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any__in=[self])) return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) def clean_source(self): From b88892be49dca2db9d7a16e2696f6da6cedeb9b1 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 12:33:30 -0500 Subject: [PATCH 19/38] Sanity check and force proper types in admin check --- awx/main/tasks.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 4b51893a98..58e5425866 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -95,7 +95,9 @@ def run_administrative_checks(self): return reader = TaskSerializer() validation_info = reader.from_database() - used_percentage = validation_info.get('current_instances',0) / validation_info.get('instance_count', 100) + if validation_info.get('instance_count', 0) < 1: + return + used_percentage = float(validation_info.get('current_instances', 0)) / float(validation_info.get('instance_count', 100)) tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True) if (used_percentage * 100) > 90: send_mail("Ansible Tower host usage over 90%", From 75ef0dd395fc08ef6e87a8fc03d8d90310736c08 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 14:19:42 -0500 Subject: [PATCH 20/38] Implement tower ui view url on models --- awx/api/views.py | 5 +++-- awx/main/models/ad_hoc_commands.py | 4 ++++ awx/main/models/inventory.py | 5 +++++ awx/main/models/jobs.py | 7 +++++++ awx/main/models/projects.py | 4 ++++ awx/main/models/unified_jobs.py | 9 ++++++++- awx/main/tasks.py | 15 
++++++++------- awx/settings/defaults.py | 9 +++++++++ 8 files changed, 48 insertions(+), 10 deletions(-) diff --git a/awx/api/views.py b/awx/api/views.py index 70532f026c..e5975c7173 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -268,6 +268,7 @@ class ApiV1ConfigView(APIView): # If the license is valid, write it to disk. if license_data['valid_key']: tower_settings.LICENSE = data_actual + tower_settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) # Spawn a task to ensure that MongoDB is started (or stopped) # as appropriate, based on whether the license uses it. @@ -3053,8 +3054,8 @@ class NotifierTest(GenericAPIView): def post(self, request, *args, **kwargs): obj = self.get_object() - notification = obj.generate_notification("Tower Notification Test {}".format(obj.id), - {"body": "Ansible Tower Test Notification {}".format(obj.id)}) + notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, tower_settings.TOWER_URL_BASE), + {"body": "Ansible Tower Test Notification {} {}".format(obj.id, tower_settings.TOWER_URL_BASE)}) if not notification: return Response({}, status=status.HTTP_400_BAD_REQUEST) else: diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 664269a188..12c4261d8b 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -5,6 +5,7 @@ import hmac import json import logging +from urlparse import urljoin # Django from django.conf import settings @@ -139,6 +140,9 @@ class AdHocCommand(UnifiedJob): def get_absolute_url(self): return reverse('api:ad_hoc_command_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk)) + @property def task_auth_token(self): '''Return temporary auth token used for task requests via API.''' diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index fd6dfbff76..edf03a883d 100644 --- 
a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -6,6 +6,7 @@ import datetime import logging import re import copy +from urlparse import urljoin # Django from django.conf import settings @@ -25,6 +26,7 @@ from awx.main.models.jobs import Job from awx.main.models.unified_jobs import * # noqa from awx.main.models.notifications import Notifier from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates +from awx.main.conf import tower_settings __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript'] @@ -1249,6 +1251,9 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions): def get_absolute_url(self): return reverse('api:inventory_update_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/inventory_sync/{}".format(self.pk)) + def is_blocked_by(self, obj): if type(obj) == InventoryUpdate: if self.inventory_source.inventory == obj.inventory_source.inventory: diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 2d2dc991a9..01857b8b06 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -6,6 +6,7 @@ import hmac import json import yaml import logging +from urlparse import urljoin # Django from django.conf import settings @@ -380,6 +381,9 @@ class Job(UnifiedJob, JobOptions): def get_absolute_url(self): return reverse('api:job_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk)) + @property def task_auth_token(self): '''Return temporary auth token used for task requests via API.''' @@ -1096,6 +1100,9 @@ class SystemJob(UnifiedJob, SystemJobOptions): def get_absolute_url(self): return reverse('api:system_job_detail', args=(self.pk,)) + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/management_jobs/{}".format(self.pk)) + def is_blocked_by(self, obj): return True diff --git a/awx/main/models/projects.py 
b/awx/main/models/projects.py index 8a320e3cfc..01e4220d6d 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -24,6 +24,7 @@ from awx.main.models.jobs import Job from awx.main.models.notifications import Notifier from awx.main.models.unified_jobs import * # noqa from awx.main.utils import update_scm_url +from awx.main.conf import tower_settings __all__ = ['Project', 'ProjectUpdate'] @@ -389,6 +390,9 @@ class ProjectUpdate(UnifiedJob, ProjectOptions): def get_absolute_url(self): return reverse('api:project_update_detail', args=(self.pk,)) + def get_ui_url(self): + return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/scm_update/{}".format(self.pk)) + def _update_parent_instance(self): parent_instance = self._get_parent_instance() if parent_instance: diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index ed34653048..9a324048c3 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -484,6 +484,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique else: return '' + def get_ui_url(self): + real_instance = self.get_real_instance() + if real_instance != self: + return real_instance.get_ui_url() + else: + return '' + @classmethod def _get_task_class(cls): raise NotImplementedError # Implement in subclasses. 
@@ -734,7 +741,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique def notification_data(self): return dict(id=self.id, name=self.name, - url=self.get_absolute_url(), #TODO: Need to replace with UI job view + url=self.get_ui_url(), created_by=str(self.created_by), started=self.started.isoformat(), finished=self.finished.isoformat(), diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 58e5425866..ee65490ec1 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -228,11 +228,11 @@ def handle_work_success(self, result, task_actual): friendly_name = "AdHoc Command" else: return - notification_subject = "{} #{} '{}' succeeded on Ansible Tower".format(friendly_name, - task_actual['id'], - instance_name) notification_body = instance.notification_data() - notification_body['friendly_name'] = friendly_name + notification_subject = "{} #{} '{}' succeeded on Ansible Tower: {}".format(friendly_name, + task_actual['id'], + instance_name, + notification_body['url']) send_notifications.delay([n.generate_notification(notification_subject, notification_body) for n in notifiers.get('success', []) + notifiers.get('any', [])], job_id=task_actual['id']) @@ -284,10 +284,11 @@ def handle_work_error(self, task_id, subtasks=None): (first_task_type, first_task_name, first_task_id) instance.save() instance.socketio_emit_status("failed") - notification_subject = "{} #{} '{}' failed on Ansible Tower".format(first_task_friendly_name, - first_task_id, - first_task_name) notification_body = first_task.notification_data() + notification_subject = "{} #{} '{}' failed on Ansible Tower: {}".format(first_task_friendly_name, + first_task_id, + first_task_name, + notification_body['url']) notification_body['friendly_name'] = first_task_friendly_name send_notifications.delay([n.generate_notification(notification_subject, notification_body).id for n in notifiers.get('error', []) + notifiers.get('any', [])], diff --git a/awx/settings/defaults.py 
b/awx/settings/defaults.py index 76ba79df61..465809b523 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -685,6 +685,8 @@ ORG_ADMINS_CAN_SEE_ALL_USERS = True TOWER_ADMIN_ALERTS = True +TOWER_URL_BASE = "https://towerhost" + TOWER_SETTINGS_MANIFEST = { "SCHEDULE_MAX_JOBS": { "name": "Maximum Scheduled Jobs", @@ -819,6 +821,13 @@ TOWER_SETTINGS_MANIFEST = { "type": "bool", "category": "system", }, + "TOWER_URL_BASE": { + "name": "Base URL of the Tower host", + "description": "This is used by services like Notifications to render a valid url to the Tower host", + "default": TOWER_URL_BASE, + "type": "string", + "category": "system", + }, "LICENSE": { "name": "Tower License", "description": "Controls what features and functionality is enabled in Tower.", From 4b1493f456ea1f1327f12692c20d4d135ad57c4d Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 15:26:29 -0500 Subject: [PATCH 21/38] Fixing up some unicode issues --- awx/main/notifications/base.py | 10 ++++++---- awx/main/notifications/email_backend.py | 9 +++++---- awx/main/notifications/hipchat_backend.py | 6 ++++-- awx/main/notifications/irc_backend.py | 4 +++- awx/main/notifications/pagerduty_backend.py | 6 ++++-- awx/main/notifications/slack_backend.py | 4 +++- awx/main/notifications/twilio_backend.py | 6 ++++-- awx/main/notifications/webhook_backend.py | 8 +++++--- awx/main/tasks.py | 3 ++- 9 files changed, 36 insertions(+), 20 deletions(-) diff --git a/awx/main/notifications/base.py b/awx/main/notifications/base.py index e574a07df3..8129c33e27 100644 --- a/awx/main/notifications/base.py +++ b/awx/main/notifications/base.py @@ -2,6 +2,8 @@ # All Rights Reserved. 
import pprint + +from django.utils.encoding import smart_text from django.core.mail.backends.base import BaseEmailBackend class TowerBaseEmailBackend(BaseEmailBackend): @@ -10,9 +12,9 @@ class TowerBaseEmailBackend(BaseEmailBackend): if "body" in body: body_actual = body['body'] else: - body_actual = "{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], - body['id'], - body['status'], - body['url']) + body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url'])) body_actual += pprint.pformat(body, indent=4) return body_actual diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py index 484a61f12d..364e45fc28 100644 --- a/awx/main/notifications/email_backend.py +++ b/awx/main/notifications/email_backend.py @@ -3,6 +3,7 @@ import logging +from django.utils.encoding import smart_text from django.core.mail.backends.smtp import EmailBackend class CustomEmailBackend(EmailBackend): @@ -19,9 +20,9 @@ class CustomEmailBackend(EmailBackend): sender_parameter = "sender" def format_body(self, body): - body_actual = "{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], - body['id'], - body['status'], - body['url']) + body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], + body['id'], + body['status'], + body['url'])) body_actual += pprint.pformat(body, indent=4) return body_actual diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py index 5d58792591..420ef928fa 100644 --- a/awx/main/notifications/hipchat_backend.py +++ b/awx/main/notifications/hipchat_backend.py @@ -5,6 +5,8 @@ import logging import requests +from django.utils.encoding import smart_text + from awx.main.notifications.base import TowerBaseEmailBackend logger = 
logging.getLogger('awx.main.notifications.hipchat_backend') @@ -40,8 +42,8 @@ class HipChatBackend(TowerBaseEmailBackend): "from": m.from_email, "message_format": "text"}) if r.status_code != 204: - logger.error("Error sending messages: {}".format(r.text)) + logger.error(smart_text("Error sending messages: {}".format(r.text))) if not self.fail_silently: - raise Exception("Error sending message to hipchat: {}".format(r.text)) + raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index b3e92a12b3..e01d390c09 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -7,6 +7,8 @@ import logging import irc.client +from django.utils.encoding import smart_text + from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.irc_backend') @@ -48,7 +50,7 @@ class IrcBackend(TowerBaseEmailBackend): connect_factory=connection_factory, ) except irc.client.ServerConnectionError as e: - logger.error("Exception connecting to irc server: {}".format(e)) + logger.error(smart_text("Exception connecting to irc server: {}".format(e))) if not self.fail_silently: raise return True diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py index fd7661ba86..af6b95cfd6 100644 --- a/awx/main/notifications/pagerduty_backend.py +++ b/awx/main/notifications/pagerduty_backend.py @@ -4,6 +4,8 @@ import logging import pygerduty +from django.utils.encoding import smart_text + from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.pagerduty_backend') @@ -33,7 +35,7 @@ class PagerDutyBackend(TowerBaseEmailBackend): except Exception as e: if not self.fail_silently: raise - logger.error("Exception connecting to PagerDuty: {}".format(e)) + 
logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e))) for m in messages: try: pager.trigger_incident(m.recipients()[0], @@ -41,7 +43,7 @@ class PagerDutyBackend(TowerBaseEmailBackend): details=m.body, client=m.from_email) except Exception as e: - logger.error("Exception sending messages: {}".format(e)) + logger.error(smart_text("Exception sending messages: {}".format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 91e4cd4fd3..00f23ed60c 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -4,6 +4,8 @@ import logging from slackclient import SlackClient +from django.utils.encoding import smart_text + from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.slack_backend') @@ -44,7 +46,7 @@ class SlackBackend(TowerBaseEmailBackend): self.connection.rtm_send_message(r, m.subject) sent_messages += 1 except Exception as e: - logger.error("Exception sending messages: {}".format(e)) + logger.error(smart_text("Exception sending messages: {}".format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index 847ebb9f2f..1aea6f368e 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -5,6 +5,8 @@ import logging from twilio.rest import TwilioRestClient +from django.utils.encoding import smart_text + from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.twilio_backend') @@ -31,7 +33,7 @@ class TwilioBackend(TowerBaseEmailBackend): except Exception as e: if not self.fail_silently: raise - logger.error("Exception connecting to Twilio: {}".format(e)) + logger.error(smart_text("Exception connecting to Twilio: {}".format(e))) for m in 
messages: try: @@ -41,7 +43,7 @@ class TwilioBackend(TowerBaseEmailBackend): body=m.subject) sent_messages += 1 except Exception as e: - logger.error("Exception sending messages: {}".format(e)) + logger.error(smart_text("Exception sending messages: {}".format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index e10b6869e3..52d85483ab 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -2,9 +2,11 @@ # All Rights Reserved. import logging - import requests import json + +from django.utils.encoding import smart_text + from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.webhook_backend') @@ -30,8 +32,8 @@ class WebhookBackend(TowerBaseEmailBackend): data=json.dumps(m.body), headers=self.headers) if r.status_code >= 400: - logger.error("Error sending notification webhook: {}".format(r.text)) + logger.error(smart_text("Error sending notification webhook: {}".format(r.text))) if not self.fail_silently: - raise Exception("Error sending notification webhook: {}".format(r.text)) + raise Exception(smart_text("Error sending notification webhook: {}".format(r.text))) sent_messages += 1 return sent_messages diff --git a/awx/main/tasks.py b/awx/main/tasks.py index ee65490ec1..4b285546bb 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -39,6 +39,7 @@ from celery import Task, task from django.conf import settings from django.db import transaction, DatabaseError from django.utils.timezone import now +from django.utils.encoding import smart_text from django.core.mail import send_mail from django.contrib.auth.models import User @@ -83,7 +84,7 @@ def send_notifications(notification_list, job_id=None): except Exception as e: logger.error("Send Notification Failed {}".format(e)) notification.status = "failed" - notification.error = str(e) + 
notification.error = smart_text(e) finally: notification.save() if job_id is not None: From 41d5393af03e29989897b82844ce20b5835b3c27 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 23 Feb 2016 15:30:07 -0500 Subject: [PATCH 22/38] Clean up flake8 related issues --- awx/api/serializers.py | 2 +- awx/main/models/notifications.py | 2 +- awx/main/models/projects.py | 1 - awx/main/models/unified_jobs.py | 1 - awx/main/notifications/email_backend.py | 2 +- awx/main/notifications/irc_backend.py | 2 +- 6 files changed, 4 insertions(+), 6 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index a680e5b00c..f42c20812c 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2108,7 +2108,7 @@ class NotifierSerializer(BaseSerializer): if incorrect_type_fields: for type_field_error in incorrect_type_fields: error_list.append("Configuration field '{}' incorrect type, expected {}".format(type_field_error[0], - type_field_error[1])) + type_field_error[1])) if error_list: raise serializers.ValidationError(error_list) return attrs diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 04bd5b0e53..29a51cf9ac 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -104,7 +104,7 @@ class Notifier(CommonModel): def send(self, subject, body): for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", - self.notification_class.init_parameters): + self.notification_class.init_parameters): self.notification_configuration[field] = decrypt_field(self, 'notification_configuration', subfield=field) diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 01e4220d6d..415c674bb1 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -10,7 +10,6 @@ import urlparse # Django from django.conf import settings from django.db import models -from django.db.models import Q from django.utils.translation import 
ugettext_lazy as _ from django.utils.encoding import smart_str, smart_text from django.core.exceptions import ValidationError diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 9a324048c3..7bb4cdd798 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -30,7 +30,6 @@ from djcelery.models import TaskMeta # AWX from awx.main.models.base import * # noqa from awx.main.models.schedules import Schedule -from awx.main.models.notifications import Notification from awx.main.utils import decrypt_field, emit_websocket_notification, _inventory_updates from awx.main.redact import UriCleaner diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py index 364e45fc28..9a9d0a9e2d 100644 --- a/awx/main/notifications/email_backend.py +++ b/awx/main/notifications/email_backend.py @@ -1,7 +1,7 @@ # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. -import logging +import pprint from django.utils.encoding import smart_text from django.core.mail.backends.smtp import EmailBackend diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index e01d390c09..61158bbe5d 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -88,7 +88,7 @@ class IrcBackend(TowerBaseEmailBackend): self.connection.add_global_handler("join", self.on_join) start_time = time.time() process_time = time.time() - while self.channels_sent < len(self.channels) and (process_time-start_time) < 60: + while self.channels_sent < len(self.channels) and (process_time - start_time) < 60: self.reactor.process_once(0.1) process_time = time.time() self.reactor.disconnect_all() From 2f763d0ff5c3d451b5a029a8da615ccecfb019fd Mon Sep 17 00:00:00 2001 From: Chris Church Date: Tue, 23 Feb 2016 18:26:38 -0500 Subject: [PATCH 23/38] Fix error with ad hoc command events when running in check mode. 
--- awx/main/migrations/0001_initial.py | 2 +- awx/main/models/ad_hoc_commands.py | 5 +++-- awx/main/tests/old/ad_hoc.py | 4 ++-- awx/plugins/callback/job_event_callback.py | 15 +++++++++++++++ 4 files changed, 21 insertions(+), 5 deletions(-) diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py index 79e8d8d6dd..6d2c78e454 100644 --- a/awx/main/migrations/0001_initial.py +++ b/awx/main/migrations/0001_initial.py @@ -43,7 +43,7 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('host_name', models.CharField(default=b'', max_length=1024, editable=False)), - ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable')])), + ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])), ('event_data', jsonfield.fields.JSONField(default={}, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 664269a188..b3f5bfb3e9 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -221,8 +221,9 @@ class AdHocCommandEvent(CreatedModifiedModel): ('runner_on_unreachable', _('Host Unreachable'), True), # Tower won't see no_hosts (check is done earlier without callback). #('runner_on_no_hosts', _('No Hosts Matched'), False), - # Tower should probably never see skipped (no conditionals). - #('runner_on_skipped', _('Host Skipped'), False), + # Tower will see skipped (when running in check mode for a module that + # does not support check mode). 
+ ('runner_on_skipped', _('Host Skipped'), False), # Tower does not support async for ad hoc commands. #('runner_on_async_poll', _('Host Polling'), False), #('runner_on_async_ok', _('Host Async OK'), False), diff --git a/awx/main/tests/old/ad_hoc.py b/awx/main/tests/old/ad_hoc.py index 104c67d1fa..a912f7a89b 100644 --- a/awx/main/tests/old/ad_hoc.py +++ b/awx/main/tests/old/ad_hoc.py @@ -128,8 +128,8 @@ class RunAdHocCommandTest(BaseAdHocCommandTest): self.assertFalse(ad_hoc_command.passwords_needed_to_start) self.assertTrue(ad_hoc_command.signal_start()) ad_hoc_command = AdHocCommand.objects.get(pk=ad_hoc_command.pk) - self.check_job_result(ad_hoc_command, 'failed') - self.check_ad_hoc_command_events(ad_hoc_command, 'unreachable') + self.check_job_result(ad_hoc_command, 'successful') + self.check_ad_hoc_command_events(ad_hoc_command, 'skipped') @mock.patch('awx.main.tasks.BaseTask.run_pexpect', return_value=('canceled', 0)) def test_cancel_ad_hoc_command(self, ignore): diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index 99573983b2..f12f5e8489 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -91,6 +91,8 @@ CENSOR_FIELD_WHITELIST=[ ] def censor(obj): + if not isinstance(obj, dict): + return obj if obj.get('_ansible_no_log', False): new_obj = {} for k in CENSOR_FIELD_WHITELIST: @@ -529,6 +531,7 @@ class AdHocCommandCallbackModule(BaseCallbackModule): def __init__(self): self.ad_hoc_command_id = int(os.getenv('AD_HOC_COMMAND_ID', '0')) self.rest_api_path = '/api/v1/ad_hoc_commands/%d/events/' % self.ad_hoc_command_id + self.skipped_hosts = set() super(AdHocCommandCallbackModule, self).__init__() def _log_event(self, event, **event_data): @@ -539,6 +542,18 @@ class AdHocCommandCallbackModule(BaseCallbackModule): def runner_on_file_diff(self, host, diff): pass # Ignore file diff for ad hoc commands. 
+ def runner_on_ok(self, host, res): + # When running in check mode using a module that does not support check + # mode, Ansible v1.9 will call runner_on_skipped followed by + # runner_on_ok for the same host; only capture the skipped event and + # ignore the ok event. + if host not in self.skipped_hosts: + super(AdHocCommandCallbackModule, self).runner_on_ok(host, res) + + def runner_on_skipped(self, host, item=None): + super(AdHocCommandCallbackModule, self).runner_on_skipped(host, item) + self.skipped_hosts.add(host) + if os.getenv('JOB_ID', ''): CallbackModule = JobCallbackModule From c4f15c1541919e4c58ebf75ba67e2ccc3de8570b Mon Sep 17 00:00:00 2001 From: Michael Abashian Date: Wed, 24 Feb 2016 09:43:46 -0500 Subject: [PATCH 24/38] Moved the LookUpName factory out into the standard out directory. Removed the StandardOut helper. --- awx/ui/client/src/helpers.js | 2 - awx/ui/client/src/helpers/StandardOut.js | 40 ------------------- awx/ui/client/src/standard-out/main.js | 3 +- .../lookup-name.factory.js | 32 +++++++++++++++ .../standard-out-factories/main.js | 11 +++++ 5 files changed, 45 insertions(+), 43 deletions(-) delete mode 100644 awx/ui/client/src/helpers/StandardOut.js create mode 100644 awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js create mode 100644 awx/ui/client/src/standard-out/standard-out-factories/main.js diff --git a/awx/ui/client/src/helpers.js b/awx/ui/client/src/helpers.js index 94acbd1382..fdd0c9eab3 100644 --- a/awx/ui/client/src/helpers.js +++ b/awx/ui/client/src/helpers.js @@ -21,7 +21,6 @@ import JobTemplates from "./helpers/JobTemplates"; import Jobs from "./helpers/Jobs"; import License from "./helpers/License"; import LoadConfig from "./helpers/LoadConfig"; -import StandardOut from "./helpers/StandardOut"; import Lookup from "./helpers/Lookup"; import PaginationHelpers from "./helpers/PaginationHelpers"; import Parse from "./helpers/Parse"; @@ -59,7 +58,6 @@ export Jobs, License, LoadConfig, - 
StandardOut, Lookup, PaginationHelpers, Parse, diff --git a/awx/ui/client/src/helpers/StandardOut.js b/awx/ui/client/src/helpers/StandardOut.js deleted file mode 100644 index a739bef764..0000000000 --- a/awx/ui/client/src/helpers/StandardOut.js +++ /dev/null @@ -1,40 +0,0 @@ -/************************************************* - * Copyright (c) 2016 Ansible, Inc. - * - * All Rights Reserved - *************************************************/ - - /** - * @ngdoc function - * @name helpers.function:StandardOut - * @description Helpers for the standard out views -*/ - -export default - angular.module('StandardOutHelper', []) - - .factory('LookUpName', ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) { - return function(params) { - var url = params.url, - scope_var = params.scope_var, - scope = params.scope; - Rest.setUrl(url); - Rest.get() - .success(function(data) { - if (scope_var === 'inventory_source') { - scope[scope_var + '_name'] = data.summary_fields.group.name; - } - else if (!Empty(data.name)) { - scope[scope_var + '_name'] = data.name; - } - if (!Empty(data.group)) { - // Used for inventory_source - scope.group = data.group; - } - }) - .error(function(data, status) { - ProcessErrors(scope, data, status, null, { hdr: 'Error!', - msg: 'Failed to retrieve ' + url + '. 
GET returned: ' + status }); - }); - }; - }]) diff --git a/awx/ui/client/src/standard-out/main.js b/awx/ui/client/src/standard-out/main.js index e8a0946e82..b0aafe40ad 100644 --- a/awx/ui/client/src/standard-out/main.js +++ b/awx/ui/client/src/standard-out/main.js @@ -9,8 +9,9 @@ import stdoutManagementJobsRoute from './management-jobs/standard-out-management import stdoutInventorySyncRoute from './inventory-sync/standard-out-inventory-sync.route'; import stdoutScmUpdateRoute from './scm-update/standard-out-scm-update.route'; import {JobStdoutController} from './standard-out.controller'; +import StandardOutHelper from './standard-out-factories/main'; -export default angular.module('standardOut', []) +export default angular.module('standardOut', [StandardOutHelper.name]) .controller('JobStdoutController', JobStdoutController) .run(['$stateExtender', function($stateExtender) { $stateExtender.addState(stdoutAdhocRoute); diff --git a/awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js b/awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js new file mode 100644 index 0000000000..f097a945be --- /dev/null +++ b/awx/ui/client/src/standard-out/standard-out-factories/lookup-name.factory.js @@ -0,0 +1,32 @@ +/************************************************* + * Copyright (c) 2016 Ansible, Inc. 
+ * + * All Rights Reserved + *************************************************/ + + export default + ['Rest', 'ProcessErrors', 'Empty', function(Rest, ProcessErrors, Empty) { + return function(params) { + var url = params.url, + scope_var = params.scope_var, + scope = params.scope; + Rest.setUrl(url); + Rest.get() + .success(function(data) { + if (scope_var === 'inventory_source') { + scope[scope_var + '_name'] = data.summary_fields.group.name; + } + else if (!Empty(data.name)) { + scope[scope_var + '_name'] = data.name; + } + if (!Empty(data.group)) { + // Used for inventory_source + scope.group = data.group; + } + }) + .error(function(data, status) { + ProcessErrors(scope, data, status, null, { hdr: 'Error!', + msg: 'Failed to retrieve ' + url + '. GET returned: ' + status }); + }); + }; + }]; diff --git a/awx/ui/client/src/standard-out/standard-out-factories/main.js b/awx/ui/client/src/standard-out/standard-out-factories/main.js new file mode 100644 index 0000000000..fdded8ab31 --- /dev/null +++ b/awx/ui/client/src/standard-out/standard-out-factories/main.js @@ -0,0 +1,11 @@ +/************************************************* + * Copyright (c) 2016 Ansible, Inc. 
+ * + * All Rights Reserved + *************************************************/ + +import lookUpName from './lookup-name.factory'; + +export default + angular.module('StandardOutHelper', []) + .factory('LookUpName', lookUpName); From b35d7a3c6bd86ee09ff0ad563814f7375e73495d Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 24 Feb 2016 13:26:43 -0500 Subject: [PATCH 25/38] Add notification system documentation --- docs/notification_system.md | 187 ++++++++++++++++++++++++++++++++++++ 1 file changed, 187 insertions(+) create mode 100644 docs/notification_system.md diff --git a/docs/notification_system.md b/docs/notification_system.md new file mode 100644 index 0000000000..fc8f99b9ee --- /dev/null +++ b/docs/notification_system.md @@ -0,0 +1,187 @@ +Completion pending unit tests and acceptance info and instructions. The following documentation will likely be moved to the feature epic card and reproduced in our development documentation. + +# Notification System Overview + +A Notifier is an instance of a notification type (Email, Slack, Webhook, etc) with a name, description, and a defined configuration (A few examples: Username, password, server, recipients for the Email type. Token and list of channels for Slack. Url and Headers for webhooks) + +A Notification is a manifestation of the Notifier... for example, when a job fails a notification is sent using the configuration defined by the Notifier. + +This PR implements the Notification system as outlined in the 3.0 Notifications spec. At a high level the typical flow is: + +* User creates a Notifier at `/api/v1/notifiers` +* User assigns the notifier to any of the various objects that support it (all variants of job templates as well as organizations and projects) and at the appropriate trigger level for which they want the notification (error, success, or any). For example a user may wish to assign a particular Notifier to trigger when `Job Template 1` fails. 
In which case they will associate the notifier with the job template at `/api/v1/job_templates/n/notifiers_error`. + +## Notifier hierarchy + +Notifiers assigned at certain levels will inherit notifiers defined on parent objects as such: + +* Job Templates will use notifiers defined on it as well as inheriting notifiers from the Project used by the Job Template and from the Organization that it is listed under (via the Project). +* Project Updates will use notifiers defined on the project and will inherit notifiers from the Organization associated with it. +* Inventory Updates will use notifiers defined on the Organization that it is listed under +* Ad-hoc commands will use notifiers defined on the Organization that the inventory is associated with + +## Workflow + +When a job succeeds or fails, the error or success handler will pull a list of relevant notifiers using the procedure defined above. It will then create a Notification object for each one containing relevant details about the job and then **send**s it to the destination (email addresses, slack channel(s), sms numbers, etc). These Notification objects are available as related resources on job types (jobs, inventory updates, project updates), and also at `/api/v1/notifications`. You may also see what notifications have been sent from a notifier by examining its related resources. + +Notifications can succeed or fail but that will not cause their associated job to succeed or fail. The status of the notification can be viewed at its detail endpoint `/api/v1/notifications/<id>`. + +## Testing Notifiers before using them + +Once a Notifier is created its configuration can be tested by utilizing the endpoint at `/api/v1/notifiers/<id>/test`. This will emit a test notification given the configuration defined by the Notifier. 
These test notifications will also appear in the notifications list at `/api/v1/notifications` + +# Notification Types + +The currently defined Notification Types are: + +* Email +* Slack +* Hipchat +* Pagerduty +* Twilio +* IRC +* Webhook + +Each of these has its own configuration and behavioral semantics and testing them may need to be approached in different ways. The following sections will give as much detail as possible. + +## Email + +The email notification type supports a wide variety of smtp servers and has support for ssl/tls connections. + +### Testing considerations + +The following should be performed for good acceptance: + +* Test plain authentication +* Test SSL and TLS authentication +* Verify single and multiple recipients +* Verify message subject and contents are formatted sanely. They should be plaintext but readable. + +### Test Service + +Either set up a local SMTP mail service or use a hosted one; here are some options: + +* postfix service on galaxy: https://galaxy.ansible.com/debops/postfix/ +* Mailtrap has a good free plan and should provide all of the features we need under that plan: https://mailtrap.io/ + +## Slack + +Slack is pretty easy to configure; it just needs a token which you can get from creating a bot in the integrations settings for the slack team. + +### Testing considerations + +The following should be performed for good acceptance: + +* Test single and multiple channels and good formatting of the message. Note that slack notifications only contain the minimal information. + +### Test Service + +Any user of the Ansible slack service can create a bot integration (which is how this notification is implemented). Remember to invite the bot to the channel first. + +## Hipchat + +There are several ways to integrate with hipchat. The Tower implementation uses Hipchat "Integrations". Currently you can find this at the bottom right of the main hipchat webview. From there you will select "Build your own Integration". 
After creating that it will list the `auth_token` that needs to be supplied to Tower. Some other relevant details on the fields accepted by Tower for the Hipchat notification type: + +* `color`: This will highlight the message as the given color. If set to something hipchat doesn't expect then the notification will generate an error, but it's pretty rad. I like green personally. +* `notify`: Selecting this will cause the bot to "notify" channel members. Normally it will just be stuck as a message in the chat channel without triggering anyone's notifications. This option will notify users of the channel respecting their existing notification settings (browser notification, email fallback, etc.) +* `message_from`: Along with the integration name itself this will put another label on the notification. I reckon this would be helpful if multiple services are using the same integration to distinguish them from each other. +* `api_url`: The url of the hipchat api service. If you create a team hosted by them it'll be something like `https://team.hipchat.com`. For a self-hosted service it'll be the http url that is accessible by Tower. + +### Testing considerations + +* Make sure all options behave as expected +* Test single and multiple channels +* Test that notification preferences are obeyed. +* Test formatting and appearance. Note that, like Slack, hipchat will use the minimal version of the notification. +* Test standalone hipchat service for parity with hosted solution + +### Test Service + +Hipchat allows you to create a team with limited users and message history for free, which is easy to set up and get started with. Hipchat contains a self-hosted server also which we should test for parity... it has a 30 day trial but there might be some other way to negotiate with them, redhat, or ansible itself: + +https://www.hipchat.com/server + +## Pagerduty + +Pager duty is a fairly straightforward integration. 
The user will create an API Key in the pagerduty system (this will be the token that is given to Tower) and then create a "Service" which will provide an "Integration Key" that will be given to Tower also. The other options of note are:

* `subdomain`: When you sign up for the pagerduty account you will get a unique subdomain to communicate with. For instance, if you signed up as "towertest" the web dashboard will be at towertest.pagerduty.com and you will give the Tower API "towertest" as the subdomain (not the full domain).
* `client_name`: This will be sent along with the alert content to the pagerduty service to help identify the service that is using the api key/service. This is helpful if multiple integrations are using the same api key and service.

### Testing considerations

* Make sure the alert lands on the pagerduty service
* Verify that the minimal information is displayed for the notification but also that the detail of the notification contains all fields. Pagerduty itself should understand the format in which we send the detail information.

### Test Service

Pagerduty allows you to sign up for a free trial with the service. We may also have an Ansible-wide pagerduty service that we could tie into for other things.

## Twilio

The Twilio service is a Voice and SMS automation service. Once you are signed in you'll need to create a phone number from which the message will be sent. You'll then define a "Messaging Service" under Programmable SMS and associate the number you created before with it. Note that you may need to verify this number or some other information before you are allowed to use it to send to any numbers. The Messaging Service does not need a status callback url nor does it need the ability to process inbound messages.

Under your individual (or sub) account settings you will have API credentials. The Account SID and AuthToken are what will be given to Tower. 
There are a couple of other important fields:

* `from_number`: This is the number associated with the messaging service above and must be given in the form of "+15556667777"
* `to_numbers`: This will be the list of numbers to receive the SMS and should be the 10-digit phone number.

### Testing considerations

* Test notifications with single and multiple recipients
* Verify that the minimal information is displayed for the notification. Note that this notification type does not display the full detailed notification.

### Test Service

Twilio is fairly straightforward to sign up for but I don't believe it has a free plan; a credit card will be needed to sign up for it, though the charges are fairly minimal per message.

## IRC

The Tower irc notification takes the form of an IRC bot that will connect, deliver its messages to channel(s) or individual user(s), and then disconnect. The Tower notification bot also supports SSL authentication. The Tower bot does not currently support Nickserv identification. If a channel or user does not exist or is not online then the notification will not fail; the failure scenario is reserved specifically for connectivity.

Connectivity information is straightforward:

* `server`: The host name or address of the irc server
* `port`: The irc server port
* `nickname`: The bot's nickname once it connects to the server
* `password`: IRC servers can require a password to connect. If the server doesn't require one then this should be an empty string
* `use_ssl`: Should the bot use SSL when connecting
* `targets`: A list of users and/or channels to send the notification to.

### Test Considerations

* Test both plain and SSL connectivity
* Test single and multiples of both users and channels.

### Test Service

There are a few modern irc servers to choose from but we should use a fairly full-featured service to get good test coverage. 
I recommend inspircd because it is actively maintained and pretty straightforward to configure.

## Webhook

The webhook notification type in Ansible Tower provides a simple interface to sending POSTs to a predefined web service. Tower will POST to this address using `application/json` content type with the data payload containing all relevant details in json format.
The parameters are pretty straightforward:

* `url`: The full url that will be POSTed to
* `headers`: Headers in json form where the keys and values are strings. For example: `{"Authentication": "988881adc9fc3655077dc2d4d757d480b5ea0e11", "MessageType": "Test"}`

### Test Considerations

* Test HTTP service and HTTPS, also specifically test HTTPS with a self-signed cert.
* Verify that the headers and payload are present and that the payload is json and the content type is specifically `application/json`

### Test Service

A very basic test can be performed by using `netcat`:

```
netcat -l 8099
```

and then sending the request to: `http://<host>:8099`

Note that this won't respond correctly to the notification so it will yield an error. 
I recommend using a very basic Flask application for verifying the POST request, you can see an example of mine here: + +https://gist.github.com/matburt/73bfbf85c2443f39d272 + +This demonstrates how to define an endpoint and parse headers and json content, it doesn't show configuring Flask for HTTPS but this is also pretty straightforward: http://flask.pocoo.org/snippets/111/ From 7ffe46fc746d0d46ef587b944223cc8bbeee93c2 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 15 Feb 2016 16:59:21 -0500 Subject: [PATCH 26/38] add postgres Fact model, update views, tests * awx.main.models Fact added * view host fact and timeline updated to use new Postgres Fact model instead of Mongo * Removed license set start Mongo logic * added View tests * added Model tests * Removed mongo fact unit tests * point at modified jsonbfield that supports sqlite storage driver * postgresify fact cache receiver * test OPTIONS endpoint * Note: single fact view not implemented yet. --- awx/api/generics.py | 64 +- awx/api/serializers.py | 38 +- awx/api/urls.py | 4 +- awx/api/views.py | 164 +- .../commands/run_fact_cache_receiver.py | 43 +- .../migrations/0003_auto_20160209_1615.py | 2 - awx/main/models/fact.py | 46 +- awx/main/tests/functional/ansible.json | 283 ++ .../{ => api}/test_activity_streams.py | 0 .../functional/api/test_fact_versions.py | 239 ++ .../tests/functional/api/test_fact_view.py | 158 + .../tests/functional/api/test_host_detail.py | 20 + .../tests/functional/commands/__init__.py | 0 .../tests/functional/commands/conftest.py | 109 + .../commands/test_run_fact_cache_receiver.py | 95 + awx/main/tests/functional/conftest.py | 126 +- .../models/fact/test_get_host_fact.py | 111 + .../models/fact/test_get_timeline.py | 129 + awx/main/tests/functional/packages.json | 2922 +++++++++++++++++ awx/main/tests/functional/services.json | 697 ++++ .../old/commands/run_fact_cache_receiver.py | 221 -- awx/main/tests/old/fact/fact_api.py | 242 -- awx/settings/defaults.py | 5 +- 
awx/settings/development.py | 10 +- pytest.ini | 2 + requirements/requirements.txt | 2 +- requirements/requirements_dev.txt | 1 + requirements/requirements_jenkins.txt | 1 + tools/docker-compose/start_development.sh | 1 + 29 files changed, 5025 insertions(+), 710 deletions(-) create mode 100644 awx/main/tests/functional/ansible.json rename awx/main/tests/functional/{ => api}/test_activity_streams.py (100%) create mode 100644 awx/main/tests/functional/api/test_fact_versions.py create mode 100644 awx/main/tests/functional/api/test_fact_view.py create mode 100644 awx/main/tests/functional/api/test_host_detail.py create mode 100644 awx/main/tests/functional/commands/__init__.py create mode 100644 awx/main/tests/functional/commands/conftest.py create mode 100644 awx/main/tests/functional/commands/test_run_fact_cache_receiver.py create mode 100644 awx/main/tests/functional/models/fact/test_get_host_fact.py create mode 100644 awx/main/tests/functional/models/fact/test_get_timeline.py create mode 100644 awx/main/tests/functional/packages.json create mode 100644 awx/main/tests/functional/services.json delete mode 100644 awx/main/tests/old/commands/run_fact_cache_receiver.py delete mode 100644 awx/main/tests/old/fact/fact_api.py diff --git a/awx/api/generics.py b/awx/api/generics.py index 6618263742..9f31d2bf24 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -32,7 +32,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView', 'SubListCreateAttachDetachAPIView', 'RetrieveAPIView', 'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView', 'RetrieveUpdateDestroyAPIView', 'DestroyAPIView', - 'MongoAPIView', 'MongoListAPIView'] + 'SubDetailAPIView', + 'ParentMixin',] logger = logging.getLogger('awx.api.generics') @@ -200,28 +201,6 @@ class GenericAPIView(generics.GenericAPIView, APIView): d['settings'] = settings return d - -class MongoAPIView(GenericAPIView): - - def get_parent_object(self): - parent_filter = { - self.lookup_field: 
self.kwargs.get(self.lookup_field, None), - } - return get_object_or_404(self.parent_model, **parent_filter) - - def check_parent_access(self, parent=None): - parent = parent or self.get_parent_object() - parent_access = getattr(self, 'parent_access', 'read') - if parent_access in ('read', 'delete'): - args = (self.parent_model, parent_access, parent) - else: - args = (self.parent_model, parent_access, parent, None) - if not self.request.user.can_access(*args): - raise PermissionDenied() - -class MongoListAPIView(generics.ListAPIView, MongoAPIView): - pass - class SimpleListAPIView(generics.ListAPIView, GenericAPIView): def get_queryset(self): @@ -258,7 +237,25 @@ class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView): # Base class for a list view that allows creating new objects. pass -class SubListAPIView(ListAPIView): +class ParentMixin(object): + + def get_parent_object(self): + parent_filter = { + self.lookup_field: self.kwargs.get(self.lookup_field, None), + } + return get_object_or_404(self.parent_model, **parent_filter) + + def check_parent_access(self, parent=None): + parent = parent or self.get_parent_object() + parent_access = getattr(self, 'parent_access', 'read') + if parent_access in ('read', 'delete'): + args = (self.parent_model, parent_access, parent) + else: + args = (self.parent_model, parent_access, parent, None) + if not self.request.user.can_access(*args): + raise PermissionDenied() + +class SubListAPIView(ListAPIView, ParentMixin): # Base class for a read-only sublist view. 
# Subclasses should define at least: @@ -278,22 +275,6 @@ class SubListAPIView(ListAPIView): }) return d - def get_parent_object(self): - parent_filter = { - self.lookup_field: self.kwargs.get(self.lookup_field, None), - } - return get_object_or_404(self.parent_model, **parent_filter) - - def check_parent_access(self, parent=None): - parent = parent or self.get_parent_object() - parent_access = getattr(self, 'parent_access', 'read') - if parent_access in ('read', 'delete'): - args = (self.parent_model, parent_access, parent) - else: - args = (self.parent_model, parent_access, parent, None) - if not self.request.user.can_access(*args): - raise PermissionDenied() - def get_queryset(self): parent = self.get_parent_object() self.check_parent_access(parent) @@ -430,6 +411,9 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView): else: return self.attach(request, *args, **kwargs) +class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin): + pass + class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView): pass diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 572e690ce6..4d380565ab 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -10,8 +10,6 @@ from collections import OrderedDict from dateutil import rrule from ast import literal_eval -from rest_framework_mongoengine.serializers import DocumentSerializer - # PyYAML import yaml @@ -46,8 +44,6 @@ from awx.main.conf import tower_settings from awx.api.license import feature_enabled from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, EncryptedPasswordField, VerbatimField -from awx.fact.models import * # noqa - logger = logging.getLogger('awx.api.serializers') # Fields that should be summarized regardless of object type. 
@@ -482,18 +478,19 @@ class BaseSerializer(serializers.ModelSerializer): return ret -class BaseFactSerializer(DocumentSerializer): +class BaseFactSerializer(BaseSerializer): __metaclass__ = BaseSerializerMetaclass def get_fields(self): ret = super(BaseFactSerializer, self).get_fields() if 'module' in ret and feature_enabled('system_tracking'): - choices = [(o, o.title()) for o in FactVersion.objects.all().only('module').distinct('module')] - ret['module'] = serializers.ChoiceField(source='module', choices=choices, read_only=True, required=False) + # TODO: the values_list may pull in a LOT of entries before the distinct is called + modules = Fact.objects.all().values_list('module', flat=True).distinct() + choices = [(o, o.title()) for o in modules] + ret['module'] = serializers.ChoiceField(choices=choices, read_only=True, required=False) return ret - class UnifiedJobTemplateSerializer(BaseSerializer): class Meta: @@ -2290,28 +2287,31 @@ class AuthTokenSerializer(serializers.Serializer): class FactVersionSerializer(BaseFactSerializer): - related = serializers.SerializerMethodField('get_related') class Meta: - model = FactVersion - fields = ('related', 'module', 'timestamp',) + model = Fact + fields = ('related', 'module', 'timestamp') + read_only_fields = ('*',) def get_related(self, obj): - host_obj = self.context.get('host_obj') - res = {} + res = super(FactVersionSerializer, self).get_related(obj) params = { 'datetime': timestamp_apiformat(obj.timestamp), 'module': obj.module, } - res.update(dict( - fact_view = build_url('api:host_fact_compare_view', args=(host_obj.pk,), get=params), - )) + res['fact_view'] = build_url('api:host_fact_compare_view', args=(obj.host.pk,), get=params) return res - class FactSerializer(BaseFactSerializer): class Meta: model = Fact - depth = 2 - fields = ('timestamp', 'host', 'module', 'fact') + # TODO: Consider adding in host to the fields list ? 
+ fields = ('related', 'timestamp', 'module', 'facts', 'id', 'summary_fields', 'host') + read_only_fields = ('*',) + + def get_related(self, obj): + res = super(FactSerializer, self).get_related(obj) + res['host'] = obj.host.get_absolute_url() + return res + diff --git a/awx/api/urls.py b/awx/api/urls.py index 2b3a93d852..df0e5feeaa 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -91,8 +91,8 @@ host_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'), url(r'^(?P[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'), #url(r'^(?P[0-9]+)/single_fact/$', 'host_single_fact_view'), - url(r'^(?P[0-9]+)/fact_versions/$', 'host_fact_versions_list'), - url(r'^(?P[0-9]+)/fact_view/$', 'host_fact_compare_view'), + url(r'^(?P[0-9]+)/fact_versions/$', 'host_fact_versions_list'), + url(r'^(?P[0-9]+)/fact_view/$', 'host_fact_compare_view'), ) group_urls = patterns('awx.api.views', diff --git a/awx/api/views.py b/awx/api/views.py index 9a41e779ea..06800cb92b 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -42,9 +42,6 @@ from rest_framework import status from rest_framework_yaml.parsers import YAMLParser from rest_framework_yaml.renderers import YAMLRenderer -# MongoEngine -import mongoengine - # QSStats import qsstats @@ -61,7 +58,6 @@ from awx.main.access import get_user_queryset from awx.main.ha import is_ha_environment from awx.api.authentication import TaskAuthentication, TokenGetAuthentication from awx.api.utils.decorators import paginated -from awx.api.filters import MongoFilterBackend from awx.api.generics import get_view_name from awx.api.generics import * # noqa from awx.api.license import feature_enabled, feature_exists, LicenseForbids @@ -70,7 +66,6 @@ from awx.main.utils import * # noqa from awx.api.permissions import * # noqa from awx.api.renderers import * # noqa from awx.api.serializers import * # noqa -from awx.fact.models import * # noqa from awx.main.utils import 
emit_websocket_notification from awx.main.conf import tower_settings @@ -250,32 +245,11 @@ class ApiV1ConfigView(APIView): # FIX: Log return Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST) - # Sanity check: If this license includes system tracking, make - # sure that we have a valid MongoDB to point to, and complain if - # we do not. - if ('features' in license_data and 'system_tracking' in license_data['features'] and - license_data['features']['system_tracking'] and settings.MONGO_HOST == NotImplemented): - return Response({ - 'error': 'This license supports system tracking, which ' - 'requires MongoDB to be installed. Since you are ' - 'running in an HA environment, you will need to ' - 'provide a MongoDB instance. Please re-run the ' - 'installer prior to installing this license.' - }, status=status.HTTP_400_BAD_REQUEST) - # If the license is valid, write it to disk. if license_data['valid_key']: tower_settings.LICENSE = data_actual - - # Spawn a task to ensure that MongoDB is started (or stopped) - # as appropriate, based on whether the license uses it. - if license_data['features']['system_tracking']: - mongodb_control.delay('start') - else: - mongodb_control.delay('stop') - - # Done; return the response. return Response(license_data) + return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): @@ -1125,33 +1099,6 @@ class InventoryScanJobTemplateList(SubListAPIView): qs = self.request.user.get_queryset(self.model) return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent) -class InventorySingleFactView(MongoAPIView): - - model = Fact - parent_model = Inventory - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? 
- if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') - - fact_key = request.query_params.get("fact_key", None) - fact_value = request.query_params.get("fact_value", None) - datetime_spec = request.query_params.get("timestamp", None) - module_spec = request.query_params.get("module", None) - - if fact_key is None or fact_value is None or module_spec is None: - return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST) - datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() - inventory_obj = self.get_parent_object() - fact_data = Fact.get_single_facts([h.name for h in inventory_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec) - return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else [])) - - class HostList(ListCreateAPIView): model = Host @@ -1225,88 +1172,43 @@ class HostActivityStreamList(SubListAPIView): qs = self.request.user.get_queryset(self.model) return qs.filter(Q(host=parent) | Q(inventory=parent.inventory)) -class HostFactVersionsList(MongoListAPIView): +class HostFactVersionsList(ListAPIView, ParentMixin): + model = Fact serializer_class = FactVersionSerializer parent_model = Host new_in_220 = True - filter_backends = (MongoFilterBackend,) def get_queryset(self): - from_spec = self.request.query_params.get('from', None) - to_spec = self.request.query_params.get('to', None) - module_spec = self.request.query_params.get('module', None) - if not feature_enabled("system_tracking"): raise LicenseForbids("Your license does not permit use " "of system tracking.") - host = self.get_parent_object() - self.check_parent_access(host) + from_spec = self.request.query_params.get('from', None) + to_spec = self.request.query_params.get('to', None) + module_spec = self.request.query_params.get('module', None) - try: - fact_host = FactHost.objects.get(hostname=host.name, 
inventory_id=host.inventory.pk) - except FactHost.DoesNotExist: - return None - except mongoengine.ConnectionError: - return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST) + if from_spec: + from_spec = dateutil.parser.parse(from_spec) + if to_spec: + to_spec = dateutil.parser.parse(to_spec) - kv = { - 'host': fact_host.id, - } - if module_spec is not None: - kv['module'] = module_spec - if from_spec is not None: - from_actual = dateutil.parser.parse(from_spec) - kv['timestamp__gt'] = from_actual - if to_spec is not None: - to_actual = dateutil.parser.parse(to_spec) - kv['timestamp__lte'] = to_actual - - return FactVersion.objects.filter(**kv).order_by("-timestamp") + host_obj = self.get_parent_object() + + return Fact.get_timeline(host_obj.id, module=module_spec, ts_from=from_spec, ts_to=to_spec) def list(self, *args, **kwargs): queryset = self.get_queryset() or [] - try: - serializer = FactVersionSerializer(queryset, many=True, context=dict(host_obj=self.get_parent_object())) - except mongoengine.ConnectionError: - return Response(dict(error="System Tracking Database is disabled"), status=status.HTTP_400_BAD_REQUEST) - return Response(dict(results=serializer.data)) + return Response(dict(results=self.serializer_class(queryset, many=True).data)) -class HostSingleFactView(MongoAPIView): +class HostFactCompareView(SubDetailAPIView): model = Fact - parent_model = Host - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? 
- if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') - - fact_key = request.query_params.get("fact_key", None) - fact_value = request.query_params.get("fact_value", None) - datetime_spec = request.query_params.get("timestamp", None) - module_spec = request.query_params.get("module", None) - - if fact_key is None or fact_value is None or module_spec is None: - return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST) - datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() - host_obj = self.get_parent_object() - fact_data = Fact.get_single_facts([host_obj.name], fact_key, fact_value, datetime_actual, module_spec) - return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else [])) - -class HostFactCompareView(MongoAPIView): - new_in_220 = True parent_model = Host serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - def get(self, request, *args, **kwargs): + def retrieve(self, request, *args, **kwargs): # Sanity check: Does the license allow system tracking? 
if not feature_enabled('system_tracking'): raise LicenseForbids('Your license does not permit use ' @@ -1317,10 +1219,11 @@ class HostFactCompareView(MongoAPIView): datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() host_obj = self.get_parent_object() - fact_entry = Fact.get_host_version(host_obj.name, host_obj.inventory.pk, datetime_actual, module_spec) - host_data = FactSerializer(fact_entry).data if fact_entry is not None else {} - return Response(host_data) + fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual) + if not fact_entry: + return Response({'detail': 'Fact not found'}, status=status.HTTP_404_NOT_FOUND) + return Response(self.serializer_class(instance=fact_entry).data) class GroupList(ListCreateAPIView): @@ -1470,33 +1373,6 @@ class GroupDetail(RetrieveUpdateDestroyAPIView): obj.mark_inactive_recursive() return Response(status=status.HTTP_204_NO_CONTENT) - -class GroupSingleFactView(MongoAPIView): - - model = Fact - parent_model = Group - new_in_220 = True - serializer_class = FactSerializer - filter_backends = (MongoFilterBackend,) - - def get(self, request, *args, **kwargs): - # Sanity check: Does the license allow system tracking? 
- if not feature_enabled('system_tracking'): - raise LicenseForbids('Your license does not permit use ' - 'of system tracking.') - - fact_key = request.query_params.get("fact_key", None) - fact_value = request.query_params.get("fact_value", None) - datetime_spec = request.query_params.get("timestamp", None) - module_spec = request.query_params.get("module", None) - - if fact_key is None or fact_value is None or module_spec is None: - return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST) - datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now() - group_obj = self.get_parent_object() - fact_data = Fact.get_single_facts([h.name for h in group_obj.hosts.all()], fact_key, fact_value, datetime_actual, module_spec) - return Response(dict(results=FactSerializer(fact_data).data if fact_data is not None else [])) - class InventoryGroupsList(SubListCreateAttachDetachAPIView): model = Group diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py index aa3abe1bfd..42fc25a561 100644 --- a/awx/main/management/commands/run_fact_cache_receiver.py +++ b/awx/main/management/commands/run_fact_cache_receiver.py @@ -9,9 +9,11 @@ from datetime import datetime # Django from django.core.management.base import NoArgsCommand from django.conf import settings +#from django.core.exceptions import Does # AWX -from awx.fact.models.fact import * # noqa +from awx.main.models.fact import Fact +from awx.main.models.inventory import Host from awx.main.socket import Socket logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver') @@ -47,35 +49,34 @@ class FactCacheReceiver(object): # ansible v2 will not emit this message. Thus, this can be removed at that time. 
if 'module_setup' in facts_data and len(facts_data) == 1: logger.info('Received module_setup message') - return + return None try: - host = FactHost.objects.get(hostname=hostname, inventory_id=inventory_id) - except FactHost.DoesNotExist: - logger.info('Creating new host <%s, %s>' % (hostname, inventory_id)) - host = FactHost(hostname=hostname, inventory_id=inventory_id) - host.save() - logger.info('Created new host <%s>' % (host.id)) - except FactHost.MultipleObjectsReturned: - query = "db['fact_host'].find(hostname=%s, inventory_id=%s)" % (hostname, inventory_id) - logger.warn('Database inconsistent. Multiple FactHost "%s" exist. Try the query %s to find the records.' % (hostname, query)) + host_obj = Host.objects.get(name=hostname, inventory__id=inventory_id) + except Fact.DoesNotExist: + logger.warn('Failed to intake fact. Host does not exist <%s, %s>' % (hostname, inventory_id)) return + except Fact.MultipleObjectsReturned: + logger.warn('Database inconsistent. Multiple Hosts found for <%s, %s>.' 
% (hostname, inventory_id)) + return None except Exception, e: logger.error("Exception communicating with Fact Cache Database: %s" % str(e)) - return + return None - (module, facts) = self.process_facts(facts_data) + (module_name, facts) = self.process_facts(facts_data) self.timestamp = datetime.fromtimestamp(date_key, None) - try: - # Update existing Fact entry - version_obj = FactVersion.objects.get(timestamp=self.timestamp, host=host, module=module) - Fact.objects(id=version_obj.fact.id).update_one(fact=facts) - logger.info('Updated existing fact <%s>' % (version_obj.fact.id)) - except FactVersion.DoesNotExist: + # Update existing Fact entry + fact_obj = Fact.get_host_fact(host_obj.id, module_name, self.timestamp) + if fact_obj: + fact_obj.facts = facts + fact_obj.save() + logger.info('Updated existing fact <%s>' % (fact_obj.id)) + else: # Create new Fact entry - (fact_obj, version_obj) = Fact.add_fact(self.timestamp, facts, host, module) - logger.info('Created new fact <%s, %s>' % (fact_obj.id, version_obj.id)) + fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts) + logger.info('Created new fact <%s, %s>' % (fact_obj.id, module_name)) + return fact_obj def run_receiver(self, use_processing_threads=True): with Socket('fact_cache', 'r') as facts: diff --git a/awx/main/migrations/0003_auto_20160209_1615.py b/awx/main/migrations/0003_auto_20160209_1615.py index c489c1d830..712234f638 100644 --- a/awx/main/migrations/0003_auto_20160209_1615.py +++ b/awx/main/migrations/0003_auto_20160209_1615.py @@ -17,8 +17,6 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('timestamp', models.DateTimeField(default=None, editable=False)), - ('created', models.DateTimeField(auto_now_add=True)), - ('modified', models.DateTimeField(auto_now=True)), ('module', models.CharField(max_length=128)), ('facts', jsonbfield.fields.JSONField(default={}, blank=True)), 
('host', models.ForeignKey(related_name='facts', to='main.Host')), diff --git a/awx/main/models/fact.py b/awx/main/models/fact.py index 35288c489c..16a67eb45e 100644 --- a/awx/main/models/fact.py +++ b/awx/main/models/fact.py @@ -2,9 +2,9 @@ # All Rights Reserved. from django.db import models -from jsonbfield.fields import JSONField +from django.utils.translation import ugettext_lazy as _ -from awx.main.models import Host +from jsonbfield.fields import JSONField __all__ = ('Fact', ) @@ -13,16 +13,19 @@ class Fact(models.Model): Facts are stored as JSON dictionaries. """ host = models.ForeignKey( - Host, + 'Host', related_name='facts', db_index=True, on_delete=models.CASCADE, + help_text=_('Host for the facts that the fact scan captured.'), + ) + timestamp = models.DateTimeField( + default=None, + editable=False, + help_text=_('Date and time of the corresponding fact scan gathering time.') ) - timestamp = models.DateTimeField(default=None, editable=False) - created = models.DateTimeField(editable=False, auto_now_add=True) - modified = models.DateTimeField(editable=False, auto_now=True) module = models.CharField(max_length=128) - facts = JSONField(blank=True, default={}) + facts = JSONField(blank=True, default={}, help_text=_('Arbitrary JSON structure of module facts captured at timestamp for a single host.')) class Meta: app_label = 'main' @@ -30,3 +33,32 @@ class Fact(models.Model): ["timestamp", "module", "host"], ] + @staticmethod + def get_host_fact(host_id, module, timestamp): + qs = Fact.objects.filter(host__id=host_id, module=module, timestamp__lte=timestamp).order_by('-timestamp') + if qs: + return qs[0] + else: + return None + + @staticmethod + def get_timeline(host_id, module=None, ts_from=None, ts_to=None): + kwargs = { + 'host__id': host_id, + } + if module: + kwargs['module'] = module + if ts_from and ts_to and ts_from == ts_to: + kwargs['timestamp'] = ts_from + else: + if ts_from: + kwargs['timestamp__gt'] = ts_from + if ts_to: + 
kwargs['timestamp__lte'] = ts_to + return Fact.objects.filter(**kwargs).order_by('-timestamp').only('timestamp', 'module').order_by('-timestamp', 'module') + + @staticmethod + def add_fact(host_id, module, timestamp, facts): + fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts) + fact_obj.save() + return fact_obj diff --git a/awx/main/tests/functional/ansible.json b/awx/main/tests/functional/ansible.json new file mode 100644 index 0000000000..e877df2ad1 --- /dev/null +++ b/awx/main/tests/functional/ansible.json @@ -0,0 +1,283 @@ +{ + "ansible_all_ipv4_addresses": [ + "172.17.0.7" + ], + "ansible_all_ipv6_addresses": [ + "fe80::42:acff:fe11:7" + ], + "ansible_architecture": "x86_64", + "ansible_bios_date": "12/01/2006", + "ansible_bios_version": "VirtualBox", + "ansible_cmdline": { + "BOOT_IMAGE": "/boot/vmlinuz64", + "base": true, + "console": "tty0", + "initrd": "/boot/initrd.img", + "loglevel": "3", + "noembed": true, + "nomodeset": true, + "norestore": true, + "user": "docker", + "waitusb": "10:LABEL=boot2docker-data" + }, + "ansible_date_time": { + "date": "2016-02-02", + "day": "02", + "epoch": "1454424257", + "hour": "14", + "iso8601": "2016-02-02T14:44:17Z", + "iso8601_basic": "20160202T144417348424", + "iso8601_basic_short": "20160202T144417", + "iso8601_micro": "2016-02-02T14:44:17.348496Z", + "minute": "44", + "month": "02", + "second": "17", + "time": "14:44:17", + "tz": "UTC", + "tz_offset": "+0000", + "weekday": "Tuesday", + "weekday_number": "2", + "weeknumber": "05", + "year": "2016" + }, + "ansible_default_ipv4": { + "address": "172.17.0.7", + "alias": "eth0", + "broadcast": "global", + "gateway": "172.17.0.1", + "interface": "eth0", + "macaddress": "02:42:ac:11:00:07", + "mtu": 1500, + "netmask": "255.255.0.0", + "network": "172.17.0.0", + "type": "ether" + }, + "ansible_default_ipv6": {}, + "ansible_devices": { + "sda": { + "holders": [], + "host": "", + "model": "VBOX HARDDISK", + "partitions": { + 
"sda1": { + "sectors": "510015555", + "sectorsize": 512, + "size": "243.19 GB", + "start": "1975995" + }, + "sda2": { + "sectors": "1975932", + "sectorsize": 512, + "size": "964.81 MB", + "start": "63" + } + }, + "removable": "0", + "rotational": "0", + "scheduler_mode": "deadline", + "sectors": "512000000", + "sectorsize": "512", + "size": "244.14 GB", + "support_discard": "0", + "vendor": "ATA" + }, + "sr0": { + "holders": [], + "host": "", + "model": "CD-ROM", + "partitions": {}, + "removable": "1", + "rotational": "1", + "scheduler_mode": "deadline", + "sectors": "61440", + "sectorsize": "2048", + "size": "120.00 MB", + "support_discard": "0", + "vendor": "VBOX" + } + }, + "ansible_distribution": "Ubuntu", + "ansible_distribution_major_version": "14", + "ansible_distribution_release": "trusty", + "ansible_distribution_version": "14.04", + "ansible_dns": { + "nameservers": [ + "8.8.8.8" + ] + }, + "ansible_domain": "", + "ansible_env": { + "HOME": "/root", + "HOSTNAME": "ede894599989", + "LANG": "en_US.UTF-8", + "LC_ALL": "en_US.UTF-8", + "LC_MESSAGES": "en_US.UTF-8", + "LESSCLOSE": "/usr/bin/lesspipe %s %s", + "LESSOPEN": "| /usr/bin/lesspipe %s", + "LS_COLORS": "", + "OLDPWD": "/ansible", + "PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "PWD": "/ansible/examples", + "SHLVL": "1", + "_": "/usr/local/bin/ansible", + "container": "docker" + }, + "ansible_eth0": { + "active": true, + "device": "eth0", + "ipv4": { + "address": "172.17.0.7", + "broadcast": "global", + "netmask": "255.255.0.0", + "network": "172.17.0.0" + }, + "ipv6": [ + { + "address": "fe80::42:acff:fe11:7", + "prefix": "64", + "scope": "link" + } + ], + "macaddress": "02:42:ac:11:00:07", + "mtu": 1500, + "promisc": false, + "type": "ether" + }, + "ansible_fips": false, + "ansible_form_factor": "Other", + "ansible_fqdn": "ede894599989", + "ansible_hostname": "ede894599989", + "ansible_interfaces": [ + "lo", + "eth0" + ], + "ansible_kernel": "4.1.12-boot2docker", + 
"ansible_lo": { + "active": true, + "device": "lo", + "ipv4": { + "address": "127.0.0.1", + "broadcast": "host", + "netmask": "255.0.0.0", + "network": "127.0.0.0" + }, + "ipv6": [ + { + "address": "::1", + "prefix": "128", + "scope": "host" + } + ], + "mtu": 65536, + "promisc": false, + "type": "loopback" + }, + "ansible_lsb": { + "codename": "trusty", + "description": "Ubuntu 14.04.3 LTS", + "id": "Ubuntu", + "major_release": "14", + "release": "14.04" + }, + "ansible_machine": "x86_64", + "ansible_memfree_mb": 3746, + "ansible_memory_mb": { + "nocache": { + "free": 8896, + "used": 3638 + }, + "real": { + "free": 3746, + "total": 12534, + "used": 8788 + }, + "swap": { + "cached": 0, + "free": 4048, + "total": 4048, + "used": 0 + } + }, + "ansible_memtotal_mb": 12534, + "ansible_mounts": [ + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/resolv.conf", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + }, + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/hostname", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + }, + { + "device": "/dev/sda1", + "fstype": "ext4", + "mount": "/etc/hosts", + "options": "rw,relatime,data=ordered", + "size_available": 201281392640, + "size_total": 256895700992, + "uuid": "NA" + } + ], + "ansible_nodename": "ede894599989", + "ansible_os_family": "Debian", + "ansible_pkg_mgr": "apt", + "ansible_processor": [ + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + "GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz", + 
"GenuineIntel", + "Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz" + ], + "ansible_processor_cores": 8, + "ansible_processor_count": 1, + "ansible_processor_threads_per_core": 1, + "ansible_processor_vcpus": 8, + "ansible_product_name": "VirtualBox", + "ansible_product_serial": "0", + "ansible_product_uuid": "25C5EA5A-1DF1-48D9-A2C6-81227DA153C0", + "ansible_product_version": "1.2", + "ansible_python_version": "2.7.6", + "ansible_selinux": false, + "ansible_service_mgr": "upstart", + "ansible_ssh_host_key_dsa_public": "AAAAB3NzaC1kc3MAAACBALF0xsM8UMXgSKiWNw4t19wxbxLnxQX742t/dIM0O8YLx+/lIP+Q69Dv5uoVt0zKV39eFziRlCh96qj2KYkGEJ6XfVZFnhpculL2Pv2CPpSwKuQ1vTbDO/xxUrvY+bHpfNJf9Rh69bFEE2pTsjomFPCgp8M0qGaFtwg6czSaeBONAAAAFQCGEfVtj97JiexTVRqgQITYlFp/eQAAAIEAg+S9qWn+AIb3amwVoLL/usQYOPCmZY9RVPzpkjJ6OG+HI4B7cXeauPtNTJwT0f9vGEqzf4mPpmS+aCShj6iwdmJ+cOwR5+SJlNalab3CMBoXKVLbT1J2XWFlK0szKKnoReP96IDbkAkGQ3fkm4jz0z6Wy0u6wOQVNcd4G5cwLZ4AAACAFvBm+H1LwNrwWBjWio+ayhglZ4Y25mLMEn2+dqBz0gLK5szEbft1HMPOWIVHvl6vi3v34pAJHKpxXpkLlNliTn8iw9BzCOrgP4V8sp2/85mxEuCdI1w/QERj9cHu5iS2pZ0cUwDE3pfuuGBB3IEliaJyaapowdrM8lN12jQl11E=", + "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHiYp4e9RfXpxDcEWpK4EuXPHW9++xcFI9hiB0TYAZgxEF9RIgwfucpPawFk7HIFoNc7EXQMlryilLSbg155KWM=", + "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAILclD2JaC654azEsAfcHRIOA2Ig9/Qk6MX80i/VCEdSH", + "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDeSUGxZaZsgBsezld0mj3HcbAwx6aykGnejceBjcs6lVwSGMHevofzSXIQDPYBhZoyWNl0PYAHv6AsQ8+3khd2SitUMJAuHSz1ZjgHCCGQP9ijXTKHn+lWCKA8rhLG/dwYwiouoOPZfn1G+erbKO6XiVbELrrf2RadnMGuMinESIOKVj3IunXsaGRMsDOQferOnUf7MvH7xpQnoySyQ1+p4rGruaohWG+Y2cDo7+B2FylPVbrpRDDJkfbt4J96WHx0KOdD0qzOicQP8JqDflqQPJJCWcgrvjQOSe4gXdPB6GZDtBl2qgQRwt1IgizPMm+b7Bwbd2VDe1TeWV2gT/7H", + "ansible_swapfree_mb": 4048, + "ansible_swaptotal_mb": 4048, + "ansible_system": "Linux", + "ansible_system_vendor": "innotek GmbH", + "ansible_uptime_seconds": 178398, + 
"ansible_user_dir": "/root", + "ansible_user_gecos": "root", + "ansible_user_gid": 0, + "ansible_user_id": "root", + "ansible_user_shell": "/bin/bash", + "ansible_user_uid": 0, + "ansible_userspace_architecture": "x86_64", + "ansible_userspace_bits": "64", + "ansible_virtualization_role": "guest", + "ansible_virtualization_type": "docker", + "module_setup": true +} diff --git a/awx/main/tests/functional/test_activity_streams.py b/awx/main/tests/functional/api/test_activity_streams.py similarity index 100% rename from awx/main/tests/functional/test_activity_streams.py rename to awx/main/tests/functional/api/test_activity_streams.py diff --git a/awx/main/tests/functional/api/test_fact_versions.py b/awx/main/tests/functional/api/test_fact_versions.py new file mode 100644 index 0000000000..b9ff345172 --- /dev/null +++ b/awx/main/tests/functional/api/test_fact_versions.py @@ -0,0 +1,239 @@ +# Python +import mock +import pytest +from datetime import timedelta +import urlparse +import urllib + +# AWX +from awx.main.models.fact import Fact +from awx.api.views import ( + HostFactVersionsList, +) +from awx.main.utils import timestamp_apiformat + +# Django +from django.core.urlresolvers import reverse +from django.utils import timezone + +def mock_feature_enabled(feature, bypass_database=None): + return True + +def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1): + hosts = hosts(host_count=host_count) + fact_scans(fact_scans=3, timestamp_epoch=epoch) + + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id, params=get_params) + + return (hosts[0], response) + +def check_url(url1_full, fact_known, module): + url1_split = urlparse.urlsplit(url1_full) + url1 = url1_split.path + url1_params = urlparse.parse_qsl(url1_split.query) + + url2 = reverse('api:host_fact_compare_view', args=(fact_known.host.pk,)) + url2_params = [('module', module), 
('datetime', timestamp_apiformat(fact_known.timestamp))] + + assert url1 == url2 + assert urllib.urlencode(url1_params) == urllib.urlencode(url2_params) + +def check_response_facts(facts_known, response): + for i, fact_known in enumerate(facts_known): + assert fact_known.module == response.data['results'][i]['module'] + assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp'] + check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module) + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_no_facts_db(hosts, get, user): + hosts = hosts(host_count=1) + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id) + + response_expected = { + 'results': [] + } + assert response_expected == response.data + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +def test_basic_fields(hosts, fact_scans, get, user): + epoch = timezone.now() + search = { + 'from': epoch, + 'to': epoch, + } + + (host, response) = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=search) + + results = response.data['results'] + assert 'related' in results[0] + assert 'timestamp' in results[0] + assert 'module' in results[0] + +@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) +@pytest.mark.django_db +@pytest.mark.skipif(True, reason="Options fix landed in devel but not here. 
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_related_fact_view(hosts, fact_scans, get, user):
    """Every timeline row links to the fact compare view for that fact."""
    epoch = timezone.now()

    host, response = setup_common(hosts, fact_scans, get, user, epoch=epoch)
    facts_known = Fact.get_timeline(host.id)
    assert len(facts_known) == 9
    assert len(response.data['results']) == 9

    for idx, known in enumerate(facts_known):
        check_url(response.data['results'][idx]['related']['fact_view'], known, known.module)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_multiple_hosts(hosts, fact_scans, get, user):
    """With several hosts in the group, the first host still reports its own
    9-entry timeline."""
    epoch = timezone.now()

    host, response = setup_common(hosts, fact_scans, get, user, epoch=epoch, host_count=3)
    facts_known = Fact.get_timeline(host.id)
    assert len(facts_known) == 9
    assert len(response.data['results']) == 9

    for idx, known in enumerate(facts_known):
        check_url(response.data['results'][idx]['related']['fact_view'], known, known.module)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_to_from(hosts, fact_scans, get, user):
    """A from/to window wide enough for all scans returns the full timeline."""
    epoch = timezone.now()
    window = {
        'from': epoch - timedelta(days=10),
        'to': epoch + timedelta(days=10),
    }

    host, response = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=window)
    facts_known = Fact.get_timeline(host.id, ts_from=window['from'], ts_to=window['to'])
    assert len(facts_known) == 9
    assert len(response.data['results']) == 9

    check_response_facts(facts_known, response)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_module(hosts, fact_scans, get, user):
    """Filtering by module returns only that module's scans (3 of 9)."""
    epoch = timezone.now()
    query = {
        'module': 'packages',
    }

    host, response = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=query)
    facts_known = Fact.get_timeline(host.id, module=query['module'])
    assert len(facts_known) == 3
    assert len(response.data['results']) == 3

    check_response_facts(facts_known, response)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_from(hosts, fact_scans, get, user):
    """A lower bound past the first two scans leaves one scan (3 facts)."""
    epoch = timezone.now()
    query = {
        'from': epoch + timedelta(days=1),
    }

    host, response = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=query)
    facts_known = Fact.get_timeline(host.id, ts_from=query['from'])
    assert len(facts_known) == 3
    assert len(response.data['results']) == 3

    check_response_facts(facts_known, response)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_param_to(hosts, fact_scans, get, user):
    """An upper bound covering the first two scans returns 6 facts."""
    epoch = timezone.now()
    query = {
        'to': epoch + timedelta(days=1),
    }

    host, response = setup_common(hosts, fact_scans, get, user, epoch=epoch, get_params=query)
    facts_known = Fact.get_timeline(host.id, ts_to=query['to'])
    assert len(facts_known) == 6
    assert len(response.data['results']) == 6

    check_response_facts(facts_known, response)

def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
    """Shared driver: add user_obj to team_obj and GET the fact-versions
    list as that user; return the raw response."""
    hosts = hosts(host_count=1)
    fact_scans(fact_scans=1)

    team_obj.users.add(user_obj)

    url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,))
    return get(HostFactVersionsList, user_obj, url, pk=hosts[0].id)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
    """A plain (non-super, non-admin) user is denied."""
    non_super = user('bob', False)
    response = _test_user_access_control(hosts, fact_scans, get, non_super, team)

    assert response.status_code == 403
    assert response.data['detail'] == "You do not have permission to perform this action."

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
    """A superuser may read the fact versions list."""
    superuser = user('bob', True)
    response = _test_user_access_control(hosts, fact_scans, get, superuser, team)

    assert response.status_code == 200

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
    """An admin of the owning organization may read the list."""
    org_admin = user('johnson', False)
    organization.admins.add(org_admin)

    response = _test_user_access_control(hosts, fact_scans, get, org_admin, team)

    assert response.status_code == 200
def mock_feature_enabled(feature, bypass_database=None):
    """Stand-in for awx.api.views.feature_enabled that reports every feature
    as licensed."""
    return True

# TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it
def find_fact(facts, host_id, module_name, timestamp):
    """Return the entry in facts matching (host_id, module_name, timestamp).

    Raises RuntimeError when no fact matches.
    """
    for f in facts:
        if f.host_id == host_id and f.module == module_name and f.timestamp == timestamp:
            return f
    # The original passed the values as a *second* RuntimeError argument
    # instead of %-formatting the message, so the details never appeared in
    # the formatted error text.
    raise RuntimeError('fact <%s, %s, %s> not found in %s' % (host_id, module_name, timestamp, facts))

def setup_common(hosts, fact_scans, get, user, epoch=None, module_name='ansible', get_params=None):
    """Create one host plus one fact scan and GET the fact compare view as
    the admin user.

    Returns (fact_known, response) where fact_known is the scan's fact for
    module_name at epoch.
    """
    # Resolve defaults at call time: `epoch=timezone.now()` in the signature
    # is evaluated once at import, and a dict default is shared across calls.
    if epoch is None:
        epoch = timezone.now()
    if get_params is None:
        get_params = {}
    hosts = hosts(host_count=1)
    facts = fact_scans(fact_scans=1, timestamp_epoch=epoch)

    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id, params=get_params)

    fact_known = find_fact(facts, hosts[0].id, module_name, epoch)
    return (fact_known, response)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_content(hosts, fact_scans, get, user, fact_ansible_json):
    """The compare view returns the stored ansible facts verbatim."""
    known, response = setup_common(hosts, fact_scans, get, user)

    assert known.host_id == response.data['host']
    assert fact_ansible_json == json.loads(response.data['facts'])
    assert timestamp_apiformat(known.timestamp) == response.data['timestamp']
    assert known.module == response.data['module']

def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name):
    """Shared driver: query the compare view by module and verify facts,
    timestamp, and module in the response."""
    params = {
        'module': module_name,
    }
    known, response = setup_common(hosts, fact_scans, get, user,
                                   module_name=module_name, get_params=params)

    assert fact_json == json.loads(response.data['facts'])
    assert timestamp_apiformat(known.timestamp) == response.data['timestamp']
    assert module_name == response.data['module']

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json):
    """module=packages returns the packages scan."""
    _test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages')

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json):
    """module=services returns the services scan."""
    _test_search_by_module(hosts, fact_scans, get, user, fact_services_json, 'services')

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json):
    """Searching by both module and datetime pins the exact fact row."""
    epoch = timezone.now()
    module_name = 'packages'

    known, response = setup_common(hosts, fact_scans, get, user,
                                   module_name=module_name, epoch=epoch,
                                   get_params=dict(module=module_name, datetime=epoch))

    assert known.id == response.data['id']

def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
    """Shared driver: add user_obj to team_obj and GET the compare view as
    that user; return the raw response."""
    hosts = hosts(host_count=1)
    fact_scans(fact_scans=1)

    team_obj.users.add(user_obj)

    url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,))
    return get(HostFactCompareView, user_obj, url, pk=hosts[0].id)

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_normal_user_403(hosts, fact_scans, get, user, team):
    """A plain (non-super, non-admin) user is denied."""
    non_super = user('bob', False)
    response = _test_user_access_control(hosts, fact_scans, get, non_super, team)

    assert response.status_code == 403
    assert response.data['detail'] == "You do not have permission to perform this action."

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_super_user_ok(hosts, fact_scans, get, user, team):
    """A superuser may read the compare view."""
    superuser = user('bob', True)
    response = _test_user_access_control(hosts, fact_scans, get, superuser, team)

    assert response.status_code == 200

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
    """An admin of the owning organization may read the compare view."""
    org_admin = user('johnson', False)
    organization.admins.add(org_admin)

    response = _test_user_access_control(hosts, fact_scans, get, org_admin, team)

    assert response.status_code == 200

@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.ac
@pytest.mark.django_db
def test_user_admin_403(organization, organizations, hosts, fact_scans, get, user, team):
    """An admin of a *different* organization is denied."""
    org_admin = user('johnson', False)
    other_orgs = organizations(1)
    other_orgs[0].admins.add(org_admin)

    response = _test_user_access_control(hosts, fact_scans, get, org_admin, team)

    assert response.status_code == 403
@pytest.mark.django_db
def test_basic_fields(hosts, fact_scans, get, user):
    """Host detail exposes a related fact_versions link for the host."""
    hosts = hosts(host_count=1)

    url = reverse('api:host_detail', args=(hosts[0].pk,))
    response = get(HostDetail, user('admin', True), url, pk=hosts[0].id)

    assert 'related' in response.data
    assert 'fact_versions' in response.data['related']
    assert reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) == response.data['related']['fact_versions']

@pytest.fixture
def fact_msg_base(inventory, hosts):
    """Base fact-cache message: one host in the inventory, empty facts."""
    host_objs = hosts(1)
    return {
        'host': host_objs[0].name,
        # NOTE(review): mktime() interprets its argument as *local* time but
        # utcnow() is UTC, so date_key is off by the UTC offset on non-UTC
        # machines -- confirm whether that is intended.
        'date_key': time.mktime(datetime.utcnow().timetuple()),
        'facts': {},
        'inventory_id': inventory.id
    }

@pytest.fixture
def fact_msg_small(fact_msg_base):
    """fact_msg_base with a small hand-written packages/services/ansible
    payload attached."""
    fact_msg_base['facts'] = {
        'packages': {
            "accountsservice": [
                {
                    "architecture": "amd64",
                    "name": "accountsservice",
                    "source": "apt",
                    "version": "0.6.35-0ubuntu7.1"
                }
            ],
            "acpid": [
                {
                    "architecture": "amd64",
                    "name": "acpid",
                    "source": "apt",
                    "version": "1:2.0.21-1ubuntu2"
                }
            ],
            "adduser": [
                {
                    "architecture": "all",
                    "name": "adduser",
                    "source": "apt",
                    "version": "3.113+nmu3ubuntu3"
                }
            ],
        },
        'services': [
            {
                "name": "acpid",
                "source": "sysv",
                "state": "running"
            },
            {
                "name": "apparmor",
                "source": "sysv",
                "state": "stopped"
            },
            {
                "name": "atd",
                "source": "sysv",
                "state": "running"
            },
            {
                "name": "cron",
                "source": "sysv",
                "state": "running"
            }
        ],
        'ansible': {
            'ansible_fact_simple': 'hello world',
            'ansible_fact_complex': {
                'foo': 'bar',
                'hello': [
                    'scooby',
                    'dooby',
                    'doo'
                ]
            },
        }
    }
    return fact_msg_base


'''
Facts sent from ansible to our fact cache receiver.
The fact module type is implicit, i.e.:

{
    'fact_module_name': { ... },
}

Note: The 'ansible' module is an exception to this rule.
It is NOT nested in a dict, and thus does NOT contain a first-level
key of 'ansible'.
'''

@pytest.fixture
def fact_msg_ansible(fact_msg_base, fact_ansible_json):
    """Message carrying the full ansible fixture facts (unnested, per the
    note above about the 'ansible' module)."""
    fact_msg_base['facts'] = fact_ansible_json
    return fact_msg_base

@pytest.fixture
def fact_msg_packages(fact_msg_base, fact_packages_json):
    """Message carrying the packages fixture facts under the 'packages' key."""
    fact_msg_base['facts']['packages'] = fact_packages_json
    return fact_msg_base

@pytest.fixture
def fact_msg_services(fact_msg_base, fact_services_json):
    """Message carrying the services fixture facts under the 'services' key."""
    fact_msg_base['facts']['services'] = fact_services_json
    return fact_msg_base
# TODO: Check that timestamp and other attributes are as expected
def check_process_fact_message_module(fact_returned, data, module_name):
    """Assert that processing the message `data` stored exactly one Fact and
    that fact_returned matches it (host, facts payload, timestamp, module)."""
    date_key = data['date_key']

    # Ensure 1, and only 1, fact created
    timestamp = datetime.fromtimestamp(date_key, None)
    assert 1 == Fact.objects.all().count()

    host_obj = Host.objects.get(name=data['host'], inventory__id=data['inventory_id'])
    assert host_obj is not None
    fact_known = Fact.get_host_fact(host_obj.id, module_name, timestamp)
    assert fact_known is not None
    assert fact_known == fact_returned

    assert host_obj == fact_returned.host
    # The 'ansible' module's facts are sent unnested; every other module's
    # facts are nested under the module name.
    if module_name == 'ansible':
        assert data['facts'] == fact_returned.facts
    else:
        assert data['facts'][module_name] == fact_returned.facts
    assert timestamp == fact_returned.timestamp
    assert module_name == fact_returned.module

@pytest.mark.django_db
def test_process_fact_message_ansible(fact_msg_ansible):
    """An 'ansible' message stores one fact with the unnested payload."""
    receiver = FactCacheReceiver()
    fact_returned = receiver.process_fact_message(fact_msg_ansible)

    check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')

@pytest.mark.django_db
def test_process_fact_message_packages(fact_msg_packages):
    """A 'packages' message stores one fact with the nested payload."""
    receiver = FactCacheReceiver()
    fact_returned = receiver.process_fact_message(fact_msg_packages)

    check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')

@pytest.mark.django_db
def test_process_fact_message_services(fact_msg_services):
    """A 'services' message stores one fact with the nested payload."""
    receiver = FactCacheReceiver()
    fact_returned = receiver.process_fact_message(fact_msg_services)

    check_process_fact_message_module(fact_returned, fact_msg_services, 'services')

'''
We piggyback our fact sending onto the Ansible fact interface, where the
facts field is a single JSON blob of all the facts.  That makes it hard to
decipher which facts are new/changed.  Because of this, we handle the same
fact module data being sent multiple times and just keep the newest version.
'''
@pytest.mark.django_db
def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
    """Resending the same module's facts updates the stored fact in place
    rather than creating a second row."""
    epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
    fact_scans(fact_scans=1, timestamp_epoch=epoch)
    key = 'ansible.overwrite'
    value = 'hello world'

    receiver = FactCacheReceiver()
    receiver.process_fact_message(fact_msg_ansible)

    fact_msg_ansible['facts'][key] = value
    fact_returned = receiver.process_fact_message(fact_msg_ansible)

    fact_obj = Fact.objects.get(id=fact_returned.id)
    assert key in fact_obj.facts
    assert json.loads(fact_obj.facts) == fact_msg_ansible['facts']
    assert value == json.loads(fact_obj.facts)[key]

# Ensure that the message flows from the socket through to process_fact_message()
@pytest.mark.django_db
def test_run_receiver(mocker, fact_msg_ansible):
    """run_receiver() hands each message from the socket to
    process_fact_message()."""
    mocker.patch("awx.main.socket.Socket.listen", return_value=[fact_msg_ansible])

    receiver = FactCacheReceiver()
    mocker.patch.object(receiver, 'process_fact_message', return_value=None)

    receiver.run_receiver(use_processing_threads=False)

    receiver.process_fact_message.assert_called_once_with(fact_msg_ansible)
django.utils import timezone from django.contrib.auth.models import User +from django.conf import settings + from rest_framework.test import ( APIRequestFactory, force_authenticate, ) +''' +Disable all django model signals. +''' +@pytest.fixture(scope="session", autouse=False) +def disable_signals(): + mocked = mock.patch('django.dispatch.Signal.send', autospec=True) + mocked.start() + +''' +FIXME: Not sure how "far" just setting the BROKER_URL will get us. +We may need to incluence CELERY's configuration like we do in the old unit tests (see base.py) + +Allows django signal code to execute without the need for redis +''' +@pytest.fixture(scope="session", autouse=True) +def celery_memory_broker(): + settings.BROKER_URL='memory://localhost/' + @pytest.fixture def user(): def u(name, is_superuser=False): @@ -35,9 +64,23 @@ def post(): @pytest.fixture def get(): - def rf(_cls, _user, _url, pk=None, middleware=None): + def rf(_cls, _user, _url, pk=None, params={}, middleware=None): view = _cls.as_view() - request = APIRequestFactory().get(_url, format='json') + request = APIRequestFactory().get(_url, params, format='json') + if middleware: + middleware.process_request(request) + force_authenticate(request, user=_user) + response = view(request, pk=pk) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def options(): + def rf(_cls, _user, _url, pk=None, params={}, middleware=None): + view = _cls.as_view() + request = APIRequestFactory().options(_url, params, format='json') if middleware: middleware.process_request(request) force_authenticate(request, user=_user) @@ -54,3 +97,80 @@ def instance(settings): @pytest.fixture def organization(instance): return Organization.objects.create(name="test-org", description="test-org-desc") + +@pytest.fixture +def organizations(instance): + def rf(organization_count=1): + orgs = [] + for i in xrange(0, organization_count): + o = 
Organization.objects.create(name="test-org-%d" % i, description="test-org-desc") + orgs.append(o) + return orgs + return rf + +@pytest.fixture +def inventory(organization): + return organization.inventories.create(name="test-inv") + +@pytest.fixture +def group(inventory): + return inventory.groups.create(name='group-1') + +@pytest.fixture +def hosts(group): + def rf(host_count=1): + hosts = [] + for i in xrange(0, host_count): + name = '%s-host-%s' % (group.name, i) + (host, created) = group.inventory.hosts.get_or_create(name=name) + if created: + group.hosts.add(host) + hosts.append(host) + return hosts + return rf + +@pytest.fixture +def fact_scans(group, fact_ansible_json, fact_packages_json, fact_services_json): + def rf(fact_scans=1, timestamp_epoch=timezone.now()): + facts_json = {} + facts = [] + module_names = ['ansible', 'services', 'packages'] + timestamp_current = timestamp_epoch + + facts_json['ansible'] = fact_ansible_json + facts_json['packages'] = fact_packages_json + facts_json['services'] = fact_services_json + + for i in xrange(0, fact_scans): + for host in group.hosts.all(): + for module_name in module_names: + facts.append(Fact.objects.create(host=host, timestamp=timestamp_current, module=module_name, facts=facts_json[module_name])) + timestamp_current += timedelta(days=1) + return facts + return rf + +def _fact_json(module_name): + current_dir = os.path.dirname(os.path.realpath(__file__)) + with open('%s/%s.json' % (current_dir, module_name)) as f: + return json.load(f) + +@pytest.fixture +def fact_ansible_json(): + return _fact_json('ansible') + +@pytest.fixture +def fact_packages_json(): + return _fact_json('packages') + +@pytest.fixture +def fact_services_json(): + return _fact_json('services') + +@pytest.fixture +def team(organization): + return organization.teams.create(name='test-team') + +@pytest.fixture +def permission_inv_read(organization, inventory, team): + return Permission.objects.create(inventory=inventory, team=team, 
permission_type=PERM_INVENTORY_READ) + diff --git a/awx/main/tests/functional/models/fact/test_get_host_fact.py b/awx/main/tests/functional/models/fact/test_get_host_fact.py new file mode 100644 index 0000000000..2569417496 --- /dev/null +++ b/awx/main/tests/functional/models/fact/test_get_host_fact.py @@ -0,0 +1,111 @@ +import pytest + +from datetime import timedelta +from django.utils import timezone + +from awx.main.models import Fact + +@pytest.mark.django_db +def test_newest_scan_exact(hosts, fact_scans): + epoch = timezone.now() + hosts = hosts(host_count=2) + facts = fact_scans(fact_scans=3, timestamp_epoch=epoch) + + fact_known = None + for f in facts: + if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch: + fact_known = f + break + fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', epoch) + + assert fact_found == fact_known + +''' +Show me the most recent state of the system at any point of time. +or, said differently +For any timestamp, get the first scan that is <= the timestamp. +''' + +''' +Ensure most recent scan run is the scan returned. +Query by future date. +''' +@pytest.mark.django_db +def test_newest_scan_less_than(hosts, fact_scans): + epoch = timezone.now() + timestamp_future = epoch + timedelta(days=10) + hosts = hosts(host_count=2) + facts = fact_scans(fact_scans=3, timestamp_epoch=epoch) + + fact_known = None + for f in facts: + if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=2): + fact_known = f + break + assert fact_known is not None + + fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_future) + + assert fact_found == fact_known + +''' +Tests query Fact that is in the middle of the fact scan timeline, but not an exact timestamp. 
+''' +@pytest.mark.django_db +def test_query_middle_of_timeline(hosts, fact_scans): + epoch = timezone.now() + timestamp_middle = epoch + timedelta(days=1, hours=3) + hosts = hosts(host_count=2) + facts = fact_scans(fact_scans=3, timestamp_epoch=epoch) + + fact_known = None + for f in facts: + if f.host_id == hosts[0].id and f.module == 'ansible' and f.timestamp == epoch + timedelta(days=1): + fact_known = f + break + assert fact_known is not None + + fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_middle) + + assert fact_found == fact_known + +''' +Query time less than any fact scan. Should return None +''' +@pytest.mark.django_db +def test_query_result_empty(hosts, fact_scans): + epoch = timezone.now() + timestamp_less = epoch - timedelta(days=1) + hosts = hosts(host_count=2) + fact_scans(fact_scans=3, timestamp_epoch=epoch) + + fact_found = Fact.get_host_fact(hosts[0].id, 'ansible', timestamp_less) + + assert fact_found is None + +''' +Query by fact module other than 'ansible' +''' +@pytest.mark.django_db +def test_by_module(hosts, fact_scans): + epoch = timezone.now() + hosts = hosts(host_count=2) + facts = fact_scans(fact_scans=3, timestamp_epoch=epoch) + + fact_known_services = None + fact_known_packages = None + for f in facts: + if f.host_id == hosts[0].id: + if f.module == 'services' and f.timestamp == epoch: + fact_known_services = f + elif f.module == 'packages' and f.timestamp == epoch: + fact_known_packages = f + assert fact_known_services is not None + assert fact_known_packages is not None + + fact_found_services = Fact.get_host_fact(hosts[0].id, 'services', epoch) + fact_found_packages = Fact.get_host_fact(hosts[0].id, 'packages', epoch) + + assert fact_found_services == fact_known_services + assert fact_found_packages == fact_known_packages + diff --git a/awx/main/tests/functional/models/fact/test_get_timeline.py b/awx/main/tests/functional/models/fact/test_get_timeline.py new file mode 100644 index 0000000000..da3360340a --- 
/dev/null +++ b/awx/main/tests/functional/models/fact/test_get_timeline.py @@ -0,0 +1,129 @@ +import pytest + +from datetime import timedelta +from django.utils import timezone + +from awx.main.models import Fact + +def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now(), module_name='ansible', ts_known=None): + hosts = hosts(host_count=2) + facts = fact_scans(fact_scans=3, timestamp_epoch=epoch) + + facts_known = [] + for f in facts: + if f.host.id == hosts[0].id: + if module_name and f.module != module_name: + continue + if ts_known and f.timestamp != ts_known: + continue + facts_known.append(f) + fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to) + return (facts_known, fact_objs) + +@pytest.mark.django_db +def test_all(hosts, fact_scans): + epoch = timezone.now() + ts_from = epoch - timedelta(days=1) + ts_to = epoch + timedelta(days=10) + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch) + assert 9 == len(facts_known) + assert 9 == len(fact_objs) + +@pytest.mark.django_db +def test_all_ansible(hosts, fact_scans): + epoch = timezone.now() + ts_from = epoch - timedelta(days=1) + ts_to = epoch + timedelta(days=10) + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch) + assert 3 == len(facts_known) + assert 3 == len(fact_objs) + + for i in xrange(len(facts_known) - 1, 0): + assert facts_known[i].id == fact_objs[i].id + +@pytest.mark.django_db +def test_empty_db(hosts, fact_scans): + hosts = hosts(host_count=2) + epoch = timezone.now() + ts_from = epoch - timedelta(days=1) + ts_to = epoch + timedelta(days=10) + + fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to) + + assert 0 == len(fact_objs) + +@pytest.mark.django_db +def test_no_results(hosts, fact_scans): + epoch = timezone.now() + ts_from = epoch - timedelta(days=100) + ts_to = epoch - timedelta(days=50) + + (facts_known, 
fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch) + assert 0 == len(fact_objs) + +@pytest.mark.django_db +def test_exact_same_equal(hosts, fact_scans): + epoch = timezone.now() + ts_to = ts_from = epoch + timedelta(days=1) + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch) + assert 1 == len(facts_known) + assert 1 == len(fact_objs) + + assert facts_known[0].id == fact_objs[0].id + +@pytest.mark.django_db +def test_exact_from_exclusive_to_inclusive(hosts, fact_scans): + epoch = timezone.now() + ts_from = epoch + timedelta(days=1) + ts_to = epoch + timedelta(days=2) + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, ts_known=ts_to, epoch=epoch) + + assert 1 == len(facts_known) + assert 1 == len(fact_objs) + + assert facts_known[0].id == fact_objs[0].id + +@pytest.mark.django_db +def test_to_lte(hosts, fact_scans): + epoch = timezone.now() + ts_to = epoch + timedelta(days=1) + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=None, ts_to=ts_to, epoch=epoch) + facts_known_subset = filter(lambda x: x.timestamp <= ts_to, facts_known) + + assert 2 == len(facts_known_subset) + assert 2 == len(fact_objs) + + for i in xrange(0, len(fact_objs)): + assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id + +@pytest.mark.django_db +def test_from_gt(hosts, fact_scans): + epoch = timezone.now() + ts_from = epoch + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from=ts_from, ts_to=None, epoch=epoch) + facts_known_subset = filter(lambda x: x.timestamp > ts_from, facts_known) + + assert 2 == len(facts_known_subset) + assert 2 == len(fact_objs) + + for i in xrange(0, len(fact_objs)): + assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id + +@pytest.mark.django_db +def test_no_ts(hosts, fact_scans): + epoch = timezone.now() + + (facts_known, fact_objs) = setup_common(hosts, fact_scans, 
ts_from=None, ts_to=None, epoch=epoch) + assert 3 == len(facts_known) + assert 3 == len(fact_objs) + + for i in xrange(len(facts_known) - 1, 0): + assert facts_known[i].id == fact_objs[i].id + + diff --git a/awx/main/tests/functional/packages.json b/awx/main/tests/functional/packages.json new file mode 100644 index 0000000000..7bc735d06f --- /dev/null +++ b/awx/main/tests/functional/packages.json @@ -0,0 +1,2922 @@ +[ + { + "name": "kbd", + "source": "rpm", + "epoch": null, + "version": "1.15.5", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "centos-release", + "source": "rpm", + "epoch": null, + "version": "7", + "release": "0.1406.el7.centos.2.3", + "arch": "x86_64" + }, + { + "name": "postfix", + "source": "rpm", + "epoch": 2, + "version": "2.10.1", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "filesystem", + "source": "rpm", + "epoch": null, + "version": "3.2", + "release": "18.el7", + "arch": "x86_64" + }, + { + "name": "tuned", + "source": "rpm", + "epoch": null, + "version": "2.3.0", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "ncurses-base", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "noarch" + }, + { + "name": "aic94xx-firmware", + "source": "rpm", + "epoch": null, + "version": "30", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "kbd-misc", + "source": "rpm", + "epoch": null, + "version": "1.15.5", + "release": "10.el7", + "arch": "noarch" + }, + { + "name": "irqbalance", + "source": "rpm", + "epoch": 2, + "version": "1.0.6", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "tzdata", + "source": "rpm", + "epoch": null, + "version": "2014b", + "release": "1.el7", + "arch": "noarch" + }, + { + "name": "openssh-clients", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "glibc-common", + "source": "rpm", + "epoch": null, + "version": "2.17", + "release": "55.el7", + 
"arch": "x86_64" + }, + { + "name": "authconfig", + "source": "rpm", + "epoch": null, + "version": "6.2.8", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "xz-libs", + "source": "rpm", + "epoch": null, + "version": "5.1.2", + "release": "8alpha.el7", + "arch": "x86_64" + }, + { + "name": "btrfs-progs", + "source": "rpm", + "epoch": null, + "version": "3.12", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "ncurses-libs", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "x86_64" + }, + { + "name": "sudo", + "source": "rpm", + "epoch": null, + "version": "1.8.6p7", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libsepol", + "source": "rpm", + "epoch": null, + "version": "2.1.9", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "iprutils", + "source": "rpm", + "epoch": null, + "version": "2.3.16", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libselinux", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "iwl6000g2b-firmware", + "source": "rpm", + "epoch": null, + "version": "17.168.5.2", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "info", + "source": "rpm", + "epoch": null, + "version": "5.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl7260-firmware", + "source": "rpm", + "epoch": null, + "version": "22.0.7.0", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "dbus-libs", + "source": "rpm", + "epoch": 1, + "version": "1.6.12", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libertas-sd8787-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "sed", + "source": "rpm", + "epoch": null, + "version": "4.2.2", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "iwl6050-firmware", + "source": "rpm", + "epoch": null, + "version": 
"41.28.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "chkconfig", + "source": "rpm", + "epoch": null, + "version": "1.3.61", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl1000-firmware", + "source": "rpm", + "epoch": 1, + "version": "39.31.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "nspr", + "source": "rpm", + "epoch": null, + "version": "4.10.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl6000-firmware", + "source": "rpm", + "epoch": null, + "version": "9.221.4.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "nss-util", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "iwl2000-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "grep", + "source": "rpm", + "epoch": null, + "version": "2.16", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "iwl5150-firmware", + "source": "rpm", + "epoch": null, + "version": "8.24.2.2", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "gawk", + "source": "rpm", + "epoch": null, + "version": "4.0.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl4965-firmware", + "source": "rpm", + "epoch": null, + "version": "228.61.2.24", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "expat", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "iwl3160-firmware", + "source": "rpm", + "epoch": null, + "version": "22.0.7.0", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libattr", + "source": "rpm", + "epoch": null, + "version": "2.4.46", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "iwl3945-firmware", + "source": "rpm", + "epoch": null, + "version": "15.32.2.9", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libcap", + "source": 
"rpm", + "epoch": null, + "version": "2.22", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libsemanage-python", + "source": "rpm", + "epoch": null, + "version": "2.1.10", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "libxml2", + "source": "rpm", + "epoch": null, + "version": "2.9.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-HTTP-Tiny", + "source": "rpm", + "epoch": null, + "version": "0.033", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libgcrypt", + "source": "rpm", + "epoch": null, + "version": "1.5.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Perldoc", + "source": "rpm", + "epoch": null, + "version": "3.20", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "lua", + "source": "rpm", + "epoch": null, + "version": "5.1.4", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "perl-Encode", + "source": "rpm", + "epoch": null, + "version": "2.51", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "pkgconfig", + "source": "rpm", + "epoch": 1, + "version": "0.27.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Usage", + "source": "rpm", + "epoch": null, + "version": "1.63", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "shared-mime-info", + "source": "rpm", + "epoch": null, + "version": "1.1", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-Exporter", + "source": "rpm", + "epoch": null, + "version": "5.68", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libcap-ng", + "source": "rpm", + "epoch": null, + "version": "0.7.3", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-Time-Local", + "source": "rpm", + "epoch": null, + "version": "1.2300", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "libidn", + "source": "rpm", + "epoch": null, + "version": "1.28", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Carp", + "source": "rpm", 
+ "epoch": null, + "version": "1.26", + "release": "244.el7", + "arch": "noarch" + }, + { + "name": "gmp", + "source": "rpm", + "epoch": 1, + "version": "5.1.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-PathTools", + "source": "rpm", + "epoch": null, + "version": "3.40", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "p11-kit", + "source": "rpm", + "epoch": null, + "version": "0.18.7", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-macros", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "libdaemon", + "source": "rpm", + "epoch": null, + "version": "0.14", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-File-Temp", + "source": "rpm", + "epoch": null, + "version": "0.23.01", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "libcroco", + "source": "rpm", + "epoch": null, + "version": "0.6.8", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-threads-shared", + "source": "rpm", + "epoch": null, + "version": "1.43", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libnl3-cli", + "source": "rpm", + "epoch": null, + "version": "3.2.21", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "perl-Filter", + "source": "rpm", + "epoch": null, + "version": "1.49", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "cyrus-sasl-lib", + "source": "rpm", + "epoch": null, + "version": "2.1.26", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "perl-Getopt-Long", + "source": "rpm", + "epoch": null, + "version": "2.40", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "groff-base", + "source": "rpm", + "epoch": null, + "version": "1.22.2", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "04bbaa7b", + "release": "4c881cbf", + "arch": null + }, + { + "name": "libunistring", + "source": "rpm", 
+ "epoch": null, + "version": "0.9.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "libicu", + "source": "rpm", + "epoch": null, + "version": "50.1.2", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "diffutils", + "source": "rpm", + "epoch": null, + "version": "3.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libdnet", + "source": "rpm", + "epoch": null, + "version": "1.12", + "release": "13.1.el7", + "arch": "x86_64" + }, + { + "name": "xz", + "source": "rpm", + "epoch": null, + "version": "5.1.2", + "release": "8alpha.el7", + "arch": "x86_64" + }, + { + "name": "open-vm-tools", + "source": "rpm", + "epoch": null, + "version": "9.4.0", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "sysvinit-tools", + "source": "rpm", + "epoch": null, + "version": "2.88", + "release": "14.dsf.el7", + "arch": "x86_64" + }, + { + "name": "open-vm-tools-deploypkg", + "source": "rpm", + "epoch": 0, + "version": "9.4.10", + "release": "3", + "arch": "x86_64" + }, + { + "name": "newt", + "source": "rpm", + "epoch": null, + "version": "0.52.15", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-kitchen", + "source": "rpm", + "epoch": null, + "version": "1.1.1", + "release": "5.el7", + "arch": "noarch" + }, + { + "name": "ethtool", + "source": "rpm", + "epoch": 2, + "version": "3.8", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "yum-utils", + "source": "rpm", + "epoch": null, + "version": "1.1.31", + "release": "29.el7", + "arch": "noarch" + }, + { + "name": "hostname", + "source": "rpm", + "epoch": null, + "version": "3.13", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "emacs-filesystem", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "gdbm", + "source": "rpm", + "epoch": null, + "version": "1.10", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "emacs-common", + "source": "rpm", + "epoch": 1, + 
"version": "24.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "less", + "source": "rpm", + "epoch": null, + "version": "458", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "epel-release", + "source": "rpm", + "epoch": null, + "version": "7", + "release": "5", + "arch": "noarch" + }, + { + "name": "p11-kit-trust", + "source": "rpm", + "epoch": null, + "version": "0.18.7", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pkcs11-helper", + "source": "rpm", + "epoch": null, + "version": "1.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "nettle", + "source": "rpm", + "epoch": null, + "version": "2.7.1", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "easy-rsa", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "1.el7", + "arch": "noarch" + }, + { + "name": "gobject-introspection", + "source": "rpm", + "epoch": null, + "version": "1.36.0", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libevent", + "source": "rpm", + "epoch": null, + "version": "2.0.21", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "gsettings-desktop-schemas", + "source": "rpm", + "epoch": null, + "version": "3.8.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "dhcp-libs", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "acl", + "source": "rpm", + "epoch": null, + "version": "2.2.51", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "dhcp", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "elfutils-libs", + "source": "rpm", + "epoch": null, + "version": "0.158", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "bind-license", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "noarch" + }, + { + "name": "mozjs17", + "source": "rpm", + "epoch": null, + 
"version": "17.0.0", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "bind", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "pinentry", + "source": "rpm", + "epoch": null, + "version": "0.8.1", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "bind-libs-lite", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "libselinux-utils", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "audit-libs", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libedit", + "source": "rpm", + "epoch": null, + "version": "3.0", + "release": "12.20121213cvs.el7", + "arch": "x86_64" + }, + { + "name": "audit-libs-python", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libmodman", + "source": "rpm", + "epoch": null, + "version": "2.0.1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "checkpolicy", + "source": "rpm", + "epoch": null, + "version": "2.1.12", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "glib-networking", + "source": "rpm", + "epoch": null, + "version": "2.36.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "setools-libs", + "source": "rpm", + "epoch": null, + "version": "3.3.7", + "release": "46.el7", + "arch": "x86_64" + }, + { + "name": "snappy", + "source": "rpm", + "epoch": null, + "version": "1.1.0", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "audit", + "source": "rpm", + "epoch": null, + "version": "2.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "numactl-libs", + "source": "rpm", + "epoch": null, + "version": "2.0.9", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "autogen-libopts", + "source": "rpm", + 
"epoch": null, + "version": "5.18", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libverto", + "source": "rpm", + "epoch": null, + "version": "0.2.5", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "ntp", + "source": "rpm", + "epoch": null, + "version": "4.2.6p5", + "release": "19.el7.centos.3", + "arch": "x86_64" + }, + { + "name": "libsemanage", + "source": "rpm", + "epoch": null, + "version": "2.1.10", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "krb5-libs", + "source": "rpm", + "epoch": null, + "version": "1.11.3", + "release": "49.el7", + "arch": "x86_64" + }, + { + "name": "openldap", + "source": "rpm", + "epoch": null, + "version": "2.4.39", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "cracklib", + "source": "rpm", + "epoch": null, + "version": "2.9.0", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libmount", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "systemd-libs", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libpwquality", + "source": "rpm", + "epoch": null, + "version": "1.2.3", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pam", + "source": "rpm", + "epoch": null, + "version": "1.1.8", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "shadow-utils", + "source": "rpm", + "epoch": 2, + "version": "4.1.5.1", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "util-linux", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "python-libs", + "source": "rpm", + "epoch": null, + "version": "2.7.5", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "python-decorator", + "source": "rpm", + "epoch": null, + "version": "3.4.0", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "gettext", + "source": "rpm", + "epoch": 
null, + "version": "0.18.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "centos-logos", + "source": "rpm", + "epoch": null, + "version": "70.0.6", + "release": "1.el7.centos", + "arch": "noarch" + }, + { + "name": "libselinux-python", + "source": "rpm", + "epoch": null, + "version": "2.2.2", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "python-slip-dbus", + "source": "rpm", + "epoch": null, + "version": "0.4.0", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "pyliblzma", + "source": "rpm", + "epoch": null, + "version": "0.5.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "yum-metadata-parser", + "source": "rpm", + "epoch": null, + "version": "1.1.4", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "pyxattr", + "source": "rpm", + "epoch": null, + "version": "0.5.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "python-backports-ssl_match_hostname", + "source": "rpm", + "epoch": null, + "version": "3.4.0.2", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "python-pyudev", + "source": "rpm", + "epoch": null, + "version": "0.15", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "binutils", + "source": "rpm", + "epoch": null, + "version": "2.23.52.0.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "logrotate", + "source": "rpm", + "epoch": null, + "version": "3.8.6", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "alsa-lib", + "source": "rpm", + "epoch": null, + "version": "1.0.27.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "mariadb-libs", + "source": "rpm", + "epoch": 1, + "version": "5.5.35", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libcurl", + "source": "rpm", + "epoch": null, + "version": "7.29.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "python-urlgrabber", + "source": "rpm", + "epoch": null, + "version": "3.10", + "release": "4.el7", + "arch": "noarch" + }, + { + 
"name": "rpm-libs", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "fipscheck", + "source": "rpm", + "epoch": null, + "version": "1.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "json-c", + "source": "rpm", + "epoch": null, + "version": "0.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "virt-what", + "source": "rpm", + "epoch": null, + "version": "1.13", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libnetfilter_conntrack", + "source": "rpm", + "epoch": null, + "version": "1.0.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "iproute", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "qrencode-libs", + "source": "rpm", + "epoch": null, + "version": "3.4.1", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper-libs", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "systemd", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "systemd-sysv", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "iputils", + "source": "rpm", + "epoch": null, + "version": "20121221", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper-event-libs", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager-glib", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "polkit-pkla-compat", + "source": "rpm", + "epoch": null, + "version": "0.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "cronie-anacron", + "source": "rpm", + "epoch": null, + "version": "1.4.11", + 
"release": "11.el7", + "arch": "x86_64" + }, + { + "name": "crontabs", + "source": "rpm", + "epoch": null, + "version": "1.11", + "release": "6.20121102git.el7", + "arch": "noarch" + }, + { + "name": "device-mapper-event", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "avahi-libs", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "avahi-autoipd", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "dnsmasq", + "source": "rpm", + "epoch": null, + "version": "2.66", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "ebtables", + "source": "rpm", + "epoch": null, + "version": "2.0.10", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "libpciaccess", + "source": "rpm", + "epoch": null, + "version": "0.13.1", + "release": "4.1.el7", + "arch": "x86_64" + }, + { + "name": "fxload", + "source": "rpm", + "epoch": null, + "version": "2002_04_11", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "alsa-tools-firmware", + "source": "rpm", + "epoch": null, + "version": "1.0.27", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libpipeline", + "source": "rpm", + "epoch": null, + "version": "1.2.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "gnupg2", + "source": "rpm", + "epoch": null, + "version": "2.0.22", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "rpm-python", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "pygpgme", + "source": "rpm", + "epoch": null, + "version": "0.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "hardlink", + "source": "rpm", + "epoch": 1, + "version": "1.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "dracut-network", + "source": "rpm", + "epoch": null, + 
"version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "plymouth", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "teamd", + "source": "rpm", + "epoch": null, + "version": "1.9", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "libestr", + "source": "rpm", + "epoch": null, + "version": "0.1.9", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager-tui", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "kernel", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "dracut-config-rescue", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "man-db", + "source": "rpm", + "epoch": null, + "version": "2.6.3", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "lvm2", + "source": "rpm", + "epoch": 7, + "version": "2.02.105", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "libgcc", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "setup", + "source": "rpm", + "epoch": null, + "version": "2.8.71", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "microcode_ctl", + "source": "rpm", + "epoch": 2, + "version": "2.1", + "release": "7.1.el7", + "arch": "x86_64" + }, + { + "name": "basesystem", + "source": "rpm", + "epoch": null, + "version": "10.0", + "release": "7.el7.centos", + "arch": "noarch" + }, + { + "name": "biosdevname", + "source": "rpm", + "epoch": null, + "version": "0.5.0", + "release": "10.el7", + "arch": "x86_64" + }, + { + "name": "linux-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": 
"openssh-server", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "parted", + "source": "rpm", + "epoch": null, + "version": "3.1", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "nss-softokn-freebl", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "selinux-policy-targeted", + "source": "rpm", + "epoch": null, + "version": "3.12.1", + "release": "153.el7", + "arch": "noarch" + }, + { + "name": "glibc", + "source": "rpm", + "epoch": null, + "version": "2.17", + "release": "55.el7", + "arch": "x86_64" + }, + { + "name": "xfsprogs", + "source": "rpm", + "epoch": null, + "version": "3.2.0", + "release": "0.10.alpha2.el7", + "arch": "x86_64" + }, + { + "name": "libstdc++", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "e2fsprogs", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "bash", + "source": "rpm", + "epoch": null, + "version": "4.2.45", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "passwd", + "source": "rpm", + "epoch": null, + "version": "0.79", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "pcre", + "source": "rpm", + "epoch": null, + "version": "8.32", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "tar", + "source": "rpm", + "epoch": 2, + "version": "1.26", + "release": "29.el7", + "arch": "x86_64" + }, + { + "name": "zlib", + "source": "rpm", + "epoch": null, + "version": "1.2.7", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "rootfiles", + "source": "rpm", + "epoch": null, + "version": "8.1", + "release": "11.el7", + "arch": "noarch" + }, + { + "name": "iwl6000g2a-firmware", + "source": "rpm", + "epoch": null, + "version": "17.168.5.3", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libuuid", 
+ "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "iwl2030-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "popt", + "source": "rpm", + "epoch": null, + "version": "1.13", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "iwl100-firmware", + "source": "rpm", + "epoch": null, + "version": "39.31.5.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libcom_err", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iwl135-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libdb", + "source": "rpm", + "epoch": null, + "version": "5.3.21", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "iwl105-firmware", + "source": "rpm", + "epoch": null, + "version": "18.168.6.1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "bzip2-libs", + "source": "rpm", + "epoch": null, + "version": "1.0.6", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "libertas-sd8686-firmware", + "source": "rpm", + "epoch": null, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "readline", + "source": "rpm", + "epoch": null, + "version": "6.2", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "ivtv-firmware", + "source": "rpm", + "epoch": 2, + "version": "20080701", + "release": "26.el7", + "arch": "noarch" + }, + { + "name": "elfutils-libelf", + "source": "rpm", + "epoch": null, + "version": "0.158", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libertas-usb8388-firmware", + "source": "rpm", + "epoch": 2, + "version": "20140213", + "release": "0.3.git4164c23.el7", + "arch": "noarch" + }, + { + "name": "libgpg-error", + "source": "rpm", + "epoch": null, + 
"version": "1.12", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "iwl5000-firmware", + "source": "rpm", + "epoch": null, + "version": "8.83.5.1_1", + "release": "34.el7", + "arch": "noarch" + }, + { + "name": "libacl", + "source": "rpm", + "epoch": null, + "version": "2.2.51", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "f4a80eb5", + "release": "53a7ff4b", + "arch": null + }, + { + "name": "cpio", + "source": "rpm", + "epoch": null, + "version": "2.11", + "release": "22.el7", + "arch": "x86_64" + }, + { + "name": "perl-parent", + "source": "rpm", + "epoch": 1, + "version": "0.225", + "release": "244.el7", + "arch": "noarch" + }, + { + "name": "libnl3", + "source": "rpm", + "epoch": null, + "version": "3.2.21", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "perl-podlators", + "source": "rpm", + "epoch": null, + "version": "2.5.1", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "sqlite", + "source": "rpm", + "epoch": null, + "version": "3.7.17", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Escapes", + "source": "rpm", + "epoch": 1, + "version": "1.04", + "release": "285.el7", + "arch": "noarch" + }, + { + "name": "libffi", + "source": "rpm", + "epoch": null, + "version": "3.0.13", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "perl-Text-ParseWords", + "source": "rpm", + "epoch": null, + "version": "3.29", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "glib2", + "source": "rpm", + "epoch": null, + "version": "2.36.3", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "perl-Storable", + "source": "rpm", + "epoch": null, + "version": "2.45", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "dbus-glib", + "source": "rpm", + "epoch": null, + "version": "0.100", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "perl-constant", + "source": "rpm", + "epoch": null, + 
"version": "1.27", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "findutils", + "source": "rpm", + "epoch": 1, + "version": "4.5.11", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Socket", + "source": "rpm", + "epoch": null, + "version": "2.010", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "file-libs", + "source": "rpm", + "epoch": null, + "version": "5.11", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "perl-Time-HiRes", + "source": "rpm", + "epoch": 4, + "version": "1.9725", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "libtasn1", + "source": "rpm", + "epoch": null, + "version": "3.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-Scalar-List-Utils", + "source": "rpm", + "epoch": null, + "version": "1.27", + "release": "248.el7", + "arch": "x86_64" + }, + { + "name": "tcp_wrappers-libs", + "source": "rpm", + "epoch": null, + "version": "7.6", + "release": "77.el7", + "arch": "x86_64" + }, + { + "name": "perl-Pod-Simple", + "source": "rpm", + "epoch": 1, + "version": "3.28", + "release": "4.el7", + "arch": "noarch" + }, + { + "name": "file", + "source": "rpm", + "epoch": null, + "version": "5.11", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "perl-File-Path", + "source": "rpm", + "epoch": null, + "version": "2.09", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "nss-softokn", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "perl-threads", + "source": "rpm", + "epoch": null, + "version": "1.87", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libassuan", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "perl-libs", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "e2fsprogs-libs", + "source": "rpm", + "epoch": null, + 
"version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "perl", + "source": "rpm", + "epoch": 4, + "version": "5.16.3", + "release": "285.el7", + "arch": "x86_64" + }, + { + "name": "which", + "source": "rpm", + "epoch": null, + "version": "2.20", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "66fd4949", + "release": "4803fe57", + "arch": null + }, + { + "name": "libgomp", + "source": "rpm", + "epoch": null, + "version": "4.8.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "procps-ng", + "source": "rpm", + "epoch": null, + "version": "3.3.10", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "kmod-libs", + "source": "rpm", + "epoch": null, + "version": "14", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "net-tools", + "source": "rpm", + "epoch": null, + "version": "2.0", + "release": "0.17.20131004git.el7", + "arch": "x86_64" + }, + { + "name": "libnfnetlink", + "source": "rpm", + "epoch": null, + "version": "1.0.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "libmspack", + "source": "rpm", + "epoch": 0, + "version": "0.0.20040308alpha", + "release": "2", + "arch": "x86_64" + }, + { + "name": "slang", + "source": "rpm", + "epoch": null, + "version": "2.2.4", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "python-chardet", + "source": "rpm", + "epoch": null, + "version": "2.0.1", + "release": "7.el7", + "arch": "noarch" + }, + { + "name": "lzo", + "source": "rpm", + "epoch": null, + "version": "2.06", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "yum", + "source": "rpm", + "epoch": null, + "version": "3.4.3", + "release": "125.el7.centos", + "arch": "noarch" + }, + { + "name": "pciutils-libs", + "source": "rpm", + "epoch": null, + "version": "3.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "gpm-libs", + "source": "rpm", + "epoch": null, + "version": "1.20.7", 
+ "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "keyutils-libs", + "source": "rpm", + "epoch": null, + "version": "1.5.8", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "liblockfile", + "source": "rpm", + "epoch": null, + "version": "1.08", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "gettext-libs", + "source": "rpm", + "epoch": null, + "version": "0.18.2.1", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "emacs-nox", + "source": "rpm", + "epoch": 1, + "version": "24.3", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libteam", + "source": "rpm", + "epoch": null, + "version": "1.9", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "gpg-pubkey", + "source": "rpm", + "epoch": null, + "version": "352c64e5", + "release": "52ae6884", + "arch": null + }, + { + "name": "ca-certificates", + "source": "rpm", + "epoch": null, + "version": "2013.1.95", + "release": "71.el7", + "arch": "noarch" + }, + { + "name": "openvpn", + "source": "rpm", + "epoch": null, + "version": "2.3.7", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "gnutls", + "source": "rpm", + "epoch": null, + "version": "3.1.18", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "strace", + "source": "rpm", + "epoch": null, + "version": "4.8", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "ModemManager-glib", + "source": "rpm", + "epoch": null, + "version": "1.1.0", + "release": "6.git20130913.el7", + "arch": "x86_64" + }, + { + "name": "tmux", + "source": "rpm", + "epoch": null, + "version": "1.8", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "vim-minimal", + "source": "rpm", + "epoch": 2, + "version": "7.4.160", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "dhcp-common", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "device-mapper-persistent-data", + "source": "rpm", + "epoch": null, + 
"version": "0.3.2", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "dhclient", + "source": "rpm", + "epoch": 12, + "version": "4.2.5", + "release": "36.el7.centos", + "arch": "x86_64" + }, + { + "name": "libdb-utils", + "source": "rpm", + "epoch": null, + "version": "5.3.21", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "bind-libs", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "libss", + "source": "rpm", + "epoch": null, + "version": "1.42.9", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "bind-utils", + "source": "rpm", + "epoch": 32, + "version": "9.9.4", + "release": "18.el7_1.1", + "arch": "x86_64" + }, + { + "name": "make", + "source": "rpm", + "epoch": 1, + "version": "3.82", + "release": "21.el7", + "arch": "x86_64" + }, + { + "name": "nmap-ncat", + "source": "rpm", + "epoch": 2, + "version": "6.40", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "freetype", + "source": "rpm", + "epoch": null, + "version": "2.4.11", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "policycoreutils", + "source": "rpm", + "epoch": null, + "version": "2.2.5", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "ncurses", + "source": "rpm", + "epoch": null, + "version": "5.9", + "release": "13.20130511.el7", + "arch": "x86_64" + }, + { + "name": "python-IPy", + "source": "rpm", + "epoch": null, + "version": "0.75", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "libproxy", + "source": "rpm", + "epoch": null, + "version": "0.4.11", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libcgroup", + "source": "rpm", + "epoch": null, + "version": "0.41", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libsoup", + "source": "rpm", + "epoch": null, + "version": "2.42.2", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "policycoreutils-python", + "source": "rpm", + "epoch": null, + 
"version": "2.2.5", + "release": "15.el7", + "arch": "x86_64" + }, + { + "name": "libndp", + "source": "rpm", + "epoch": null, + "version": "1.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "iftop", + "source": "rpm", + "epoch": null, + "version": "1.0", + "release": "0.7.pre4.el7", + "arch": "x86_64" + }, + { + "name": "libsysfs", + "source": "rpm", + "epoch": null, + "version": "2.1.0", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "ntpdate", + "source": "rpm", + "epoch": null, + "version": "4.2.6p5", + "release": "19.el7.centos.3", + "arch": "x86_64" + }, + { + "name": "ustr", + "source": "rpm", + "epoch": null, + "version": "1.0.4", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "nss-tools", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "openssl-libs", + "source": "rpm", + "epoch": 1, + "version": "1.0.1e", + "release": "34.el7", + "arch": "x86_64" + }, + { + "name": "gzip", + "source": "rpm", + "epoch": null, + "version": "1.5", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "cracklib-dicts", + "source": "rpm", + "epoch": null, + "version": "2.9.0", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "nss", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "libuser", + "source": "rpm", + "epoch": null, + "version": "0.60", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "coreutils", + "source": "rpm", + "epoch": null, + "version": "8.22", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "libblkid", + "source": "rpm", + "epoch": null, + "version": "2.23.2", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "libutempter", + "source": "rpm", + "epoch": null, + "version": "1.1.6", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "nss-sysinit", + "source": "rpm", + "epoch": null, + "version": "3.15.4", + 
"release": "6.el7", + "arch": "x86_64" + }, + { + "name": "python", + "source": "rpm", + "epoch": null, + "version": "2.7.5", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "dbus-python", + "source": "rpm", + "epoch": null, + "version": "1.1.1", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "pygobject3-base", + "source": "rpm", + "epoch": null, + "version": "3.8.2", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-slip", + "source": "rpm", + "epoch": null, + "version": "0.4.0", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "python-iniparse", + "source": "rpm", + "epoch": null, + "version": "0.4", + "release": "9.el7", + "arch": "noarch" + }, + { + "name": "newt-python", + "source": "rpm", + "epoch": null, + "version": "0.52.15", + "release": "4.el7", + "arch": "x86_64" + }, + { + "name": "python-configobj", + "source": "rpm", + "epoch": null, + "version": "4.7.2", + "release": "7.el7", + "arch": "noarch" + }, + { + "name": "python-backports", + "source": "rpm", + "epoch": null, + "version": "1.0", + "release": "6.el7", + "arch": "noarch" + }, + { + "name": "python-setuptools", + "source": "rpm", + "epoch": null, + "version": "0.9.8", + "release": "3.el7", + "arch": "noarch" + }, + { + "name": "grubby", + "source": "rpm", + "epoch": null, + "version": "8.28", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "kmod", + "source": "rpm", + "epoch": null, + "version": "14", + "release": "9.el7", + "arch": "x86_64" + }, + { + "name": "openssl", + "source": "rpm", + "epoch": 1, + "version": "1.0.1e", + "release": "34.el7", + "arch": "x86_64" + }, + { + "name": "plymouth-core-libs", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "libssh2", + "source": "rpm", + "epoch": null, + "version": "1.4.3", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "python-pycurl", + "source": "rpm", + "epoch": null, + 
"version": "7.19.0", + "release": "17.el7", + "arch": "x86_64" + }, + { + "name": "curl", + "source": "rpm", + "epoch": null, + "version": "7.29.0", + "release": "19.el7", + "arch": "x86_64" + }, + { + "name": "rpm", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "selinux-policy", + "source": "rpm", + "epoch": null, + "version": "3.12.1", + "release": "153.el7", + "arch": "noarch" + }, + { + "name": "fipscheck-lib", + "source": "rpm", + "epoch": null, + "version": "1.4.1", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "openssh", + "source": "rpm", + "epoch": null, + "version": "6.4p1", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "dmidecode", + "source": "rpm", + "epoch": 1, + "version": "2.12", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "libmnl", + "source": "rpm", + "epoch": null, + "version": "1.0.3", + "release": "7.el7", + "arch": "x86_64" + }, + { + "name": "iptables", + "source": "rpm", + "epoch": null, + "version": "1.4.21", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "libpcap", + "source": "rpm", + "epoch": 14, + "version": "1.5.3", + "release": "3.el7", + "arch": "x86_64" + }, + { + "name": "device-mapper", + "source": "rpm", + "epoch": 7, + "version": "1.02.84", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "cryptsetup-libs", + "source": "rpm", + "epoch": null, + "version": "1.6.3", + "release": "2.el7", + "arch": "x86_64" + }, + { + "name": "dbus", + "source": "rpm", + "epoch": 1, + "version": "1.6.12", + "release": "8.el7", + "arch": "x86_64" + }, + { + "name": "libgudev1", + "source": "rpm", + "epoch": null, + "version": "208", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "initscripts", + "source": "rpm", + "epoch": null, + "version": "9.49.17", + "release": "1.el7", + "arch": "x86_64" + }, + { + "name": "polkit", + "source": "rpm", + "epoch": null, + "version": "0.112", + "release": 
"5.el7", + "arch": "x86_64" + }, + { + "name": "os-prober", + "source": "rpm", + "epoch": null, + "version": "1.58", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "cronie", + "source": "rpm", + "epoch": null, + "version": "1.4.11", + "release": "11.el7", + "arch": "x86_64" + }, + { + "name": "grub2-tools", + "source": "rpm", + "epoch": 1, + "version": "2.02", + "release": "0.2.10.el7.centos.1", + "arch": "x86_64" + }, + { + "name": "lvm2-libs", + "source": "rpm", + "epoch": 7, + "version": "2.02.105", + "release": "14.el7", + "arch": "x86_64" + }, + { + "name": "avahi", + "source": "rpm", + "epoch": null, + "version": "0.6.31", + "release": "13.el7", + "arch": "x86_64" + }, + { + "name": "wpa_supplicant", + "source": "rpm", + "epoch": 1, + "version": "2.0", + "release": "12.el7", + "arch": "x86_64" + }, + { + "name": "ppp", + "source": "rpm", + "epoch": null, + "version": "2.4.5", + "release": "33.el7", + "arch": "x86_64" + }, + { + "name": "hwdata", + "source": "rpm", + "epoch": null, + "version": "0.252", + "release": "7.3.el7", + "arch": "noarch" + }, + { + "name": "libdrm", + "source": "rpm", + "epoch": null, + "version": "2.4.50", + "release": "1.1.el7", + "arch": "x86_64" + }, + { + "name": "alsa-firmware", + "source": "rpm", + "epoch": null, + "version": "1.0.27", + "release": "2.el7", + "arch": "noarch" + }, + { + "name": "kpartx", + "source": "rpm", + "epoch": null, + "version": "0.4.9", + "release": "66.el7", + "arch": "x86_64" + }, + { + "name": "pth", + "source": "rpm", + "epoch": null, + "version": "2.0.7", + "release": "22.el7", + "arch": "x86_64" + }, + { + "name": "rpm-build-libs", + "source": "rpm", + "epoch": null, + "version": "4.11.1", + "release": "16.el7", + "arch": "x86_64" + }, + { + "name": "gpgme", + "source": "rpm", + "epoch": null, + "version": "1.3.2", + "release": "5.el7", + "arch": "x86_64" + }, + { + "name": "yum-plugin-fastestmirror", + "source": "rpm", + "epoch": null, + "version": "1.1.31", + "release": "24.el7", + 
"arch": "noarch" + }, + { + "name": "kernel-tools-libs", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "dracut", + "source": "rpm", + "epoch": null, + "version": "033", + "release": "161.el7", + "arch": "x86_64" + }, + { + "name": "plymouth-scripts", + "source": "rpm", + "epoch": null, + "version": "0.8.9", + "release": "0.10.20140113.el7.centos", + "arch": "x86_64" + }, + { + "name": "jansson", + "source": "rpm", + "epoch": null, + "version": "2.4", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "NetworkManager", + "source": "rpm", + "epoch": 1, + "version": "0.9.9.1", + "release": "13.git20140326.4dba720.el7", + "arch": "x86_64" + }, + { + "name": "rsyslog", + "source": "rpm", + "epoch": null, + "version": "7.4.7", + "release": "6.el7", + "arch": "x86_64" + }, + { + "name": "kexec-tools", + "source": "rpm", + "epoch": null, + "version": "2.0.4", + "release": "32.el7.centos", + "arch": "x86_64" + }, + { + "name": "grub2", + "source": "rpm", + "epoch": 1, + "version": "2.02", + "release": "0.2.10.el7.centos.1", + "arch": "x86_64" + }, + { + "name": "kernel-tools", + "source": "rpm", + "epoch": null, + "version": "3.10.0", + "release": "123.el7", + "arch": "x86_64" + }, + { + "name": "firewalld", + "source": "rpm", + "epoch": null, + "version": "0.3.9", + "release": "7.el7", + "arch": "noarch" + } +] diff --git a/awx/main/tests/functional/services.json b/awx/main/tests/functional/services.json new file mode 100644 index 0000000000..a86bf4a875 --- /dev/null +++ b/awx/main/tests/functional/services.json @@ -0,0 +1,697 @@ +[ + { + "source": "sysv", + "state": "running", + "name": "iprdump" + }, + { + "source": "sysv", + "state": "running", + "name": "iprinit" + }, + { + "source": "sysv", + "state": "running", + "name": "iprupdate" + }, + { + "source": "sysv", + "state": "stopped", + "name": "netconsole" + }, + { + "source": "sysv", + "state": "running", + "name": "network" + }, + { 
+ "source": "systemd", + "state": "stopped", + "name": "arp-ethers.service" + }, + { + "source": "systemd", + "state": "running", + "name": "auditd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "autovt@.service" + }, + { + "source": "systemd", + "state": "running", + "name": "avahi-daemon.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "blk-availability.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "brandbot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "console-getty.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "console-shell.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "cpupower.service" + }, + { + "source": "systemd", + "state": "running", + "name": "crond.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.fedoraproject.FirewallD1.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.Avahi.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.hostname1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.locale1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.login1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.machine1.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.NetworkManager.service" + }, + { + "source": "systemd", + "state": "running", + "name": "dbus-org.freedesktop.nm-dispatcher.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus-org.freedesktop.timedate1.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dbus.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "debug-shell.service" + }, + { + "source": "systemd", + "state": 
"running", + "name": "dhcpd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dhcpd6.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dhcrelay.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dm-event.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dnsmasq.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-cmdline.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-initqueue.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-mount.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-mount.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-pivot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-trigger.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-pre-udev.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "dracut-shutdown.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "ebtables.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "emergency.service" + }, + { + "source": "systemd", + "state": "running", + "name": "firewalld.service" + }, + { + "source": "systemd", + "state": "running", + "name": "getty@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "halt-local.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-cleanup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-parse-etc.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-switch-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "initrd-udevadm-cleanup-db.service" + }, + { + "source": "systemd", + "state": "running", + "name": "irqbalance.service" + }, + { + "source": "systemd", + 
"state": "running", + "name": "kdump.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "kmod-static-nodes.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "lvm2-lvmetad.service" + }, + { + "source": "systemd", + "state": "running", + "name": "lvm2-monitor.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "lvm2-pvscan@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "messagebus.service" + }, + { + "source": "systemd", + "state": "running", + "name": "microcode.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "named-setup-rndc.service" + }, + { + "source": "systemd", + "state": "running", + "name": "named.service" + }, + { + "source": "systemd", + "state": "running", + "name": "NetworkManager-dispatcher.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "NetworkManager-wait-online.service" + }, + { + "source": "systemd", + "state": "running", + "name": "NetworkManager.service" + }, + { + "source": "systemd", + "state": "running", + "name": "ntpd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "ntpdate.service" + }, + { + "source": "systemd", + "state": "running", + "name": "openvpn@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-halt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-kexec.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-poweroff.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-quit-wait.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-quit.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-read-write.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-reboot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-start.service" + 
}, + { + "source": "systemd", + "state": "stopped", + "name": "plymouth-switch-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "polkit.service" + }, + { + "source": "systemd", + "state": "running", + "name": "postfix.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "quotaon.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rc-local.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rdisc.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rescue.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-autorelabel-mark.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-autorelabel.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-configure.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-dmesg.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-domainname.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-import-state.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-loadmodules.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "rhel-readonly.service" + }, + { + "source": "systemd", + "state": "running", + "name": "rsyslog.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "serial-getty@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "sshd-keygen.service" + }, + { + "source": "systemd", + "state": "running", + "name": "sshd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "sshd@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-console.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-ask-password-plymouth.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": 
"systemd-ask-password-wall.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-backlight@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-binfmt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-fsck-root.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-fsck@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-halt.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hibernate.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hostnamed.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-hybrid-sleep.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-initctl.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-journal-flush.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-journald.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-kexec.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-localed.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-logind.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-machined.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-modules-load.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-nspawn@.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-poweroff.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-quotacheck.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-random-seed.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-collect.service" + }, + { + "source": "systemd", + 
"state": "stopped", + "name": "systemd-readahead-done.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-drop.service" + }, + { + "source": "systemd", + "state": "running", + "name": "systemd-readahead-replay.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-reboot.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-remount-fs.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-shutdownd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-suspend.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-sysctl.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-timedated.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-clean.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-setup-dev.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-tmpfiles-setup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udev-settle.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udev-trigger.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-udevd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-update-utmp-runlevel.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-update-utmp.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-user-sessions.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "systemd-vconsole-setup.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "teamd@.service" + }, + { + "source": "systemd", + "state": "running", + "name": "tuned.service" + }, + { + "source": "systemd", + "state": "running", + "name": 
"vmtoolsd.service" + }, + { + "source": "systemd", + "state": "stopped", + "name": "wpa_supplicant.service" + } +] diff --git a/awx/main/tests/old/commands/run_fact_cache_receiver.py b/awx/main/tests/old/commands/run_fact_cache_receiver.py deleted file mode 100644 index 7dedf7657a..0000000000 --- a/awx/main/tests/old/commands/run_fact_cache_receiver.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved - -# Python -import time -from datetime import datetime -import mock -import unittest2 as unittest -from copy import deepcopy -from mock import MagicMock - -# AWX -from awx.main.tests.base import BaseTest -from awx.fact.tests.base import MongoDBRequired -from command_base import BaseCommandMixin -from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver -from awx.fact.models.fact import * # noqa - -__all__ = ['RunFactCacheReceiverUnitTest', 'RunFactCacheReceiverFunctionalTest'] - -TEST_MSG_BASE = { - 'host': 'hostname1', - 'date_key': time.mktime(datetime.utcnow().timetuple()), - 'facts' : { }, - 'inventory_id': 1 -} - -TEST_MSG_MODULES = { - 'packages': { - "accountsservice": [ - { - "architecture": "amd64", - "name": "accountsservice", - "source": "apt", - "version": "0.6.35-0ubuntu7.1" - } - ], - "acpid": [ - { - "architecture": "amd64", - "name": "acpid", - "source": "apt", - "version": "1:2.0.21-1ubuntu2" - } - ], - "adduser": [ - { - "architecture": "all", - "name": "adduser", - "source": "apt", - "version": "3.113+nmu3ubuntu3" - } - ], - }, - 'services': [ - { - "name": "acpid", - "source": "sysv", - "state": "running" - }, - { - "name": "apparmor", - "source": "sysv", - "state": "stopped" - }, - { - "name": "atd", - "source": "sysv", - "state": "running" - }, - { - "name": "cron", - "source": "sysv", - "state": "running" - } - ], - 'ansible': { - 'ansible_fact_simple': 'hello world', - 'ansible_fact_complex': { - 'foo': 'bar', - 'hello': [ - 'scooby', - 'dooby', - 'doo' - ] - }, - } -} -# 
Derived from TEST_MSG_BASE -TEST_MSG = dict(TEST_MSG_BASE) - -TEST_MSG_LARGE = {u'ansible_product_version': u'To Be Filled By O.E.M.', u'ansible_memory_mb': {u'real': {u'total': 32062, u'used': 8079, u'free': 23983}, u'swap': {u'cached': 0, u'total': 0, u'used': 0, u'free': 0}, u'nocache': {u'used': 4339, u'free': 27723}}, u'ansible_user_dir': u'/root', u'ansible_userspace_bits': u'64', u'ansible_distribution_version': u'14.04', u'ansible_virtualization_role': u'guest', u'ansible_env': {u'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS': u'False', u'LC_CTYPE': u'en_US.UTF-8', u'JOB_CALLBACK_DEBUG': u'1', u'_MP_FORK_LOGFILE_': u'', u'HOME': u'/', u'REST_API_TOKEN': u'122-5deb0d6fcec85f3bf44fec6ce170600c', u'LANG': u'en_US.UTF-8', u'SHELL': u'/bin/bash', u'_MP_FORK_LOGFORMAT_': u'[%(asctime)s: %(levelname)s/%(processName)s] %(message)s', u'_': u'/usr/bin/make', u'DJANGO_PROJECT_DIR': u'/tower_devel', u'MFLAGS': u'-w', u'JOB_ID': u'122', u'PYTHONPATH': u'/tower_devel/awx/lib/site-packages:', u'_MP_FORK_LOGLEVEL_': u'10', u'ANSIBLE_CACHE_PLUGIN_CONNECTION': u'tcp://127.0.0.1:6564', u'ANSIBLE_LIBRARY': u'/tower_devel/awx/plugins/library', u'CELERY_LOG_LEVEL': u'10', u'HOSTNAME': u'2842b3619fa8', u'MAKELEVEL': u'2', u'TMUX_PANE': u'%1', u'DJANGO_LIVE_TEST_SERVER_ADDRESS': u'localhost:9013-9199', u'CELERY_LOG_REDIRECT': u'1', u'PATH': u'/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin', u'CALLBACK_CONSUMER_PORT': u'tcp://127.0.0.1:5557', u'MAKEFLAGS': u'w', u'ANSIBLE_CALLBACK_PLUGINS': u'/tower_devel/awx/plugins/callback', u'TERM': u'screen', u'TZ': u'America/New_York', u'LANGUAGE': u'en_US:en', u'ANSIBLE_SSH_CONTROL_PATH': u'/tmp/ansible_tower_y3xGdA/cp/ansible-ssh-%%h-%%p-%%r', u'SHLVL': u'1', u'CELERY_LOG_FILE': u'', u'ANSIBLE_HOST_KEY_CHECKING': u'False', u'TMUX': u'/tmp/tmux-0/default,3719,0', u'CELERY_LOADER': u'djcelery.loaders.DjangoLoader', u'LC_ALL': u'en_US.UTF-8', u'ANSIBLE_FORCE_COLOR': u'True', u'REST_API_URL': u'http://127.0.0.1:8013', 
u'CELERY_LOG_REDIRECT_LEVEL': u'WARNING', u'INVENTORY_HOSTVARS': u'True', u'ANSIBLE_CACHE_PLUGIN': u'tower', u'INVENTORY_ID': u'1', u'PWD': u'/tower_devel/awx/playbooks', u'DJANGO_SETTINGS_MODULE': u'awx.settings.development', u'ANSIBLE_CACHE_PLUGINS': u'/tower_devel/awx/plugins/fact_caching'}, u'ansible_lo': {u'mtu': 65536, u'device': u'lo', u'promisc': False, u'ipv4': {u'netmask': u'255.0.0.0', u'network': u'127.0.0.0', u'address': u'127.0.0.1'}, u'ipv6': [{u'scope': u'host', u'prefix': u'128', u'address': u'::1'}], u'active': True, u'type': u'loopback'}, u'ansible_memtotal_mb': 32062, u'ansible_architecture': u'x86_64', u'ansible_default_ipv4': {u'alias': u'eth0', u'netmask': u'255.255.0.0', u'macaddress': u'02:42:ac:11:00:01', u'network': u'172.17.0.0', u'address': u'172.17.0.1', u'interface': u'eth0', u'type': u'ether', u'gateway': u'172.17.42.1', u'mtu': 1500}, u'ansible_swapfree_mb': 0, u'ansible_default_ipv6': {}, u'ansible_cmdline': {u'nomodeset': True, u'rw': True, u'initrd': u'EFIarchinitramfs-arch.img', u'rootfstype': u'ext4', u'root': u'/dev/sda4', u'systemd.unit': u'graphical.target'}, u'ansible_selinux': False, u'ansible_userspace_architecture': u'x86_64', u'ansible_product_uuid': u'00020003-0004-0005-0006-000700080009', u'ansible_pkg_mgr': u'apt', u'ansible_memfree_mb': 23983, u'ansible_distribution': u'Ubuntu', u'ansible_processor_count': 1, u'ansible_hostname': u'2842b3619fa8', u'ansible_all_ipv6_addresses': [u'fe80::42:acff:fe11:1'], u'ansible_interfaces': [u'lo', u'eth0'], u'ansible_kernel': u'4.0.1-1-ARCH', u'ansible_fqdn': u'2842b3619fa8', u'ansible_mounts': [{u'uuid': u'NA', u'size_total': 10434699264, u'mount': u'/', u'size_available': 4918865920, u'fstype': u'ext4', u'device': u'/dev/mapper/docker-8:4-18219321-2842b3619fa885d19e47302009754a4bfd54c1b32c7f21e98f38c7fe7412d3d0', u'options': u'rw,relatime,discard,stripe=16,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/tower_devel', u'size_available': 240166572032, 
u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/resolv.conf', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/hostname', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}, {u'uuid': u'NA', u'size_total': 570629263360, u'mount': u'/etc/hosts', u'size_available': 240166572032, u'fstype': u'ext4', u'device': u'/dev/sda4', u'options': u'rw,relatime,data=ordered'}], u'ansible_user_shell': u'/bin/bash', u'ansible_nodename': u'2842b3619fa8', u'ansible_product_serial': u'To Be Filled By O.E.M.', u'ansible_form_factor': u'Desktop', u'ansible_fips': False, u'ansible_user_id': u'root', u'ansible_domain': u'', u'ansible_date_time': {u'month': u'05', u'second': u'47', u'iso8601_micro': u'2015-05-01T19:46:47.868456Z', u'year': u'2015', u'date': u'2015-05-01', u'iso8601': u'2015-05-01T19:46:47Z', u'day': u'01', u'minute': u'46', u'tz': u'EDT', u'hour': u'15', u'tz_offset': u'-0400', u'epoch': u'1430509607', u'weekday': u'Friday', u'time': u'15:46:47'}, u'ansible_processor_cores': 4, u'ansible_processor_vcpus': 4, u'ansible_bios_version': u'P1.80', u'ansible_processor': [u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz', u'GenuineIntel', u'Intel(R) Core(TM) i5-2310 CPU @ 2.90GHz'], u'ansible_virtualization_type': u'docker', u'ansible_distribution_release': u'trusty', u'ansible_system_vendor': u'To Be Filled By O.E.M.', u'ansible_os_family': u'Debian', u'ansible_user_gid': 0, u'ansible_swaptotal_mb': 0, u'ansible_system': u'Linux', u'ansible_devices': {u'sda': {u'sectorsize': u'4096', u'vendor': u'ATA', u'host': u'', u'support_discard': u'0', u'model': 
u'ST1000DM003-9YN1', u'size': u'7.28 TB', u'scheduler_mode': u'cfq', u'rotational': u'1', u'sectors': u'1953525168', u'removable': u'0', u'holders': [], u'partitions': {u'sda4': {u'start': u'820979712', u'sectorsize': 512, u'sectors': u'1132545423', u'size': u'540.04 GB'}, u'sda2': {u'start': u'206848', u'sectorsize': 512, u'sectors': u'262144', u'size': u'128.00 MB'}, u'sda3': {u'start': u'468992', u'sectorsize': 512, u'sectors': u'820510720', u'size': u'391.25 GB'}, u'sda1': {u'start': u'2048', u'sectorsize': 512, u'sectors': u'204800', u'size': u'100.00 MB'}}}}, u'ansible_user_uid': 0, u'ansible_distribution_major_version': u'14', u'ansible_lsb': {u'major_release': u'14', u'release': u'14.04', u'codename': u'trusty', u'description': u'Ubuntu 14.04.1 LTS', u'id': u'Ubuntu'}, u'ansible_bios_date': u'12/05/2012', u'ansible_machine': u'x86_64', u'ansible_user_gecos': u'root', u'ansible_processor_threads_per_core': 1, u'ansible_eth0': {u'device': u'eth0', u'promisc': False, u'macaddress': u'02:42:ac:11:00:01', u'ipv4': {u'netmask': u'255.255.0.0', u'network': u'172.17.0.0', u'address': u'172.17.0.1'}, u'ipv6': [{u'scope': u'link', u'prefix': u'64', u'address': u'fe80::42:acff:fe11:1'}], u'active': True, u'type': u'ether', u'mtu': 1500}, u'ansible_product_name': u'To Be Filled By O.E.M.', u'ansible_all_ipv4_addresses': [u'172.17.0.1'], u'ansible_python_version': u'2.7.6'} # noqa - -def copy_only_module(data, module): - data = deepcopy(data) - data['facts'] = {} - if module == 'ansible': - data['facts'] = deepcopy(TEST_MSG_MODULES[module]) - else: - data['facts'][module] = deepcopy(TEST_MSG_MODULES[module]) - return data - - -class RunFactCacheReceiverFunctionalTest(BaseCommandMixin, BaseTest, MongoDBRequired): - @unittest.skip('''\ -TODO: run_fact_cache_receiver enters a while True loop that never exists. \ -This differs from most other commands that we test for. 
More logic and work \ -would be required to invoke this case from the command line with little return \ -in terms of increase coverage and confidence.''') - def test_invoke(self): - result, stdout, stderr = self.run_command('run_fact_cache_receiver') - self.assertEqual(result, None) - -class RunFactCacheReceiverUnitTest(BaseTest, MongoDBRequired): - - # TODO: Check that timestamp and other attributes are as expected - def check_process_fact_message_module(self, data, module): - fact_found = None - facts = Fact.objects.all() - self.assertEqual(len(facts), 1) - for fact in facts: - if fact.module == module: - fact_found = fact - break - self.assertIsNotNone(fact_found) - #self.assertEqual(data['facts'][module], fact_found[module]) - - fact_found = None - fact_versions = FactVersion.objects.all() - self.assertEqual(len(fact_versions), 1) - for fact in fact_versions: - if fact.module == module: - fact_found = fact - break - self.assertIsNotNone(fact_found) - - - # Ensure that the message flows from the socket through to process_fact_message() - @mock.patch('awx.main.socket.Socket.listen') - def test_run_receiver(self, listen_mock): - listen_mock.return_value = [TEST_MSG] - - receiver = FactCacheReceiver() - receiver.process_fact_message = MagicMock(name='process_fact_message') - receiver.run_receiver(use_processing_threads=False) - - receiver.process_fact_message.assert_called_once_with(TEST_MSG) - - def test_process_fact_message_ansible(self): - data = copy_only_module(TEST_MSG, 'ansible') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'ansible') - - def test_process_fact_message_packages(self): - data = copy_only_module(TEST_MSG, 'packages') - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'packages') - - def test_process_fact_message_services(self): - data = copy_only_module(TEST_MSG, 'services') - - receiver = 
FactCacheReceiver() - receiver.process_fact_message(data) - - self.check_process_fact_message_module(data, 'services') - - - # Ensure that only a single host gets created for multiple invocations with the same hostname - def test_process_fact_message_single_host_created(self): - receiver = FactCacheReceiver() - - data = deepcopy(TEST_MSG) - receiver.process_fact_message(data) - data = deepcopy(TEST_MSG) - data['date_key'] = time.mktime(datetime.utcnow().timetuple()) - receiver.process_fact_message(data) - - fact_hosts = FactHost.objects.all() - self.assertEqual(len(fact_hosts), 1) - - def test_process_facts_message_ansible_overwrite(self): - data = copy_only_module(TEST_MSG, 'ansible') - key = 'ansible.overwrite' - value = 'hello world' - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - fact = Fact.objects.all()[0] - - data = copy_only_module(TEST_MSG, 'ansible') - data['facts'][key] = value - receiver.process_fact_message(data) - - fact = Fact.objects.get(id=fact.id) - self.assertIn(key, fact.fact) - self.assertEqual(fact.fact[key], value) - self.assertEqual(fact.fact, data['facts']) - - def test_large_overwrite(self): - data = deepcopy(TEST_MSG_BASE) - data['facts'] = { - 'ansible': {} - } - - receiver = FactCacheReceiver() - receiver.process_fact_message(data) - - fact = Fact.objects.all()[0] - - data['facts']['ansible'] = TEST_MSG_LARGE - receiver.process_fact_message(data) - - fact = Fact.objects.get(id=fact.id) - self.assertEqual(fact.fact, data['facts']['ansible']) diff --git a/awx/main/tests/old/fact/fact_api.py b/awx/main/tests/old/fact/fact_api.py deleted file mode 100644 index d13b17f060..0000000000 --- a/awx/main/tests/old/fact/fact_api.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. 
-# All Rights Reserved - -# Python -import unittest2 as unittest - -# Django -from django.core.urlresolvers import reverse - -# AWX -from awx.main.utils import timestamp_apiformat -from awx.main.models import * # noqa -from awx.main.tests.base import BaseLiveServerTest -from awx.fact.models import * # noqa -from awx.fact.tests.base import BaseFactTestMixin, FactScanBuilder, TEST_FACT_ANSIBLE, TEST_FACT_PACKAGES, TEST_FACT_SERVICES -from awx.main.utils import build_url - -__all__ = ['FactVersionApiTest', 'FactViewApiTest', 'SingleFactApiTest',] - -class FactApiBaseTest(BaseLiveServerTest, BaseFactTestMixin): - def setUp(self): - super(FactApiBaseTest, self).setUp() - self.create_test_license_file() - self.setup_instances() - self.setup_users() - self.organization = self.make_organization(self.super_django_user) - self.organization.admins.add(self.normal_django_user) - self.inventory = self.organization.inventories.create(name='test-inventory', description='description for test-inventory') - self.host = self.inventory.hosts.create(name='host.example.com') - self.host2 = self.inventory.hosts.create(name='host2.example.com') - self.host3 = self.inventory.hosts.create(name='host3.example.com') - - def setup_facts(self, scan_count): - self.builder = FactScanBuilder() - self.builder.set_inventory_id(self.inventory.pk) - self.builder.add_fact('ansible', TEST_FACT_ANSIBLE) - self.builder.add_fact('packages', TEST_FACT_PACKAGES) - self.builder.add_fact('services', TEST_FACT_SERVICES) - self.builder.add_hostname('host.example.com') - self.builder.add_hostname('host2.example.com') - self.builder.add_hostname('host3.example.com') - self.builder.build(scan_count=scan_count, host_count=3) - - self.fact_host = FactHost.objects.get(hostname=self.host.name) - -class FactVersionApiTest(FactApiBaseTest): - def check_equal(self, fact_versions, results): - def find(element, set1): - for e in set1: - if all([ e.get(field) == element.get(field) for field in element.keys()]): - return e - 
return None - - self.assertEqual(len(results), len(fact_versions)) - for v in fact_versions: - v_dict = { - 'timestamp': timestamp_apiformat(v.timestamp), - 'module': v.module - } - e = find(v_dict, results) - self.assertIsNotNone(e, "%s not found in %s" % (v_dict, results)) - - def get_list(self, fact_versions, params=None): - url = build_url('api:host_fact_versions_list', args=(self.host.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - - self.check_equal(fact_versions, response['results']) - return response - - def test_permission_list(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def test_list_empty(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertIn('results', response) - self.assertIsInstance(response['results'], list) - self.assertEqual(len(response['results']), 0) - - def test_list_related_fact_view(self): - self.setup_facts(2) - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - for entry in response['results']: - self.assertIn('fact_view', entry['related']) - self.get(entry['related']['fact_view'], expect=200) - - def test_list(self): - self.setup_facts(2) - self.get_list(FactVersion.objects.filter(host=self.fact_host)) - - def test_list_module(self): - self.setup_facts(10) - self.get_list(FactVersion.objects.filter(host=self.fact_host, module='packages'), dict(module='packages')) - - def test_list_time_from(self): - 
self.setup_facts(10) - - params = { - 'from': timestamp_apiformat(self.builder.get_timestamp(1)), - } - # 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from']) - self.get_list(fact_versions, params) - - def test_list_time_to(self): - self.setup_facts(10) - - params = { - 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - } - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__lte=params['to']) - self.get_list(fact_versions, params) - - def test_list_time_from_to(self): - self.setup_facts(10) - - params = { - 'from': timestamp_apiformat(self.builder.get_timestamp(1)), - 'to': timestamp_apiformat(self.builder.get_timestamp(3)) - } - fact_versions = FactVersion.objects.filter(host=self.fact_host, timestamp__gt=params['from'], timestamp__lte=params['to']) - self.get_list(fact_versions, params) - - -class FactViewApiTest(FactApiBaseTest): - def check_equal(self, fact_obj, results): - fact_dict = { - 'timestamp': timestamp_apiformat(fact_obj.timestamp), - 'module': fact_obj.module, - 'host': { - 'hostname': fact_obj.host.hostname, - 'inventory_id': fact_obj.host.inventory_id, - 'id': str(fact_obj.host.id) - }, - 'fact': fact_obj.fact - } - self.assertEqual(fact_dict, results) - - def test_permission_view(self): - url = reverse('api:host_fact_compare_view', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def get_fact(self, fact_obj, params=None): - url = build_url('api:host_fact_compare_view', args=(self.host.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - - self.check_equal(fact_obj, response) - - 
def test_view(self): - self.setup_facts(2) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible').order_by('-timestamp')[0]) - - def test_view_module_filter(self): - self.setup_facts(2) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='services').order_by('-timestamp')[0], dict(module='services')) - - def test_view_time_filter(self): - self.setup_facts(6) - ts = self.builder.get_timestamp(3) - self.get_fact(Fact.objects.filter(host=self.fact_host, module='ansible', timestamp__lte=ts).order_by('-timestamp')[0], - dict(datetime=ts)) - - -@unittest.skip("single fact query needs to be updated to use inventory_id attribute on host document") -class SingleFactApiTest(FactApiBaseTest): - def setUp(self): - super(SingleFactApiTest, self).setUp() - - self.group = self.inventory.groups.create(name='test-group') - self.group.hosts.add(self.host, self.host2, self.host3) - - def test_permission_list(self): - url = reverse('api:host_fact_versions_list', args=(self.host.pk,)) - with self.current_user('admin'): - self.get(url, expect=200) - with self.current_user('normal'): - self.get(url, expect=200) - with self.current_user('other'): - self.get(url, expect=403) - with self.current_user('nobody'): - self.get(url, expect=403) - with self.current_user(None): - self.get(url, expect=401) - - def _test_related(self, url): - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertTrue(len(response['results']) > 0) - for entry in response['results']: - self.assertIn('single_fact', entry['related']) - # Requires fields - self.get(entry['related']['single_fact'], expect=400) - - def test_related_host_list(self): - self.setup_facts(2) - self._test_related(reverse('api:host_list')) - - def test_related_group_list(self): - self.setup_facts(2) - self._test_related(reverse('api:group_list')) - - def test_related_inventory_list(self): - self.setup_facts(2) - self._test_related(reverse('api:inventory_list')) - - def 
test_params(self): - self.setup_facts(2) - params = { - 'module': 'packages', - 'fact_key': 'name', - 'fact_value': 'acpid', - } - url = build_url('api:inventory_single_fact_view', args=(self.inventory.pk,), get=params) - with self.current_user(self.super_django_user): - response = self.get(url, expect=200) - self.assertEqual(len(response['results']), 3) - for entry in response['results']: - self.assertEqual(entry['fact'][0]['name'], 'acpid') diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 36f39ac3ec..7a1fb02a9e 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -662,10 +662,7 @@ ACTIVITY_STREAM_ENABLED = True ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC = False # Internal API URL for use by inventory scripts and callback plugin. -if 'devserver' in INSTALLED_APPS: - INTERNAL_API_URL = 'http://127.0.0.1:%s' % DEVSERVER_DEFAULT_PORT -else: - INTERNAL_API_URL = 'http://127.0.0.1:8000' +INTERNAL_API_URL = 'http://127.0.0.1:%s' % DEVSERVER_DEFAULT_PORT # ZeroMQ callback settings. CALLBACK_CONSUMER_PORT = "tcp://127.0.0.1:5556" diff --git a/awx/settings/development.py b/awx/settings/development.py index a214ab4670..46df026e06 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -13,7 +13,6 @@ from split_settings.tools import optional, include # Load default settings. from defaults import * # NOQA - MONGO_HOST = '127.0.0.1' MONGO_PORT = 27017 MONGO_USERNAME = None @@ -66,10 +65,13 @@ PASSWORD_HASHERS = ( # Configure a default UUID for development only. SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' -STATSD_CLIENT = 'django_statsd.clients.normal' -STATSD_HOST = 'graphite' +STATSD_CLIENT = 'django_statsd.clients.null' +STATSD_HOST = None +STATSD_PREFIX = None +#STATSD_CLIENT = 'django_statsd.clients.normal' +#STATSD_HOST = 'graphite' STATSD_PORT = 8125 -STATSD_PREFIX = 'tower' +#STATSD_PREFIX = 'tower' STATSD_MAXUDPSIZE = 512 # If there is an `/etc/tower/settings.py`, include it. 
diff --git a/pytest.ini b/pytest.ini index 90f45f0b2a..748c2919fd 100644 --- a/pytest.ini +++ b/pytest.ini @@ -4,3 +4,5 @@ python_paths = awx/lib/site-packages site_dirs = awx/lib/site-packages python_files = *.py addopts = --reuse-db +markers = + ac: access control test diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 8f889704fb..1894fe4045 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -49,7 +49,7 @@ importlib==1.0.3 ipaddress==1.0.14 iso8601==0.1.10 isodate==0.5.1 -git+https://github.com/chrismeyersfsu/django-jsonbfield@master#egg=django-jsonbfield +git+https://github.com/chrismeyersfsu/django-jsonbfield@fix-sqlite_serialization#egg=django-jsonbfield jsonpatch==1.11 jsonpointer==1.9 jsonschema==2.5.1 diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index 0e1fcaa1ba..9131465b10 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -7,3 +7,4 @@ pytest pytest-cov pytest-django pytest-pythonpath +pytest-mock diff --git a/requirements/requirements_jenkins.txt b/requirements/requirements_jenkins.txt index 7ea9c8642f..b1cdafc250 100644 --- a/requirements/requirements_jenkins.txt +++ b/requirements/requirements_jenkins.txt @@ -13,3 +13,4 @@ pytest pytest-cov pytest-django pytest-pythonpath +pytest-mock diff --git a/tools/docker-compose/start_development.sh b/tools/docker-compose/start_development.sh index 1ade0e0cf8..391df450dc 100755 --- a/tools/docker-compose/start_development.sh +++ b/tools/docker-compose/start_development.sh @@ -1,4 +1,5 @@ #!/bin/bash +set +x # Wait for the databases to come up ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all From ad204f45e1845ffb095995ef797fc96633608ad6 Mon Sep 17 00:00:00 2001 From: Leigh Johnson Date: Thu, 25 Feb 2016 13:30:20 -0500 Subject: [PATCH 27/38] quick config-line fix to #1039 --- awx/ui/client/src/app.js | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/awx/ui/client/src/app.js b/awx/ui/client/src/app.js index 43ad6627fb..005d489e32 100644 --- a/awx/ui/client/src/app.js +++ b/awx/ui/client/src/app.js @@ -199,9 +199,9 @@ var tower = angular.module('Tower', [ .config(['$pendolyticsProvider', function($pendolyticsProvider) { $pendolyticsProvider.doNotAutoStart(); }]) - .config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider', - function ($stateProvider, $urlRouterProvider, $breadcrumbProvider) { - + .config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider', '$urlMatcherFactoryProvider', + function ($stateProvider, $urlRouterProvider, $breadcrumbProvider, $urlMatcherFactoryProvider) { + $urlMatcherFactoryProvider.strictMode(false) $breadcrumbProvider.setOptions({ templateUrl: urlPrefix + 'partials/breadcrumb.html' }); From 60c3eeaef3211b11617c6fd9422891fe43135f86 Mon Sep 17 00:00:00 2001 From: Chris Church Date: Thu, 25 Feb 2016 15:01:29 -0500 Subject: [PATCH 28/38] Include @anoek's changes from #1029. --- awx/plugins/callback/job_event_callback.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index f12f5e8489..ddffcaf974 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -2,10 +2,10 @@ # This file is a utility Ansible plugin that is not part of the AWX or Ansible # packages. It does not import any code from either package, nor does its # license apply to Ansible or AWX. -# +# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: -# +# # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. 
# @@ -90,10 +90,12 @@ CENSOR_FIELD_WHITELIST=[ 'skip_reason', ] -def censor(obj): +def censor(obj, no_log=False): if not isinstance(obj, dict): + if no_log: + return "the output has been hidden due to the fact that 'no_log: true' was specified for this result" return obj - if obj.get('_ansible_no_log', False): + if obj.get('_ansible_no_log', no_log): new_obj = {} for k in CENSOR_FIELD_WHITELIST: if k in obj: @@ -106,8 +108,12 @@ def censor(obj): new_obj['censored'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result" obj = new_obj if 'results' in obj: - for i in xrange(len(obj['results'])): - obj['results'][i] = censor(obj['results'][i]) + if isinstance(obj['results'], list): + for i in xrange(len(obj['results'])): + obj['results'][i] = censor(obj['results'][i], obj.get('_ansible_no_log', no_log)) + elif obj.get('_ansible_no_log', False): + obj['results'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result" + return obj class TokenAuth(requests.auth.AuthBase): @@ -462,7 +468,7 @@ class JobCallbackModule(BaseCallbackModule): # this from a normal task self._log_event('playbook_on_task_start', task=task, name=task.get_name()) - + def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None): From a9bf29d2a7cbf827ed77d52e9a14792e280d95c1 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Fri, 26 Feb 2016 11:30:34 -0500 Subject: [PATCH 29/38] Add the id field to the default summary fields This is to support the UI. 
ref: #1063 --- awx/api/serializers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 223803bcbf..948e61c00d 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -50,7 +50,7 @@ from awx.fact.models import * # noqa logger = logging.getLogger('awx.api.serializers') # Fields that should be summarized regardless of object type. -DEFAULT_SUMMARY_FIELDS = ('name', 'description')# , 'created_by', 'modified_by')#, 'type') +DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type') # Keys are fields (foreign keys) where, if found on an instance, summary info # should be added to the serialized data. Values are a tuple of field names on From 6870a5d60c1a8a13abed98e7df8e6e19e0208f1d Mon Sep 17 00:00:00 2001 From: Michael Abashian Date: Fri, 26 Feb 2016 11:58:53 -0500 Subject: [PATCH 30/38] Swap out regex on related for IDs in summary_fields --- awx/ui/client/src/controllers/Projects.js | 6 +++--- awx/ui/client/src/helpers/Groups.js | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/awx/ui/client/src/controllers/Projects.js b/awx/ui/client/src/controllers/Projects.js index 5650500088..911ebcf356 100644 --- a/awx/ui/client/src/controllers/Projects.js +++ b/awx/ui/client/src/controllers/Projects.js @@ -204,12 +204,12 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams, $scope.removeGoToJobDetails(); } $scope.removeGoToJobDetails = $scope.$on('GoToJobDetails', function(e, data) { - if (data.related.current_update || data.related.last_update) { + if (data.summary_fields.current_update || data.summary_fields.last_update) { Wait('start'); - // Pull the id out of the URL - var id = (data.related.current_update) ? 
data.related.current_update.replace(/^\//, '').split('/')[3] : data.related.last_update.replace(/^\//, '').split('/')[3]; + // Grab the id from summary_fields + var id = (data.summary_fields.current_update) ? data.summary_fields.current_update.id : data.summary_fields.last_update.id; $state.go('scmUpdateStdout', {id: id}); diff --git a/awx/ui/client/src/helpers/Groups.js b/awx/ui/client/src/helpers/Groups.js index b366a02a50..1ff3d391f4 100644 --- a/awx/ui/client/src/helpers/Groups.js +++ b/awx/ui/client/src/helpers/Groups.js @@ -79,7 +79,7 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name scope.removeSourceReady = scope.$on('SourceReady', function(e, source) { // Pull the ID out of related.current_update / related.last_update - var update_id = (source.current_update) ? source.related.current_update.replace(/^\//, '').split('/')[3] : source.related.last_update.replace(/^\//, '').split('/')[3]; + var update_id = (source.current_update) ? source.summary_fields.current_update.id : source.summary_fields.last_update.id; $state.go('inventorySyncStdout', {id: update_id}); From 38463673d5ec5f9c6bf5f6208ee0b0b6efb367ff Mon Sep 17 00:00:00 2001 From: Michael Abashian Date: Fri, 26 Feb 2016 12:25:24 -0500 Subject: [PATCH 31/38] Updated out-dated comment to reflect code --- awx/ui/client/src/helpers/Groups.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/client/src/helpers/Groups.js b/awx/ui/client/src/helpers/Groups.js index 1ff3d391f4..eeebb9d8bf 100644 --- a/awx/ui/client/src/helpers/Groups.js +++ b/awx/ui/client/src/helpers/Groups.js @@ -78,7 +78,7 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name } scope.removeSourceReady = scope.$on('SourceReady', function(e, source) { - // Pull the ID out of related.current_update / related.last_update + // Get the ID from the correct summary field var update_id = (source.current_update) ? 
source.summary_fields.current_update.id : source.summary_fields.last_update.id; $state.go('inventorySyncStdout', {id: update_id}); From 0ee12901fe4e5927f2c9d8e5f52a30223f9b5cb5 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 29 Feb 2016 12:30:00 -0500 Subject: [PATCH 32/38] Fix some notifications issues and write some tests * Fixes some notifier merging issues * Fixes some more unicode problems * Implements unit tests --- awx/main/models/inventory.py | 7 +- awx/main/models/jobs.py | 6 +- awx/main/models/projects.py | 15 +- awx/main/models/unified_jobs.py | 3 +- awx/main/notifications/twilio_backend.py | 3 +- awx/main/tasks.py | 4 +- awx/main/tests/functional/conftest.py | 160 ++++++++++++++++++ .../tests/functional/test_notifications.py | 124 ++++++++++++++ 8 files changed, 302 insertions(+), 20 deletions(-) create mode 100644 awx/main/tests/functional/conftest.py create mode 100644 awx/main/tests/functional/test_notifications.py diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index edf03a883d..c95c8488bd 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -1185,11 +1185,10 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions): @property def notifiers(self): - # Return all notifiers defined on the Project, and on the Organization for each trigger type base_notifiers = Notifier.objects.filter(active=True) - error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors__in=[self])) - success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success__in=[self])) - any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any__in=[self])) + error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization)) + success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization)) + any_notifiers = 
list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization)) return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) def clean_source(self): diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 01857b8b06..bd167d3474 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -341,7 +341,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions): error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project])) success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project])) any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project])) - return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) + # Get Organization Notifiers + error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.project.organizations.all()))) + success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.project.organizations.all()))) + any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.project.organizations.all()))) + return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers)) class Job(UnifiedJob, JobOptions): ''' diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 415c674bb1..db295023da 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -313,20 +313,15 @@ class Project(UnifiedJobTemplate, ProjectOptions): @property def notifiers(self): - # Return all notifiers defined on the Project, and on the Organization for each trigger type - # TODO: Currently there is no org fk on project so this will need to be added back once that is - # available after the rbac pr base_notifiers = Notifier.objects.filter(active=True) - 
# error_notifiers = list(base_notifiers.filter(Q(project_notifications_for_errors__in=self) | - # Q(organization_notifications_for_errors__in=self.organization))) - # success_notifiers = list(base_notifiers.filter(Q(project_notifications_for_success__in=self) | - # Q(organization_notifications_for_success__in=self.organization))) - # any_notifiers = list(base_notifiers.filter(Q(project_notifications_for_any__in=self) | - # Q(organization_notifications_for_any__in=self.organization))) error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self)) success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self)) any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self)) - return dict(error=error_notifiers, success=success_notifiers, any=any_notifiers) + # Get Organization Notifiers + error_notifiers = set(error_notifiers + list(base_notifiers.filter(organization_notifiers_for_errors__in=self.organizations.all()))) + success_notifiers = set(success_notifiers + list(base_notifiers.filter(organization_notifiers_for_success__in=self.organizations.all()))) + any_notifiers = set(any_notifiers + list(base_notifiers.filter(organization_notifiers_for_any__in=self.organizations.all()))) + return dict(error=list(error_notifiers), success=list(success_notifiers), any=list(any_notifiers)) def get_absolute_url(self): return reverse('api:project_detail', args=(self.pk,)) diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 7bb4cdd798..d83cfae978 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -17,6 +17,7 @@ from django.db import models from django.core.exceptions import NON_FIELD_ERRORS from django.utils.translation import ugettext_lazy as _ from django.utils.timezone import now +from django.utils.encoding import smart_text # Django-JSONField from jsonfield import JSONField @@ -741,7 +742,7 @@ class UnifiedJob(PolymorphicModel, 
PasswordFieldsModel, CommonModelNameNotUnique return dict(id=self.id, name=self.name, url=self.get_ui_url(), - created_by=str(self.created_by), + created_by=smart_text(self.created_by), started=self.started.isoformat(), finished=self.finished.isoformat(), status=self.status, diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index 1aea6f368e..df411c68c5 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -20,11 +20,10 @@ class TwilioBackend(TowerBaseEmailBackend): recipient_parameter = "to_numbers" sender_parameter = "from_number" - def __init__(self, account_sid, account_token, from_phone, fail_silently=False, **kwargs): + def __init__(self, account_sid, account_token, fail_silently=False, **kwargs): super(TwilioBackend, self).__init__(fail_silently=fail_silently) self.account_sid = account_sid self.account_token = account_token - self.from_phone = from_phone def send_messages(self, messages): sent_messages = 0 diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 4b285546bb..509c5d1e7e 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -235,7 +235,7 @@ def handle_work_success(self, result, task_actual): instance_name, notification_body['url']) send_notifications.delay([n.generate_notification(notification_subject, notification_body) - for n in notifiers.get('success', []) + notifiers.get('any', [])], + for n in set(notifiers.get('success', []) + notifiers.get('any', []))], job_id=task_actual['id']) @task(bind=True) @@ -292,7 +292,7 @@ def handle_work_error(self, task_id, subtasks=None): notification_body['url']) notification_body['friendly_name'] = first_task_friendly_name send_notifications.delay([n.generate_notification(notification_subject, notification_body).id - for n in notifiers.get('error', []) + notifiers.get('any', [])], + for n in set(notifiers.get('error', []) + notifiers.get('any', []))], job_id=first_task_id) diff --git 
a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py new file mode 100644 index 0000000000..27ff76f816 --- /dev/null +++ b/awx/main/tests/functional/conftest.py @@ -0,0 +1,160 @@ +import pytest +import mock + +from django.core.urlresolvers import resolve +from django.utils.six.moves.urllib.parse import urlparse + +from awx.main.models.organization import Organization +from awx.main.models.projects import Project +from awx.main.models.ha import Instance +from django.contrib.auth.models import User +from rest_framework.test import ( + APIRequestFactory, + force_authenticate, +) + +@pytest.fixture +def user(): + def u(name, is_superuser=False): + try: + user = User.objects.get(username=name) + except User.DoesNotExist: + user = User(username=name, is_superuser=is_superuser, password=name) + user.save() + return user + return u + +@pytest.fixture +def post(): + def rf(url, data, user=None, middleware=None, **kwargs): + view, view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().post(url, data, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def get(): + def rf(url, user=None, middleware=None, **kwargs): + view, view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().get(url, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def put(): + def rf(url, data, user=None, middleware=None, **kwargs): + view, 
view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().put(url, data, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def patch(): + def rf(url, data, user=None, middleware=None, **kwargs): + view, view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().patch(url, data, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def delete(): + def rf(url, user=None, middleware=None, **kwargs): + view, view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().delete(url, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def head(): + def rf(url, user=None, middleware=None, **kwargs): + view, view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().head(url, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def 
options(): + def rf(url, data, user=None, middleware=None, **kwargs): + view, view_args, view_kwargs = resolve(urlparse(url)[2]) + if 'format' not in kwargs: + kwargs['format'] = 'json' + request = APIRequestFactory().options(url, data, **kwargs) + if middleware: + middleware.process_request(request) + if user: + force_authenticate(request, user=user) + response = view(request, *view_args, **view_kwargs) + if middleware: + middleware.process_response(request, response) + return response + return rf + +@pytest.fixture +def instance(settings): + return Instance.objects.create(uuid=settings.SYSTEM_UUID, primary=True, hostname="instance.example.org") + +@pytest.fixture +def organization(instance): + return Organization.objects.create(name="test-org", description="test-org-desc") + +@pytest.fixture +@mock.patch.object(Project, "update", lambda self, **kwargs: None) +def project(instance): + return Project.objects.create(name="test-proj", + description="test-proj-desc", + scm_type="git", + scm_url="https://github.com/jlaska/ansible-playbooks") diff --git a/awx/main/tests/functional/test_notifications.py b/awx/main/tests/functional/test_notifications.py new file mode 100644 index 0000000000..72ac91b7b1 --- /dev/null +++ b/awx/main/tests/functional/test_notifications.py @@ -0,0 +1,124 @@ +import mock +import pytest + +from awx.main.models.notifications import Notification, Notifier +from awx.main.models.inventory import Inventory, Group +from awx.main.models.organization import Organization +from awx.main.models.projects import Project +from awx.main.models.jobs import JobTemplate + +from django.core.urlresolvers import reverse +from django.core.mail.message import EmailMessage + +@pytest.fixture +def notifier(): + return Notifier.objects.create(name="test-notification", + notification_type="webhook", + notification_configuration=dict(url="http://localhost", + headers={"Test": "Header"})) + +@pytest.mark.django_db +def test_get_notifier_list(get, user, notifier): + url = 
reverse('api:notifier_list') + response = get(url, user('admin', True)) + assert response.status_code == 200 + assert len(response.data['results']) == 1 + +@pytest.mark.django_db +def test_basic_parameterization(get, post, user, organization): + u = user('admin-poster', True) + url = reverse('api:notifier_list') + response = post(url, + dict(name="test-webhook", + description="test webhook", + organization=1, + notification_type="webhook", + notification_configuration=dict(url="http://localhost", + headers={"Test": "Header"})), + u) + assert response.status_code == 201 + url = reverse('api:notifier_detail', args=(response.data['id'],)) + response = get(url, u) + assert 'related' in response.data + assert 'organization' in response.data['related'] + assert 'summary_fields' in response.data + assert 'organization' in response.data['summary_fields'] + assert 'notifications' in response.data['related'] + assert 'notification_configuration' in response.data + assert 'url' in response.data['notification_configuration'] + assert 'headers' in response.data['notification_configuration'] + +@pytest.mark.django_db +def test_encrypted_subfields(get, post, user, organization): + def assert_send(self, messages): + assert self.account_token == "shouldhide" + return 1 + u = user('admin-poster', True) + url = reverse('api:notifier_list') + response = post(url, + dict(name="test-twilio", + description="test twilio", + organization=1, + notification_type="twilio", + notification_configuration=dict(account_sid="dummy", + account_token="shouldhide", + from_number="+19999999999", + to_numbers=["9998887777"])), + u) + assert response.status_code == 201 + notifier_actual = Notifier.objects.get(id=response.data['id']) + assert notifier_actual.notification_configuration['account_token'].startswith("$encrypted$") + url = reverse('api:notifier_detail', args=(response.data['id'],)) + response = get(url, u) + assert response.data['notification_configuration']['account_token'] == "$encrypted$" + 
with mock.patch.object(notifier_actual.notification_class, "send_messages", assert_send): + notifier_actual.send("Test", {'body': "Test"}) + +@pytest.mark.django_db +def test_inherited_notifiers(get, post, user, organization, project): + u = user('admin-poster', True) + url = reverse('api:notifier_list') + notifiers = [] + for nfiers in xrange(3): + response = post(url, + dict(name="test-webhook-{}".format(nfiers), + description="test webhook {}".format(nfiers), + organization=1, + notification_type="webhook", + notification_configuration=dict(url="http://localhost", + headers={"Test": "Header"})), + u) + assert response.status_code == 201 + notifiers.append(response.data['id']) + o = Organization.objects.get(id=1) + p = Project.objects.get(id=1) + o.projects.add(p) + i = Inventory.objects.create(name='test', organization=o) + i.save() + g = Group.objects.create(name='test', inventory=i) + g.save() + jt = JobTemplate.objects.create(name='test', inventory=i, project=p, playbook='debug.yml') + jt.save() + url = reverse('api:organization_notifiers_any_list', args=(1,)) + response = post(url, dict(id=notifiers[0]), u) + assert response.status_code == 204 + url = reverse('api:project_notifiers_any_list', args=(1,)) + response = post(url, dict(id=notifiers[1]), u) + assert response.status_code == 204 + url = reverse('api:job_template_notifiers_any_list', args=(jt.id,)) + response = post(url, dict(id=notifiers[2]), u) + assert response.status_code == 204 + assert len(jt.notifiers['any']) == 3 + assert len(p.notifiers['any']) == 2 + assert len(g.inventory_source.notifiers['any']) == 1 + +@pytest.mark.django_db +def test_notifier_merging(get, post, user, organization, project, notifier): + u = user('admin-poster', True) + o = Organization.objects.get(id=1) + p = Project.objects.get(id=1) + n = Notifier.objects.get(id=1) + o.projects.add(p) + o.notifiers_any.add(n) + p.notifiers_any.add(n) + assert len(p.notifiers['any']) == 1 From b892ee5f96dceb648aa64b075341752a1a551b9b 
Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 29 Feb 2016 13:25:49 -0500 Subject: [PATCH 33/38] Updates to notification unit tests after @wwitzel3's feedback --- .../tests/functional/test_notifications.py | 28 ++++++++----------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/awx/main/tests/functional/test_notifications.py b/awx/main/tests/functional/test_notifications.py index 72ac91b7b1..89beb5bcc6 100644 --- a/awx/main/tests/functional/test_notifications.py +++ b/awx/main/tests/functional/test_notifications.py @@ -58,7 +58,7 @@ def test_encrypted_subfields(get, post, user, organization): response = post(url, dict(name="test-twilio", description="test twilio", - organization=1, + organization=organization.id, notification_type="twilio", notification_configuration=dict(account_sid="dummy", account_token="shouldhide", @@ -67,7 +67,6 @@ def test_encrypted_subfields(get, post, user, organization): u) assert response.status_code == 201 notifier_actual = Notifier.objects.get(id=response.data['id']) - assert notifier_actual.notification_configuration['account_token'].startswith("$encrypted$") url = reverse('api:notifier_detail', args=(response.data['id'],)) response = get(url, u) assert response.data['notification_configuration']['account_token'] == "$encrypted$" @@ -90,35 +89,30 @@ def test_inherited_notifiers(get, post, user, organization, project): u) assert response.status_code == 201 notifiers.append(response.data['id']) - o = Organization.objects.get(id=1) - p = Project.objects.get(id=1) - o.projects.add(p) - i = Inventory.objects.create(name='test', organization=o) + organization.projects.add(project) + i = Inventory.objects.create(name='test', organization=organization) i.save() g = Group.objects.create(name='test', inventory=i) g.save() - jt = JobTemplate.objects.create(name='test', inventory=i, project=p, playbook='debug.yml') + jt = JobTemplate.objects.create(name='test', inventory=i, project=project, playbook='debug.yml') jt.save() 
- url = reverse('api:organization_notifiers_any_list', args=(1,)) + url = reverse('api:organization_notifiers_any_list', args=(organization.id,)) response = post(url, dict(id=notifiers[0]), u) assert response.status_code == 204 - url = reverse('api:project_notifiers_any_list', args=(1,)) + url = reverse('api:project_notifiers_any_list', args=(project.id,)) response = post(url, dict(id=notifiers[1]), u) assert response.status_code == 204 url = reverse('api:job_template_notifiers_any_list', args=(jt.id,)) response = post(url, dict(id=notifiers[2]), u) assert response.status_code == 204 assert len(jt.notifiers['any']) == 3 - assert len(p.notifiers['any']) == 2 + assert len(project.notifiers['any']) == 2 assert len(g.inventory_source.notifiers['any']) == 1 @pytest.mark.django_db def test_notifier_merging(get, post, user, organization, project, notifier): u = user('admin-poster', True) - o = Organization.objects.get(id=1) - p = Project.objects.get(id=1) - n = Notifier.objects.get(id=1) - o.projects.add(p) - o.notifiers_any.add(n) - p.notifiers_any.add(n) - assert len(p.notifiers['any']) == 1 + organization.projects.add(project) + organization.notifiers_any.add(notifier) + project.notifiers_any.add(notifier) + assert len(project.notifiers['any']) == 1 From 533c6290405c94969978784d2d6373c090931e8b Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 29 Feb 2016 23:50:16 -0500 Subject: [PATCH 34/38] Add irc requirements dependency --- requirements/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 4f9e49ac8c..dea5dc8def 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -48,6 +48,7 @@ idna==2.0 importlib==1.0.3 ip-associations-python-novaclient-ext==0.1 ipaddress==1.0.16 +irc==13.3.1 iso8601==0.1.11 isodate==0.5.1 jsonpatch==1.12 From f6fa11e383a8a6e84f2fb708221338f4df6230e0 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 1 Mar 2016 09:16:48 -0500 
Subject: [PATCH 35/38] fix Fact migration --- .../{0003_auto_20160209_1615.py => 0004_v300_changes.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename awx/main/migrations/{0003_auto_20160209_1615.py => 0004_v300_changes.py} (95%) diff --git a/awx/main/migrations/0003_auto_20160209_1615.py b/awx/main/migrations/0004_v300_changes.py similarity index 95% rename from awx/main/migrations/0003_auto_20160209_1615.py rename to awx/main/migrations/0004_v300_changes.py index 712234f638..e4439bf3d6 100644 --- a/awx/main/migrations/0003_auto_20160209_1615.py +++ b/awx/main/migrations/0004_v300_changes.py @@ -8,7 +8,7 @@ import jsonbfield.fields class Migration(migrations.Migration): dependencies = [ - ('main', '0002_v300_changes'), + ('main', '0003_v300_changes'), ] operations = [ From 5340af2335fec62b7bcedd7aa74e1e687b7b7f92 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 1 Mar 2016 09:48:58 -0500 Subject: [PATCH 36/38] updated api tests to use new get fixture format --- .../functional/api/test_fact_versions.py | 20 +++++++++++------- .../tests/functional/api/test_fact_view.py | 21 ++++++++++++------- .../tests/functional/api/test_host_detail.py | 5 +---- 3 files changed, 26 insertions(+), 20 deletions(-) diff --git a/awx/main/tests/functional/api/test_fact_versions.py b/awx/main/tests/functional/api/test_fact_versions.py index b9ff345172..84a3c0c04e 100644 --- a/awx/main/tests/functional/api/test_fact_versions.py +++ b/awx/main/tests/functional/api/test_fact_versions.py @@ -7,9 +7,6 @@ import urllib # AWX from awx.main.models.fact import Fact -from awx.api.views import ( - HostFactVersionsList, -) from awx.main.utils import timestamp_apiformat # Django @@ -19,12 +16,19 @@ from django.utils import timezone def mock_feature_enabled(feature, bypass_database=None): return True +def build_url(*args, **kwargs): + get = kwargs.pop('get', {}) + url = reverse(*args, **kwargs) + if get: + url += '?' 
+ urllib.urlencode(get) + return url + def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1): hosts = hosts(host_count=host_count) fact_scans(fact_scans=3, timestamp_epoch=epoch) - url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) - response = get(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id, params=get_params) + url = build_url('api:host_fact_versions_list', args=(hosts[0].pk,), get=get_params) + response = get(url, user('admin', True)) return (hosts[0], response) @@ -50,7 +54,7 @@ def check_response_facts(facts_known, response): def test_no_facts_db(hosts, get, user): hosts = hosts(host_count=1) url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) - response = get(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id) + response = get(url, user('admin', True)) response_expected = { 'results': [] @@ -81,7 +85,7 @@ def test_basic_options_fields(hosts, fact_scans, options, user): fact_scans(fact_scans=1) url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) - response = options(HostFactVersionsList, user('admin', True), url, pk=hosts[0].id) + response = options(url, user('admin', True), pk=hosts[0].id) #import json #print(json.dumps(response.data)) @@ -192,7 +196,7 @@ def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj): team_obj.users.add(user_obj) url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) - response = get(HostFactVersionsList, user_obj, url, pk=hosts[0].id) + response = get(url, user_obj) return response @mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) diff --git a/awx/main/tests/functional/api/test_fact_view.py b/awx/main/tests/functional/api/test_fact_view.py index afa6361dcd..cb73ac7948 100644 --- a/awx/main/tests/functional/api/test_fact_view.py +++ b/awx/main/tests/functional/api/test_fact_view.py @@ -1,10 +1,8 @@ import mock import pytest import json +import urllib -from 
awx.api.views import ( - HostFactCompareView, -) from awx.main.utils import timestamp_apiformat from django.core.urlresolvers import reverse from django.utils import timezone @@ -12,6 +10,13 @@ from django.utils import timezone def mock_feature_enabled(feature, bypass_database=None): return True +def build_url(*args, **kwargs): + get = kwargs.pop('get', {}) + url = reverse(*args, **kwargs) + if get: + url += '?' + urllib.urlencode(get) + return url + # TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it def find_fact(facts, host_id, module_name, timestamp): for f in facts: @@ -23,8 +28,8 @@ def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name hosts = hosts(host_count=1) facts = fact_scans(fact_scans=1, timestamp_epoch=epoch) - url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) - response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id, params=get_params) + url = build_url('api:host_fact_compare_view', args=(hosts[0].pk,), get=get_params) + response = get(url, user('admin', True), params=get_params) fact_known = find_fact(facts, hosts[0].id, module_name, epoch) return (fact_known, response) @@ -34,7 +39,7 @@ def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name def test_no_fact_found(hosts, get, user): hosts = hosts(host_count=1) url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) - response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id) + response = get(url, user('admin', True)) expected_response = { "detail": "Fact not found" @@ -49,7 +54,7 @@ def test_basic_fields(hosts, fact_scans, get, user): fact_scans(fact_scans=1) url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) - response = get(HostFactCompareView, user('admin', True), url, pk=hosts[0].id) + response = get(url, user('admin', True)) assert 'related' in response.data assert 'id' in response.data @@ -111,7 +116,7 @@ 
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj): team_obj.users.add(user_obj) url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) - response = get(HostFactCompareView, user_obj, url, pk=hosts[0].id) + response = get(url, user_obj) return response @mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled) diff --git a/awx/main/tests/functional/api/test_host_detail.py b/awx/main/tests/functional/api/test_host_detail.py index 18e183f396..79213490b0 100644 --- a/awx/main/tests/functional/api/test_host_detail.py +++ b/awx/main/tests/functional/api/test_host_detail.py @@ -2,9 +2,6 @@ # Other host tests should live here to make this test suite more complete. import pytest -from awx.api.views import ( - HostDetail, -) from django.core.urlresolvers import reverse @pytest.mark.django_db @@ -12,7 +9,7 @@ def test_basic_fields(hosts, fact_scans, get, user): hosts = hosts(host_count=1) url = reverse('api:host_detail', args=(hosts[0].pk,)) - response = get(HostDetail, user('admin', True), url, pk=hosts[0].id) + response = get(url, user('admin', True)) assert 'related' in response.data assert 'fact_versions' in response.data['related'] From b5d8aa52c924addfde9715162c21d83604144e50 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 1 Mar 2016 10:08:08 -0500 Subject: [PATCH 37/38] get accepts data param, use it --- awx/main/tests/functional/api/test_fact_versions.py | 11 ++--------- awx/main/tests/functional/api/test_fact_view.py | 11 ++--------- 2 files changed, 4 insertions(+), 18 deletions(-) diff --git a/awx/main/tests/functional/api/test_fact_versions.py b/awx/main/tests/functional/api/test_fact_versions.py index 84a3c0c04e..b203c3deff 100644 --- a/awx/main/tests/functional/api/test_fact_versions.py +++ b/awx/main/tests/functional/api/test_fact_versions.py @@ -16,19 +16,12 @@ from django.utils import timezone def mock_feature_enabled(feature, bypass_database=None): return True -def build_url(*args, **kwargs): - get = 
kwargs.pop('get', {}) - url = reverse(*args, **kwargs) - if get: - url += '?' + urllib.urlencode(get) - return url - def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1): hosts = hosts(host_count=host_count) fact_scans(fact_scans=3, timestamp_epoch=epoch) - url = build_url('api:host_fact_versions_list', args=(hosts[0].pk,), get=get_params) - response = get(url, user('admin', True)) + url = reverse('api:host_fact_versions_list', args=(hosts[0].pk,)) + response = get(url, user('admin', True), data=get_params) return (hosts[0], response) diff --git a/awx/main/tests/functional/api/test_fact_view.py b/awx/main/tests/functional/api/test_fact_view.py index cb73ac7948..e6cd724d91 100644 --- a/awx/main/tests/functional/api/test_fact_view.py +++ b/awx/main/tests/functional/api/test_fact_view.py @@ -10,13 +10,6 @@ from django.utils import timezone def mock_feature_enabled(feature, bypass_database=None): return True -def build_url(*args, **kwargs): - get = kwargs.pop('get', {}) - url = reverse(*args, **kwargs) - if get: - url += '?' 
+ urllib.urlencode(get) - return url - # TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it def find_fact(facts, host_id, module_name, timestamp): for f in facts: @@ -28,8 +21,8 @@ def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name hosts = hosts(host_count=1) facts = fact_scans(fact_scans=1, timestamp_epoch=epoch) - url = build_url('api:host_fact_compare_view', args=(hosts[0].pk,), get=get_params) - response = get(url, user('admin', True), params=get_params) + url = reverse('api:host_fact_compare_view', args=(hosts[0].pk,)) + response = get(url, user('admin', True), data=get_params) fact_known = find_fact(facts, hosts[0].id, module_name, epoch) return (fact_known, response) From 67d1aa6c8688bc5854fe15a3749615c4d41eac36 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 1 Mar 2016 10:39:44 -0500 Subject: [PATCH 38/38] migration with correct help_text --- awx/main/migrations/0004_v300_changes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/awx/main/migrations/0004_v300_changes.py b/awx/main/migrations/0004_v300_changes.py index e4439bf3d6..66e523dc78 100644 --- a/awx/main/migrations/0004_v300_changes.py +++ b/awx/main/migrations/0004_v300_changes.py @@ -16,10 +16,10 @@ class Migration(migrations.Migration): name='Fact', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('timestamp', models.DateTimeField(default=None, editable=False)), + ('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)), ('module', models.CharField(max_length=128)), - ('facts', jsonbfield.fields.JSONField(default={}, blank=True)), - ('host', models.ForeignKey(related_name='facts', to='main.Host')), + ('facts', jsonbfield.fields.JSONField(default={}, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single 
host.', blank=True)), + ('host', models.ForeignKey(related_name='facts', to='main.Host', help_text='Host for the facts that the fact scan captured.')), ], ), migrations.AlterIndexTogether(