Merge branch 'downstream' into devel

This commit is contained in:
Ryan Petrello
2019-10-29 11:25:26 -04:00
93 changed files with 1589 additions and 596 deletions

View File

@@ -4338,13 +4338,30 @@ class NotificationTemplateSerializer(BaseSerializer):
error_list = []
collected_messages = []
def check_messages(messages):
    """Validate one ``{'message': ..., 'body': ...}`` override dict.

    Appends human-readable problems to the enclosing scope's
    ``error_list`` and collects every valid message string into
    ``collected_messages`` for later template validation.  Relies on the
    enclosing loop's ``event`` variable for error wording.
    """
    for message_type in messages:
        if message_type not in ('message', 'body'):
            error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
            continue
        message = messages[message_type]
        if message is None:
            # Explicit null override: nothing to validate or collect.
            continue
        if not isinstance(message, str):
            # Fix: dropped the stray trailing ", " from the error text.
            error_list.append(_("Expected string for '{}', found {}").format(message_type, type(message)))
            continue
        if message_type == 'message':
            if '\n' in message:
                # Fix: .format() moved outside _() so gettext receives the
                # untranslated template, consistent with the other errors.
                error_list.append(_("Messages cannot contain newlines (found newline in {} event)").format(event))
                continue
        collected_messages.append(message)
# Validate structure / content types
if not isinstance(messages, dict):
error_list.append(_("Expected dict for 'messages' field, found {}".format(type(messages))))
else:
for event in messages:
if event not in ['started', 'success', 'error']:
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', or 'error'").format(event))
if event not in ('started', 'success', 'error', 'workflow_approval'):
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', 'error', or 'workflow_approval'").format(event))
continue
event_messages = messages[event]
if event_messages is None:
@@ -4352,21 +4369,21 @@ class NotificationTemplateSerializer(BaseSerializer):
if not isinstance(event_messages, dict):
error_list.append(_("Expected dict for event '{}', found {}").format(event, type(event_messages)))
continue
for message_type in event_messages:
if message_type not in ['message', 'body']:
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
continue
message = event_messages[message_type]
if message is None:
continue
if not isinstance(message, str):
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
continue
if message_type == 'message':
if '\n' in message:
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
if event == 'workflow_approval':
for subevent in event_messages:
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
error_list.append(_("Workflow Approval event '{}' invalid, must be one of "
"'running', 'approved', 'timed_out', or 'denied'").format(subevent))
continue
collected_messages.append(message)
subevent_messages = event_messages[subevent]
if subevent_messages is None:
continue
if not isinstance(subevent_messages, dict):
error_list.append(_("Expected dict for workflow approval event '{}', found {}").format(subevent, type(subevent_messages)))
continue
check_messages(subevent_messages)
else:
check_messages(event_messages)
# Subclass to return name of undefined field
class DescriptiveUndefined(StrictUndefined):
@@ -4497,8 +4514,18 @@ class NotificationSerializer(BaseSerializer):
'notification_type', 'recipients', 'subject', 'body')
def get_body(self, obj):
    """Return the notification body in its most useful form.

    For webhook and pagerduty notifications the stored body may be a
    dict wrapping the real payload under ``'body'``, or a JSON-encoded
    string; unwrap / decode when possible, otherwise return it
    unchanged.
    """
    # NOTE(review): this span showed both the old webhook-only code and
    # its replacement interleaved by the diff view; the newer
    # webhook/pagerduty behavior is kept here.
    if obj.notification_type in ('webhook', 'pagerduty'):
        if isinstance(obj.body, dict):
            if 'body' in obj.body:
                return obj.body['body']
        elif isinstance(obj.body, str):
            # Attempt to decode a JSON string body; fall back to the raw
            # string if it is not valid JSON or not a JSON object.
            try:
                potential_body = json.loads(obj.body)
                if isinstance(potential_body, dict):
                    return potential_body
            except json.JSONDecodeError:
                pass
    return obj.body
def get_related(self, obj):
@@ -4774,6 +4801,18 @@ class InstanceGroupSerializer(BaseSerializer):
raise serializers.ValidationError(_('Isolated instances may not be added or removed from instances groups via the API.'))
if self.instance and self.instance.controller_id is not None:
raise serializers.ValidationError(_('Isolated instance group membership may not be managed via the API.'))
if value and self.instance and self.instance.is_containerized:
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
return value
def validate_policy_instance_percentage(self, value):
    """Reject a manual policy percentage on containerized instance groups."""
    # Short-circuit on ``value`` first so a falsy value never touches
    # ``self.instance`` (which may be absent on create).
    targets_containerized = (
        value
        and self.instance
        and self.instance.is_containerized
    )
    if targets_containerized:
        raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
    return value
def validate_policy_instance_minimum(self, value):
    """Reject a manual policy minimum on containerized instance groups."""
    # ``value`` is checked first; falsy values bypass the instance lookup
    # entirely, matching the sibling policy validators.
    if value and self.instance and self.instance.is_containerized:
        msg = _('Containerized instances may not be managed via the API')
        raise serializers.ValidationError(msg)
    return value
def validate_name(self, value):

View File

@@ -102,7 +102,7 @@ from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.api.views.mixin import (
ControlledByScmMixin, InstanceGroupMembershipMixin,
OrganizationCountsMixin, RelatedJobsPreventDeleteMixin,
UnifiedJobDeletionMixin,
UnifiedJobDeletionMixin, NoTruncateMixin,
)
from awx.api.views.organization import ( # noqa
OrganizationList,
@@ -383,6 +383,13 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP
serializer_class = serializers.InstanceGroupSerializer
permission_classes = (InstanceGroupTowerPermission,)
def update_raw_data(self, data):
    """Strip instance-policy fields from browsable-API raw data for
    containerized groups, which do not support manual policy tuning."""
    if self.get_object().is_containerized:
        for unsupported in ('policy_instance_percentage',
                            'policy_instance_minimum',
                            'policy_instance_list'):
            data.pop(unsupported, None)
    return super(InstanceGroupDetail, self).update_raw_data(data)
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if instance.controller is not None:
@@ -2136,12 +2143,21 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
def perform_list_destroy(self, instance_list):
    """Bulk-remove hosts owned by this inventory source.

    When activity-stream recording is disabled for inventory sync, hosts
    are bulk-deleted at the database level (much faster, no per-object
    signals); otherwise fall back to the per-object path, pre-deleting
    group-host memberships to avoid deadlock.
    """
    # NOTE(review): the diff view interleaved the old and new bodies of
    # this method; the settings-aware version is reconstructed here —
    # confirm nesting against the upstream file.
    inv_source = self.get_parent_object()
    with ignore_inventory_computed_fields():
        if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
            from awx.main.signals import disable_activity_stream
            with disable_activity_stream():
                # job host summary deletion necessary to avoid deadlock
                models.JobHostSummary.objects.filter(host__inventory_sources=inv_source).update(host=None)
                models.Host.objects.filter(inventory_sources=inv_source).delete()
                r = super(InventorySourceHostsList, self).perform_list_destroy([])
        else:
            # Advance delete of group-host memberships to prevent deadlock
            # Activity stream doesn't record disassociation here anyway
            # no signals-related reason to not bulk-delete
            models.Host.groups.through.objects.filter(
                host__inventory_sources=inv_source
            ).delete()
            r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
    update_inventory_computed_fields.delay(inv_source.inventory_id, True)
    return r
@@ -2157,11 +2173,18 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
def perform_list_destroy(self, instance_list):
    """Bulk-remove groups owned by this inventory source.

    Mirrors the host-list destroy: bulk DB delete when activity-stream
    recording for inventory sync is off, otherwise the per-object path
    with group-host memberships pre-deleted to avoid deadlock.
    """
    # NOTE(review): old/new bodies were interleaved by the diff view;
    # the settings-aware version is reconstructed here — confirm nesting
    # against the upstream file.
    inv_source = self.get_parent_object()
    with ignore_inventory_computed_fields():
        if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
            from awx.main.signals import disable_activity_stream
            with disable_activity_stream():
                models.Group.objects.filter(inventory_sources=inv_source).delete()
                r = super(InventorySourceGroupsList, self).perform_list_destroy([])
        else:
            # Advance delete of group-host memberships to prevent deadlock
            # Same arguments for bulk delete as with host list
            models.Group.hosts.through.objects.filter(
                group__inventory_sources=inv_source
            ).delete()
            r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
    update_inventory_computed_fields.delay(inv_source.inventory_id, True)
    return r
@@ -3762,18 +3785,12 @@ class JobHostSummaryDetail(RetrieveAPIView):
serializer_class = serializers.JobHostSummarySerializer
class JobEventList(ListAPIView):
class JobEventList(NoTruncateMixin, ListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
search_fields = ('stdout',)
def get_serializer_context(self):
    """Flag the serializer context when ?no_truncate is requested."""
    ctx = super().get_serializer_context()
    wants_full_stdout = self.request.query_params.get('no_truncate')
    if wants_full_stdout:
        ctx.update(no_truncate=True)
    return ctx
class JobEventDetail(RetrieveAPIView):
@@ -3786,7 +3803,7 @@ class JobEventDetail(RetrieveAPIView):
return context
class JobEventChildrenList(SubListAPIView):
class JobEventChildrenList(NoTruncateMixin, SubListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
@@ -3811,7 +3828,7 @@ class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
name = _('Job Event Hosts List')
class BaseJobEventsList(SubListAPIView):
class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
@@ -4007,18 +4024,12 @@ class AdHocCommandRelaunch(GenericAPIView):
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class AdHocCommandEventList(ListAPIView):
class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
model = models.AdHocCommandEvent
serializer_class = serializers.AdHocCommandEventSerializer
search_fields = ('stdout',)
def get_serializer_context(self):
    """Propagate the ?no_truncate query parameter to the serializer."""
    serializer_context = super().get_serializer_context()
    if self.request.query_params.get('no_truncate'):
        serializer_context.update(no_truncate=True)
    return serializer_context
class AdHocCommandEventDetail(RetrieveAPIView):
@@ -4031,7 +4042,7 @@ class AdHocCommandEventDetail(RetrieveAPIView):
return context
class BaseAdHocCommandEventsList(SubListAPIView):
class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
model = models.AdHocCommandEvent
serializer_class = serializers.AdHocCommandEventSerializer
@@ -4297,8 +4308,15 @@ class NotificationTemplateTest(GenericAPIView):
def post(self, request, *args, **kwargs):
obj = self.get_object()
notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE),
{"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)})
msg = "Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE)
if obj.notification_type in ('email', 'pagerduty'):
body = "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)
elif obj.notification_type == 'webhook':
body = '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(obj.id, settings.TOWER_URL_BASE)
else:
body = {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)}
notification = obj.generate_notification(msg, body)
if not notification:
return Response({}, status=status.HTTP_400_BAD_REQUEST)
else:

View File

@@ -270,3 +270,11 @@ class ControlledByScmMixin(object):
obj = super(ControlledByScmMixin, self).get_parent_object()
self._reset_inv_src_rev(obj)
return obj
class NoTruncateMixin(object):
    """View mixin for event lists: when the request carries a truthy
    ``no_truncate`` query parameter, sets ``no_truncate`` in the
    serializer context so event stdout is returned untruncated."""

    def get_serializer_context(self):
        ctx = super().get_serializer_context()
        wants_full_stdout = self.request.query_params.get('no_truncate')
        if wants_full_stdout:
            ctx.update(no_truncate=True)
        return ctx

View File

@@ -1,6 +1,5 @@
from hashlib import sha1
import hmac
import json
import logging
import urllib.parse
@@ -151,13 +150,13 @@ class WebhookReceiverBase(APIView):
'webhook_credential': obj.webhook_credential,
'webhook_guid': event_guid,
},
'extra_vars': json.dumps({
'extra_vars': {
'tower_webhook_event_type': event_type,
'tower_webhook_event_guid': event_guid,
'tower_webhook_event_ref': event_ref,
'tower_webhook_status_api': status_api,
'tower_webhook_payload': request.data,
})
}
}
new_job = obj.create_unified_job(**kwargs)