Mirror of https://github.com/ansible/awx.git (synced 2026-02-01 01:28:09 -03:30)

Commit: Merge branch 'downstream' into devel
.gitignore (vendored, 7 changes)
@@ -135,9 +135,10 @@ use_dev_supervisor.txt
 
 
 # Ansible module tests
-awx_collection_test_venv/
-awx_collection/*.tar.gz
-awx_collection/galaxy.yml
+/awx_collection_test_venv/
+/awx_collection/*.tar.gz
+/awx_collection/galaxy.yml
+/sanity/
 
 .idea/*
 *.unison.tmp
Makefile (7 changes)
@@ -399,6 +399,13 @@ flake8_collection:
 
 test_collection_all: prepare_collection_venv test_collection flake8_collection
 
+test_collection_sanity:
+        rm -rf sanity
+        mkdir -p sanity/ansible_collections/awx
+        cp -Ra awx_collection sanity/ansible_collections/awx/awx  # symlinks do not work
+        cd sanity/ansible_collections/awx/awx && git init && git add .  # requires both this file structure and a git repo, so there you go
+        cd sanity/ansible_collections/awx/awx && ansible-test sanity --test validate-modules
+
 build_collection:
         ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
         ansible-galaxy collection build awx_collection --output-path=awx_collection
@@ -4338,13 +4338,30 @@ class NotificationTemplateSerializer(BaseSerializer):
         error_list = []
         collected_messages = []
 
+        def check_messages(messages):
+            for message_type in messages:
+                if message_type not in ('message', 'body'):
+                    error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
+                    continue
+                message = messages[message_type]
+                if message is None:
+                    continue
+                if not isinstance(message, str):
+                    error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
+                    continue
+                if message_type == 'message':
+                    if '\n' in message:
+                        error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
+                        continue
+                collected_messages.append(message)
+
         # Validate structure / content types
         if not isinstance(messages, dict):
             error_list.append(_("Expected dict for 'messages' field, found {}".format(type(messages))))
         else:
             for event in messages:
-                if event not in ['started', 'success', 'error']:
-                    error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', or 'error'").format(event))
+                if event not in ('started', 'success', 'error', 'workflow_approval'):
+                    error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', 'error', or 'workflow_approval'").format(event))
                     continue
                 event_messages = messages[event]
                 if event_messages is None:
@@ -4352,21 +4369,21 @@ class NotificationTemplateSerializer(BaseSerializer):
                 if not isinstance(event_messages, dict):
                     error_list.append(_("Expected dict for event '{}', found {}").format(event, type(event_messages)))
                     continue
-                for message_type in event_messages:
-                    if message_type not in ['message', 'body']:
-                        error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
-                        continue
-                    message = event_messages[message_type]
-                    if message is None:
-                        continue
-                    if not isinstance(message, str):
-                        error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
-                        continue
-                    if message_type == 'message':
-                        if '\n' in message:
-                            error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
-                            continue
-                    collected_messages.append(message)
+                if event == 'workflow_approval':
+                    for subevent in event_messages:
+                        if subevent not in ('running', 'approved', 'timed_out', 'denied'):
+                            error_list.append(_("Workflow Approval event '{}' invalid, must be one of "
+                                                "'running', 'approved', 'timed_out', or 'denied'").format(subevent))
+                            continue
+                        subevent_messages = event_messages[subevent]
+                        if subevent_messages is None:
+                            continue
+                        if not isinstance(subevent_messages, dict):
+                            error_list.append(_("Expected dict for workflow approval event '{}', found {}").format(subevent, type(subevent_messages)))
+                            continue
+                        check_messages(subevent_messages)
+                else:
+                    check_messages(event_messages)
 
         # Subclass to return name of undefined field
         class DescriptiveUndefined(StrictUndefined):
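For reference, a sketch (not part of the commit) of a messages payload that passes the validation above; the template strings are illustrative, only the event and sub-event keys and the message/body structure come from the code:

    messages = {
        "started": {"message": "{{ job_friendly_name }} #{{ job.id }} started: {{ url }}", "body": None},
        "success": None,  # None is allowed; defaults are used instead
        "error": {"message": "{{ job_friendly_name }} #{{ job.id }} failed", "body": "{{ job_metadata }}"},
        "workflow_approval": {
            "running": {"message": 'Approval node "{{ approval_node_name }}" needs review: {{ workflow_url }}'},
            "denied": {"message": 'Approval node "{{ approval_node_name }}" was denied'},
        },
    }

Note that 'message' values may not contain newlines; multi-line content belongs in 'body'.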
@@ -4497,8 +4514,18 @@ class NotificationSerializer(BaseSerializer):
                   'notification_type', 'recipients', 'subject', 'body')
 
     def get_body(self, obj):
-        if obj.notification_type == 'webhook' and 'body' in obj.body:
-            return obj.body['body']
+        if obj.notification_type in ('webhook', 'pagerduty'):
+            if isinstance(obj.body, dict):
+                if 'body' in obj.body:
+                    return obj.body['body']
+            elif isinstance(obj.body, str):
+                # attempt to load json string
+                try:
+                    potential_body = json.loads(obj.body)
+                    if isinstance(potential_body, dict):
+                        return potential_body
+                except json.JSONDecodeError:
+                    pass
         return obj.body
 
     def get_related(self, obj):
@@ -4774,6 +4801,18 @@ class InstanceGroupSerializer(BaseSerializer):
                 raise serializers.ValidationError(_('Isolated instances may not be added or removed from instances groups via the API.'))
             if self.instance and self.instance.controller_id is not None:
                 raise serializers.ValidationError(_('Isolated instance group membership may not be managed via the API.'))
+        if value and self.instance and self.instance.is_containerized:
+            raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
+        return value
+
+    def validate_policy_instance_percentage(self, value):
+        if value and self.instance and self.instance.is_containerized:
+            raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
+        return value
+
+    def validate_policy_instance_minimum(self, value):
+        if value and self.instance and self.instance.is_containerized:
+            raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
         return value
 
     def validate_name(self, value):
@@ -102,7 +102,7 @@ from awx.main.scheduler.dag_workflow import WorkflowDAG
 from awx.api.views.mixin import (
     ControlledByScmMixin, InstanceGroupMembershipMixin,
     OrganizationCountsMixin, RelatedJobsPreventDeleteMixin,
-    UnifiedJobDeletionMixin,
+    UnifiedJobDeletionMixin, NoTruncateMixin,
 )
 from awx.api.views.organization import (  # noqa
     OrganizationList,
@@ -383,6 +383,13 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP
     serializer_class = serializers.InstanceGroupSerializer
     permission_classes = (InstanceGroupTowerPermission,)
 
+    def update_raw_data(self, data):
+        if self.get_object().is_containerized:
+            data.pop('policy_instance_percentage', None)
+            data.pop('policy_instance_minimum', None)
+            data.pop('policy_instance_list', None)
+        return super(InstanceGroupDetail, self).update_raw_data(data)
+
     def destroy(self, request, *args, **kwargs):
         instance = self.get_object()
         if instance.controller is not None:
@@ -2136,12 +2143,21 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
     def perform_list_destroy(self, instance_list):
         inv_source = self.get_parent_object()
         with ignore_inventory_computed_fields():
-            # Activity stream doesn't record disassociation here anyway
-            # no signals-related reason to not bulk-delete
-            models.Host.groups.through.objects.filter(
-                host__inventory_sources=inv_source
-            ).delete()
-            r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
+            if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
+                from awx.main.signals import disable_activity_stream
+                with disable_activity_stream():
+                    # job host summary deletion necessary to avoid deadlock
+                    models.JobHostSummary.objects.filter(host__inventory_sources=inv_source).update(host=None)
+                    models.Host.objects.filter(inventory_sources=inv_source).delete()
+                    r = super(InventorySourceHostsList, self).perform_list_destroy([])
+            else:
+                # Advance delete of group-host memberships to prevent deadlock
+                # Activity stream doesn't record disassociation here anyway
+                # no signals-related reason to not bulk-delete
+                models.Host.groups.through.objects.filter(
+                    host__inventory_sources=inv_source
+                ).delete()
+                r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
             update_inventory_computed_fields.delay(inv_source.inventory_id, True)
         return r
 
@@ -2157,11 +2173,18 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
     def perform_list_destroy(self, instance_list):
         inv_source = self.get_parent_object()
         with ignore_inventory_computed_fields():
-            # Same arguments for bulk delete as with host list
-            models.Group.hosts.through.objects.filter(
-                group__inventory_sources=inv_source
-            ).delete()
-            r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
+            if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
+                from awx.main.signals import disable_activity_stream
+                with disable_activity_stream():
+                    models.Group.objects.filter(inventory_sources=inv_source).delete()
+                    r = super(InventorySourceGroupsList, self).perform_list_destroy([])
+            else:
+                # Advance delete of group-host memberships to prevent deadlock
+                # Same arguments for bulk delete as with host list
+                models.Group.hosts.through.objects.filter(
+                    group__inventory_sources=inv_source
+                ).delete()
+                r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
             update_inventory_computed_fields.delay(inv_source.inventory_id, True)
         return r
 
@@ -3762,18 +3785,12 @@ class JobHostSummaryDetail(RetrieveAPIView):
     serializer_class = serializers.JobHostSummarySerializer
 
 
-class JobEventList(ListAPIView):
+class JobEventList(NoTruncateMixin, ListAPIView):
 
     model = models.JobEvent
     serializer_class = serializers.JobEventSerializer
     search_fields = ('stdout',)
 
-    def get_serializer_context(self):
-        context = super().get_serializer_context()
-        if self.request.query_params.get('no_truncate'):
-            context.update(no_truncate=True)
-        return context
-
 
 class JobEventDetail(RetrieveAPIView):
 
@@ -3786,7 +3803,7 @@ class JobEventDetail(RetrieveAPIView):
         return context
 
 
-class JobEventChildrenList(SubListAPIView):
+class JobEventChildrenList(NoTruncateMixin, SubListAPIView):
 
     model = models.JobEvent
     serializer_class = serializers.JobEventSerializer
@@ -3811,7 +3828,7 @@ class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
     name = _('Job Event Hosts List')
 
 
-class BaseJobEventsList(SubListAPIView):
+class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
 
     model = models.JobEvent
     serializer_class = serializers.JobEventSerializer
@@ -4007,18 +4024,12 @@ class AdHocCommandRelaunch(GenericAPIView):
         return Response(data, status=status.HTTP_201_CREATED, headers=headers)
 
 
-class AdHocCommandEventList(ListAPIView):
+class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
 
     model = models.AdHocCommandEvent
     serializer_class = serializers.AdHocCommandEventSerializer
     search_fields = ('stdout',)
 
-    def get_serializer_context(self):
-        context = super().get_serializer_context()
-        if self.request.query_params.get('no_truncate'):
-            context.update(no_truncate=True)
-        return context
-
 
 class AdHocCommandEventDetail(RetrieveAPIView):
 
@@ -4031,7 +4042,7 @@ class AdHocCommandEventDetail(RetrieveAPIView):
         return context
 
 
-class BaseAdHocCommandEventsList(SubListAPIView):
+class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
 
     model = models.AdHocCommandEvent
     serializer_class = serializers.AdHocCommandEventSerializer
@@ -4297,8 +4308,15 @@ class NotificationTemplateTest(GenericAPIView):
 
     def post(self, request, *args, **kwargs):
         obj = self.get_object()
-        notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE),
-                                                 {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)})
+        msg = "Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE)
+        if obj.notification_type in ('email', 'pagerduty'):
+            body = "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)
+        elif obj.notification_type == 'webhook':
+            body = '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(obj.id, settings.TOWER_URL_BASE)
+        else:
+            body = {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)}
+        notification = obj.generate_notification(msg, body)
+
         if not notification:
             return Response({}, status=status.HTTP_400_BAD_REQUEST)
         else:
@@ -270,3 +270,11 @@ class ControlledByScmMixin(object):
         obj = super(ControlledByScmMixin, self).get_parent_object()
         self._reset_inv_src_rev(obj)
         return obj
+
+
+class NoTruncateMixin(object):
+    def get_serializer_context(self):
+        context = super().get_serializer_context()
+        if self.request.query_params.get('no_truncate'):
+            context.update(no_truncate=True)
+        return context
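A sketch (not part of the commit) of how the new mixin is consumed by the event list views changed earlier in this diff; NoTruncateMixin is listed before the DRF view class so its get_serializer_context() runs via the MRO:

    class JobEventList(NoTruncateMixin, ListAPIView):

        model = models.JobEvent
        serializer_class = serializers.JobEventSerializer
        search_fields = ('stdout',)

    # A request such as GET /api/v2/job_events/?no_truncate=1 then puts
    # {'no_truncate': True} into the serializer context, so event stdout is
    # returned untruncated by the serializer.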
@@ -1,6 +1,5 @@
 from hashlib import sha1
 import hmac
-import json
 import logging
 import urllib.parse
 
@@ -151,13 +150,13 @@ class WebhookReceiverBase(APIView):
                 'webhook_credential': obj.webhook_credential,
                 'webhook_guid': event_guid,
             },
-            'extra_vars': json.dumps({
+            'extra_vars': {
                 'tower_webhook_event_type': event_type,
                 'tower_webhook_event_guid': event_guid,
                 'tower_webhook_event_ref': event_ref,
                 'tower_webhook_status_api': status_api,
                 'tower_webhook_payload': request.data,
-            })
+            }
         }
 
         new_job = obj.create_unified_job(**kwargs)
@@ -465,7 +465,7 @@ class BaseAccess(object):
                 else:
                     relationship = 'members'
                 return access_method(obj, parent_obj, relationship, skip_sub_obj_read_check=True, data={})
-            except (ParseError, ObjectDoesNotExist):
+            except (ParseError, ObjectDoesNotExist, PermissionDenied):
                 return False
         return False
 
@@ -1660,26 +1660,19 @@ class JobAccess(BaseAccess):
         except JobLaunchConfig.DoesNotExist:
             config = None
 
+        if obj.job_template and (self.user not in obj.job_template.execute_role):
+            return False
+
         # Check if JT execute access (and related prompts) is sufficient
-        if obj.job_template is not None:
-            if config is None:
-                prompts_access = False
-            elif not config.has_user_prompts(obj.job_template):
-                prompts_access = True
-            elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
-                prompts_access = False
-                if self.save_messages:
-                    self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
-            else:
-                prompts_access = (
-                    JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}) and
-                    not config.has_unprompted(obj.job_template)
-                )
-            jt_access = self.user in obj.job_template.execute_role
-            if prompts_access and jt_access:
-                return True
-            elif not jt_access:
-                return False
+        if config and obj.job_template:
+            if not config.has_user_prompts(obj.job_template):
+                return True
+            elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
+                # never allowed, not even for org admins
+                raise PermissionDenied(_('Job was launched with secret prompts provided by another user.'))
+            elif not config.has_unprompted(obj.job_template):
+                if JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
+                    return True
 
         org_access = bool(obj.inventory) and self.user in obj.inventory.organization.inventory_admin_role
         project_access = obj.project is None or self.user in obj.project.admin_role
@@ -2098,23 +2091,20 @@ class WorkflowJobAccess(BaseAccess):
                 self.messages['detail'] = _('Workflow Job was launched with unknown prompts.')
             return False
 
+        # execute permission to WFJT is mandatory for any relaunch
+        if self.user not in template.execute_role:
+            return False
+
         # Check if access to prompts to prevent relaunch
         if config.prompts_dict():
             if obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
-                if self.save_messages:
-                    self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
-                return False
+                raise PermissionDenied(_("Job was launched with secret prompts provided by another user."))
             if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
-                if self.save_messages:
-                    self.messages['detail'] = _('Job was launched with prompts you lack access to.')
-                return False
+                raise PermissionDenied(_('Job was launched with prompts you lack access to.'))
             if config.has_unprompted(template):
-                if self.save_messages:
-                    self.messages['detail'] = _('Job was launched with prompts no longer accepted.')
-                return False
+                raise PermissionDenied(_('Job was launched with prompts no longer accepted.'))
 
-        # execute permission to WFJT is mandatory for any relaunch
-        return (self.user in template.execute_role)
+        return True  # passed config checks
 
     def can_recreate(self, obj):
         node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credentials', 'unified_job_template')
@@ -513,6 +513,16 @@ register(
     category_slug='jobs'
 )
 
+register(
+    'PUBLIC_GALAXY_ENABLED',
+    field_class=fields.BooleanField,
+    default=True,
+    label=_('Allow Access to Public Galaxy'),
+    help_text=_('Allow or deny access to the public Ansible Galaxy during project updates.'),
+    category=_('Jobs'),
+    category_slug='jobs'
+)
+
 register(
     'STDOUT_MAX_BYTES_DISPLAY',
     field_class=fields.IntegerField,
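For context (not part of the diff), a setting registered through the registry in this way becomes readable like any other Django setting; a minimal sketch, assuming the standard AWX settings-registry behavior:

    from django.conf import settings

    if not settings.PUBLIC_GALAXY_ENABLED:
        # a project update would be configured to skip the public Ansible Galaxy here
        pass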
@@ -4,6 +4,7 @@ import importlib
 import sys
 import traceback
 
+from kubernetes.config import kube_config
 
 from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
 
@@ -107,6 +108,14 @@ class TaskWorker(BaseWorker):
             for callback in body.get('errbacks', []) or []:
                 callback['uuid'] = body['uuid']
                 self.perform_work(callback)
+        finally:
+            # It's frustrating that we have to do this, but the python k8s
+            # client leaves behind cacert files in /tmp, so we must clean up
+            # the tmpdir per-dispatcher process every time a new task comes in
+            try:
+                kube_config._cleanup_temp_files()
+            except Exception:
+                logger.exception('failed to cleanup k8s client tmp files')
 
         for callback in body.get('callbacks', []) or []:
             callback['uuid'] = body['uuid']
@@ -6,6 +6,7 @@ import stat
 import tempfile
 import time
 import logging
+import yaml
 
 from django.conf import settings
 import ansible_runner
@@ -48,10 +49,17 @@ class IsolatedManager(object):
     def build_inventory(self, hosts):
         if self.instance and self.instance.is_containerized:
             inventory = {'all': {'hosts': {}}}
+            fd, path = tempfile.mkstemp(
+                prefix='.kubeconfig', dir=self.private_data_dir
+            )
+            with open(path, 'wb') as temp:
+                temp.write(yaml.dump(self.pod_manager.kube_config).encode())
+                temp.flush()
+                os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
             for host in hosts:
                 inventory['all']['hosts'][host] = {
                     "ansible_connection": "kubectl",
-                    "ansible_kubectl_config": self.pod_manager.kube_config
+                    "ansible_kubectl_config": path,
                 }
         else:
             inventory = '\n'.join([
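Illustrative only (not part of the commit): the inventory the containerized branch now builds points each host at the temporary kubeconfig path written above instead of embedding the kube config dict inline:

    inventory = {
        'all': {
            'hosts': {
                'some-isolated-host': {
                    'ansible_connection': 'kubectl',
                    # value is whatever tempfile.mkstemp() returned, e.g.
                    # <private_data_dir>/.kubeconfigXXXXXX
                    'ansible_kubectl_config': path,
                },
            },
        },
    }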
@@ -143,6 +151,8 @@ class IsolatedManager(object):
             '- /artifacts/job_events/*-partial.json.tmp',
             # don't rsync the ssh_key FIFO
             '- /env/ssh_key',
+            # don't rsync kube config files
+            '- .kubeconfig*'
         ]
 
         for filename, data in (
@@ -295,7 +295,10 @@ class PrimordialModel(HasEditsMixin, CreatedModifiedModel):
 
     def __init__(self, *args, **kwargs):
         r = super(PrimordialModel, self).__init__(*args, **kwargs)
-        self._prior_values_store = self._get_fields_snapshot()
+        if self.pk:
+            self._prior_values_store = self._get_fields_snapshot()
+        else:
+            self._prior_values_store = {}
         return r
 
     def save(self, *args, **kwargs):
@@ -73,7 +73,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
     notification_configuration = prevent_search(JSONField(blank=False))
 
     def default_messages():
-        return {'started': None, 'success': None, 'error': None}
+        return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
 
     messages = JSONField(
         null=True,
@@ -92,25 +92,6 @@ class NotificationTemplate(CommonModelNameNotUnique):
     def get_message(self, condition):
         return self.messages.get(condition, {})
 
-    def build_notification_message(self, event_type, context):
-        env = sandbox.ImmutableSandboxedEnvironment()
-        templates = self.get_message(event_type)
-        msg_template = templates.get('message', {})
-
-        try:
-            notification_subject = env.from_string(msg_template).render(**context)
-        except (TemplateSyntaxError, UndefinedError, SecurityError):
-            notification_subject = ''
-
-        msg_body = templates.get('body', {})
-        try:
-            notification_body = env.from_string(msg_body).render(**context)
-        except (TemplateSyntaxError, UndefinedError, SecurityError):
-            notification_body = ''
-
-        return (notification_subject, notification_body)
-
     def get_absolute_url(self, request=None):
         return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)
 
@@ -128,19 +109,34 @@ class NotificationTemplate(CommonModelNameNotUnique):
         old_messages = old_nt.messages
         new_messages = self.messages
 
+        def merge_messages(local_old_messages, local_new_messages, local_event):
+            if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
+                local_old_event_msgs = local_old_messages[local_event]
+                local_new_event_msgs = local_new_messages[local_event]
+                for msg_type in ['message', 'body']:
+                    if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
+                        local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
+
         if old_messages is not None and new_messages is not None:
-            for event in ['started', 'success', 'error']:
+            for event in ('started', 'success', 'error', 'workflow_approval'):
                 if not new_messages.get(event, {}) and old_messages.get(event, {}):
                     new_messages[event] = old_messages[event]
                     continue
-                if new_messages.get(event, {}) and old_messages.get(event, {}):
-                    old_event_msgs = old_messages[event]
-                    new_event_msgs = new_messages[event]
-                    for msg_type in ['message', 'body']:
-                        if msg_type not in new_event_msgs and old_event_msgs.get(msg_type, None):
-                            new_event_msgs[msg_type] = old_event_msgs[msg_type]
+                if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
+                    new_messages.setdefault('workflow_approval', {})
+                    for subevent in ('running', 'approved', 'timed_out', 'denied'):
+                        old_wfa_messages = old_messages['workflow_approval']
+                        new_wfa_messages = new_messages['workflow_approval']
+                        if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
+                            new_wfa_messages[subevent] = old_wfa_messages[subevent]
+                            continue
+                        if old_wfa_messages:
+                            merge_messages(old_wfa_messages, new_wfa_messages, subevent)
+                else:
+                    merge_messages(old_messages, new_messages, event)
                 new_messages.setdefault(event, None)
 
 
         for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                             self.notification_class.init_parameters):
            if self.notification_configuration[field].startswith("$encrypted$"):
@@ -169,12 +165,12 @@ class NotificationTemplate(CommonModelNameNotUnique):
     def recipients(self):
         return self.notification_configuration[self.notification_class.recipient_parameter]
 
-    def generate_notification(self, subject, message):
+    def generate_notification(self, msg, body):
         notification = Notification(notification_template=self,
                                     notification_type=self.notification_type,
                                     recipients=smart_str(self.recipients),
-                                    subject=subject,
-                                    body=message)
+                                    subject=msg,
+                                    body=body)
         notification.save()
         return notification
 
@@ -370,7 +366,7 @@ class JobNotificationMixin(object):
                 'verbosity': 0},
         'job_friendly_name': 'Job',
         'url': 'https://towerhost/#/jobs/playbook/1010',
-        'job_summary_dict': """{'url': 'https://towerhost/$/jobs/playbook/13',
+        'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
 'traceback': '',
 'status': 'running',
 'started': '2019-08-07T21:46:38.362630+00:00',
@@ -389,14 +385,14 @@ class JobNotificationMixin(object):
         return context
 
     def context(self, serialized_job):
-        """Returns a context that can be used for rendering notification messages.
-        Context contains whitelisted content retrieved from a serialized job object
+        """Returns a dictionary that can be used for rendering notification messages.
+        The context will contain whitelisted content retrieved from a serialized job object
         (see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
         and a url to the job run."""
         context = {'job': {},
                    'job_friendly_name': self.get_notification_friendly_name(),
                    'url': self.get_ui_url(),
-                   'job_summary_dict': json.dumps(self.notification_data(), indent=4)}
+                   'job_metadata': json.dumps(self.notification_data(), indent=4)}
 
         def build_context(node, fields, whitelisted_fields):
             for safe_field in whitelisted_fields:
@@ -434,32 +430,33 @@ class JobNotificationMixin(object):
         context = self.context(job_serialization)
 
         msg_template = body_template = None
+        msg = body = ''
+
+        # Use custom template if available
         if nt.messages:
-            templates = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
-            msg_template = templates.get('message', {})
-            body_template = templates.get('body', {})
+            template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
+            msg_template = template.get('message', None)
+            body_template = template.get('body', None)
+        # If custom template not provided, look up default template
+        default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
+        if not msg_template:
+            msg_template = default_template.get('message', None)
+        if not body_template:
+            body_template = default_template.get('body', None)
 
         if msg_template:
             try:
-                notification_subject = env.from_string(msg_template).render(**context)
+                msg = env.from_string(msg_template).render(**context)
             except (TemplateSyntaxError, UndefinedError, SecurityError):
-                notification_subject = ''
-        else:
-            notification_subject = u"{} #{} '{}' {}: {}".format(self.get_notification_friendly_name(),
-                                                                self.id,
-                                                                self.name,
-                                                                status,
-                                                                self.get_ui_url())
-        notification_body = self.notification_data()
-        notification_body['friendly_name'] = self.get_notification_friendly_name()
+                msg = ''
+
         if body_template:
             try:
-                notification_body['body'] = env.from_string(body_template).render(**context)
+                body = env.from_string(body_template).render(**context)
             except (TemplateSyntaxError, UndefinedError, SecurityError):
-                notification_body['body'] = ''
+                body = ''
 
-        return (notification_subject, notification_body)
+        return (msg, body)
 
     def send_notification_templates(self, status):
         from awx.main.tasks import send_notifications  # avoid circular import
@@ -475,16 +472,13 @@ class JobNotificationMixin(object):
             return
 
         for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
-            try:
-                (notification_subject, notification_body) = self.build_notification_message(nt, status)
-            except AttributeError:
-                raise NotImplementedError("build_notification_message() does not exist" % status)
+            (msg, body) = self.build_notification_message(nt, status)
 
             # Use kwargs to force late-binding
             # https://stackoverflow.com/a/3431699/10669572
-            def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
+            def send_it(local_nt=nt, local_msg=msg, local_body=body):
                 def _func():
-                    send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
+                    send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
                                              job_id=self.id)
                 return _func
             connection.on_commit(send_it())
@@ -2,6 +2,7 @@
 # All Rights Reserved.
 
 # Python
+import json
 import logging
 from copy import copy
 from urllib.parse import urljoin
@@ -16,6 +17,9 @@ from django.core.exceptions import ObjectDoesNotExist
 # Django-CRUM
 from crum import get_current_user
 
+from jinja2 import sandbox
+from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
+
 # AWX
 from awx.api.versioning import reverse
 from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,
|
|||||||
connection.on_commit(send_it())
|
connection.on_commit(send_it())
|
||||||
|
|
||||||
def build_approval_notification_message(self, nt, approval_status):
|
def build_approval_notification_message(self, nt, approval_status):
|
||||||
subject = []
|
env = sandbox.ImmutableSandboxedEnvironment()
|
||||||
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
|
|
||||||
subject.append(('The approval node "{}"').format(self.workflow_approval_template.name))
|
|
||||||
if approval_status == 'running':
|
|
||||||
subject.append(('needs review. This node can be viewed at: {}').format(workflow_url))
|
|
||||||
if approval_status == 'approved':
|
|
||||||
subject.append(('was approved. {}').format(workflow_url))
|
|
||||||
if approval_status == 'timed_out':
|
|
||||||
subject.append(('has timed out. {}').format(workflow_url))
|
|
||||||
elif approval_status == 'denied':
|
|
||||||
subject.append(('was denied. {}').format(workflow_url))
|
|
||||||
subject = " ".join(subject)
|
|
||||||
body = self.notification_data()
|
|
||||||
body['body'] = subject
|
|
||||||
|
|
||||||
return subject, body
|
context = self.context(approval_status)
|
||||||
|
|
||||||
|
msg_template = body_template = None
|
||||||
|
msg = body = ''
|
||||||
|
|
||||||
|
# Use custom template if available
|
||||||
|
if nt.messages and nt.messages.get('workflow_approval', None):
|
||||||
|
template = nt.messages['workflow_approval'].get(approval_status, {})
|
||||||
|
msg_template = template.get('message', None)
|
||||||
|
body_template = template.get('body', None)
|
||||||
|
# If custom template not provided, look up default template
|
||||||
|
default_template = nt.notification_class.default_messages['workflow_approval'][approval_status]
|
||||||
|
if not msg_template:
|
||||||
|
msg_template = default_template.get('message', None)
|
||||||
|
if not body_template:
|
||||||
|
body_template = default_template.get('body', None)
|
||||||
|
|
||||||
|
if msg_template:
|
||||||
|
try:
|
||||||
|
msg = env.from_string(msg_template).render(**context)
|
||||||
|
except (TemplateSyntaxError, UndefinedError, SecurityError):
|
||||||
|
msg = ''
|
||||||
|
|
||||||
|
if body_template:
|
||||||
|
try:
|
||||||
|
body = env.from_string(body_template).render(**context)
|
||||||
|
except (TemplateSyntaxError, UndefinedError, SecurityError):
|
||||||
|
body = ''
|
||||||
|
|
||||||
|
return (msg, body)
|
||||||
|
|
||||||
|
def context(self, approval_status):
|
||||||
|
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
|
||||||
|
return {'approval_status': approval_status,
|
||||||
|
'approval_node_name': self.workflow_approval_template.name,
|
||||||
|
'workflow_url': workflow_url,
|
||||||
|
'job_metadata': json.dumps(self.notification_data(), indent=4)}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def workflow_job_template(self):
|
def workflow_job_template(self):
|
||||||
|
|||||||
@@ -1,21 +1,10 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved.
 
-import json
-
-from django.utils.encoding import smart_text
 from django.core.mail.backends.base import BaseEmailBackend
-from django.utils.translation import ugettext_lazy as _
 
 
 class AWXBaseEmailBackend(BaseEmailBackend):
 
     def format_body(self, body):
-        if "body" in body:
-            body_actual = body['body']
-        else:
-            body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
-                body['friendly_name'], body['id'], body['status'], body['url'])
-            )
-            body_actual += json.dumps(body, indent=4)
-        return body_actual
+        return body
awx/main/notifications/custom_notification_base.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+# Copyright (c) 2019 Ansible, Inc.
+# All Rights Reserved.
+
+
+class CustomNotificationBase(object):
+    DEFAULT_MSG = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
+    DEFAULT_BODY = "{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_metadata }}"
+
+    default_messages = {"started": {"message": DEFAULT_MSG, "body": None},
+                        "success": {"message": DEFAULT_MSG, "body": None},
+                        "error": {"message": DEFAULT_MSG, "body": None},
+                        "workflow_approval": {"running": {"message": 'The approval node "{{ approval_node_name }}" needs review. '
+                                                                     'This node can be viewed at: {{ workflow_url }}',
+                                                          "body": None},
+                                              "approved": {"message": 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}',
+                                                           "body": None},
+                                              "timed_out": {"message": 'The approval node "{{ approval_node_name }}" has timed out. {{ workflow_url }}',
+                                                            "body": None},
+                                              "denied": {"message": 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}',
+                                                         "body": None}}}
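A sketch (not part of the commit) of how these class-level defaults are consumed: the reworked build_notification_message() earlier in this diff falls back to notification_class.default_messages and renders the template in a sandboxed Jinja2 environment, roughly:

    from jinja2 import sandbox

    env = sandbox.ImmutableSandboxedEnvironment()
    default_template = CustomNotificationBase.default_messages['success']
    msg = env.from_string(default_template['message']).render(
        job_friendly_name='Job',
        job={'id': 13, 'name': 'demo', 'status': 'successful'},
        url='https://towerhost/#/jobs/playbook/13',
        job_metadata='{}',
    )
    # -> "Job #13 'demo' successful: https://towerhost/#/jobs/playbook/13"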
@@ -1,14 +1,15 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved.
 
-import json
-
-from django.utils.encoding import smart_text
 from django.core.mail.backends.smtp import EmailBackend
-from django.utils.translation import ugettext_lazy as _
+
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
+
+DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
+DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
 
 
-class CustomEmailBackend(EmailBackend):
+class CustomEmailBackend(EmailBackend, CustomNotificationBase):
 
     init_parameters = {"host": {"label": "Host", "type": "string"},
                        "port": {"label": "Port", "type": "int"},
@@ -19,22 +20,17 @@ class CustomEmailBackend(EmailBackend):
                        "sender": {"label": "Sender Email", "type": "string"},
                        "recipients": {"label": "Recipient List", "type": "list"},
                        "timeout": {"label": "Timeout", "type": "int", "default": 30}}
-
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    DEFAULT_BODY = smart_text(_("{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_summary_dict }}"))
-    default_messages = {"started": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
-                        "success": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
-                        "error": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
     recipient_parameter = "recipients"
     sender_parameter = "sender"
 
+    default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                        "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                        "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                        "workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                                              "approved": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                                              "timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                                              "denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}
+
     def format_body(self, body):
-        if "body" in body:
-            body_actual = body['body']
-        else:
-            body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
-                body['friendly_name'], body['id'], body['status'], body['url'])
-            )
-            body_actual += json.dumps(body, indent=4)
-        return body_actual
+        # leave body unchanged (expect a string)
+        return body
@@ -8,24 +8,21 @@ import dateutil.parser as dp
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
 
 logger = logging.getLogger('awx.main.notifications.grafana_backend')
 
 
-class GrafanaBackend(AWXBaseEmailBackend):
+class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"},
                        "grafana_key": {"label": "Grafana API Key", "type": "password"}}
     recipient_parameter = "grafana_url"
     sender_parameter = None
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, grafana_key, dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
                  fail_silently=False, **kwargs):
         super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
@@ -7,12 +7,14 @@ import requests
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
+
 
 logger = logging.getLogger('awx.main.notifications.hipchat_backend')
 
 
-class HipChatBackend(AWXBaseEmailBackend):
+class HipChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"token": {"label": "Token", "type": "password"},
                        "rooms": {"label": "Destination Rooms", "type": "list"},
@@ -23,11 +25,6 @@ class HipChatBackend(AWXBaseEmailBackend):
     recipient_parameter = "rooms"
     sender_parameter = "message_from"
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs):
         super(HipChatBackend, self).__init__(fail_silently=fail_silently)
         self.token = token
@@ -9,12 +9,14 @@ import irc.client
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
+
 
 logger = logging.getLogger('awx.main.notifications.irc_backend')
 
 
-class IrcBackend(AWXBaseEmailBackend):
+class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
                        "port": {"label": "IRC Server Port", "type": "int"},
@@ -25,11 +27,6 @@ class IrcBackend(AWXBaseEmailBackend):
     recipient_parameter = "targets"
     sender_parameter = None
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs):
         super(IrcBackend, self).__init__(fail_silently=fail_silently)
         self.server = server
@@ -7,23 +7,20 @@ import json
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
+
 
 logger = logging.getLogger('awx.main.notifications.mattermost_backend')
 
 
-class MattermostBackend(AWXBaseEmailBackend):
+class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"mattermost_url": {"label": "Target URL", "type": "string"},
                        "mattermost_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
     recipient_parameter = "mattermost_url"
     sender_parameter = None
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, mattermost_no_verify_ssl=False, mattermost_channel=None, mattermost_username=None,
                  mattermost_icon_url=None, fail_silently=False, **kwargs):
         super(MattermostBackend, self).__init__(fail_silently=fail_silently)
@@ -1,17 +1,23 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved.
 
+import json
 import logging
 import pygerduty
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
+DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
+DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
 
 logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
 
 
-class PagerDutyBackend(AWXBaseEmailBackend):
+class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
                        "token": {"label": "API Token", "type": "password"},
@@ -20,11 +26,14 @@ class PagerDutyBackend(AWXBaseEmailBackend):
     recipient_parameter = "service_key"
     sender_parameter = "client_name"
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    DEFAULT_BODY = "{{ job_summary_dict }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
-                        "success": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
-                        "error": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
+    DEFAULT_BODY = "{{ job_metadata }}"
+    default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                        "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                        "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                        "workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                                              "approved": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                                              "timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+                                              "denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}
 
     def __init__(self, subdomain, token, fail_silently=False, **kwargs):
         super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
@@ -32,6 +41,16 @@ class PagerDutyBackend(AWXBaseEmailBackend):
         self.token = token
 
     def format_body(self, body):
+        # cast to dict if possible # TODO: is it true that this can be a dict or str?
+        try:
+            potential_body = json.loads(body)
+            if isinstance(potential_body, dict):
+                body = potential_body
+        except json.JSONDecodeError:
+            pass
+
+        # but it's okay if this is also just a string
+
         return body
 
     def send_messages(self, messages):
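As a quick illustration of the format_body() logic added above: a JSON object string is coerced to a dict, while anything that is not valid JSON (or not a JSON object) passes through untouched. This is a standalone sketch rather than AWX code, and the sample payloads are invented.

import json

def format_body(body):
    # mirror of the behavior shown in the hunk above
    try:
        potential_body = json.loads(body)
        if isinstance(potential_body, dict):
            body = potential_body
    except json.JSONDecodeError:
        pass
    return body

print(format_body('{"hosts": 3, "failed": 0}'))   # -> {'hosts': 3, 'failed': 0}
print(format_body('plain text message'))          # -> 'plain text message'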
@@ -7,22 +7,20 @@ import json
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
 logger = logging.getLogger('awx.main.notifications.rocketchat_backend')
 
 
-class RocketChatBackend(AWXBaseEmailBackend):
+class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"rocketchat_url": {"label": "Target URL", "type": "string"},
                        "rocketchat_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
     recipient_parameter = "rocketchat_url"
     sender_parameter = None
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, rocketchat_no_verify_ssl=False, rocketchat_username=None, rocketchat_icon_url=None, fail_silently=False, **kwargs):
         super(RocketChatBackend, self).__init__(fail_silently=fail_silently)
@@ -6,24 +6,21 @@ from slackclient import SlackClient
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
 logger = logging.getLogger('awx.main.notifications.slack_backend')
 WEBSOCKET_TIMEOUT = 30
 
 
-class SlackBackend(AWXBaseEmailBackend):
+class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"token": {"label": "Token", "type": "password"},
                        "channels": {"label": "Destination Channels", "type": "list"}}
     recipient_parameter = "channels"
     sender_parameter = None
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, token, hex_color="", fail_silently=False, **kwargs):
         super(SlackBackend, self).__init__(fail_silently=fail_silently)
         self.token = token
@@ -7,12 +7,14 @@ from twilio.rest import Client
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
 logger = logging.getLogger('awx.main.notifications.twilio_backend')
 
 
-class TwilioBackend(AWXBaseEmailBackend):
+class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
                        "account_token": {"label": "Account Token", "type": "password"},
@@ -21,11 +23,6 @@ class TwilioBackend(AWXBaseEmailBackend):
     recipient_parameter = "to_numbers"
     sender_parameter = "from_number"
 
-    DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
-    default_messages = {"started": {"message": DEFAULT_SUBJECT},
-                        "success": {"message": DEFAULT_SUBJECT},
-                        "error": {"message": DEFAULT_SUBJECT}}
-
     def __init__(self, account_sid, account_token, fail_silently=False, **kwargs):
         super(TwilioBackend, self).__init__(fail_silently=fail_silently)
         self.account_sid = account_sid
@@ -7,13 +7,15 @@ import requests
 
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
 
 from awx.main.notifications.base import AWXBaseEmailBackend
 from awx.main.utils import get_awx_version
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
 
 logger = logging.getLogger('awx.main.notifications.webhook_backend')
 
 
-class WebhookBackend(AWXBaseEmailBackend):
+class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
 
     init_parameters = {"url": {"label": "Target URL", "type": "string"},
                        "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
@@ -24,10 +26,16 @@ class WebhookBackend(AWXBaseEmailBackend):
     recipient_parameter = "url"
     sender_parameter = None
 
-    DEFAULT_BODY = "{{ job_summary_dict }}"
+    DEFAULT_BODY = "{{ job_metadata }}"
     default_messages = {"started": {"body": DEFAULT_BODY},
                         "success": {"body": DEFAULT_BODY},
-                        "error": {"body": DEFAULT_BODY}}
+                        "error": {"body": DEFAULT_BODY},
+                        "workflow_approval": {
+                            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. '
+                                                'This node can be viewed at: {{ workflow_url }}"}'},
+                            "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
+                            "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
+                            "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'}}}
 
     def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
         self.http_method = http_method
@@ -38,15 +46,13 @@ class WebhookBackend(AWXBaseEmailBackend):
         super(WebhookBackend, self).__init__(fail_silently=fail_silently)
 
     def format_body(self, body):
-        # If `body` has body field, attempt to use this as the main body,
-        # otherwise, leave it as a sub-field
-        if isinstance(body, dict) and 'body' in body and isinstance(body['body'], str):
-            try:
-                potential_body = json.loads(body['body'])
-                if isinstance(potential_body, dict):
-                    body = potential_body
-            except json.JSONDecodeError:
-                pass
+        # expect body to be a string representing a dict
+        try:
+            potential_body = json.loads(body)
+            if isinstance(potential_body, dict):
+                body = potential_body
+        except json.JSONDecodeError:
+            body = {}
         return body
 
     def send_messages(self, messages):
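For reference, the new workflow_approval defaults above are JSON strings containing template placeholders; once the placeholders are filled in, the string parses into the dict the webhook backend will POST. A small hand-rendered example follows; the node name and URL are invented for illustration.

import json

# what one of the default bodies looks like after its placeholders are rendered
rendered = '{"body": "The approval node \\"provision\\" was approved. https://awx.example.com/#/workflows/42"}'
print(json.loads(rendered))
# {'body': 'The approval node "provision" was approved. https://awx.example.com/#/workflows/42'}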
@@ -12,10 +12,12 @@ class UriCleaner(object):
 
     @staticmethod
    def remove_sensitive(cleartext):
+        # exclude_list contains the items that will _not_ be redacted
+        exclude_list = [settings.PUBLIC_GALAXY_SERVER['url']]
         if settings.PRIMARY_GALAXY_URL:
-            exclude_list = [settings.PRIMARY_GALAXY_URL] + [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
-        else:
-            exclude_list = [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
+            exclude_list += [settings.PRIMARY_GALAXY_URL]
+        if settings.FALLBACK_GALAXY_SERVERS:
+            exclude_list += [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
         redactedtext = cleartext
         text_index = 0
         while True:
@@ -1,9 +1,5 @@
 import collections
-import os
-import stat
 import time
-import yaml
-import tempfile
 import logging
 from base64 import b64encode
 
@@ -88,8 +84,17 @@ class PodManager(object):
 
     @cached_property
     def kube_api(self):
-        my_client = config.new_client_from_config(config_file=self.kube_config)
-        return client.CoreV1Api(api_client=my_client)
+        # this feels a little janky, but it's what k8s' own code does
+        # internally when it reads kube config files from disk:
+        # https://github.com/kubernetes-client/python-base/blob/0b208334ef0247aad9afcaae8003954423b61a0d/config/kube_config.py#L643
+        loader = config.kube_config.KubeConfigLoader(
+            config_dict=self.kube_config
+        )
+        cfg = type.__call__(client.Configuration)
+        loader.load_and_set(cfg)
+        return client.CoreV1Api(api_client=client.ApiClient(
+            configuration=cfg
+        ))
 
     @property
     def pod_name(self):
@@ -174,10 +179,4 @@ def generate_tmp_kube_config(credential, namespace):
         ).decode()  # decode the base64 data into a str
     else:
         config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True
-    fd, path = tempfile.mkstemp(prefix='kubeconfig')
-    with open(path, 'wb') as temp:
-        temp.write(yaml.dump(config).encode())
-        temp.flush()
-        os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
-    return path
+    return config
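The kube_api change above builds a Kubernetes API client from an in-memory dict instead of writing a temporary kubeconfig file. Below is a minimal standalone sketch of the same pattern, assuming the official kubernetes Python client is installed; the kubeconfig dict itself is a made-up example, not anything AWX generates.

from kubernetes import client, config

# hypothetical in-memory kubeconfig, never written to disk
kube_config = {
    "apiVersion": "v1",
    "clusters": [{"name": "demo", "cluster": {"server": "https://10.0.0.1:6443", "insecure-skip-tls-verify": True}}],
    "users": [{"name": "demo", "user": {"token": "REDACTED"}}],
    "contexts": [{"name": "demo", "context": {"cluster": "demo", "user": "demo"}}],
    "current-context": "demo",
}

loader = config.kube_config.KubeConfigLoader(config_dict=kube_config)
cfg = client.Configuration()
loader.load_and_set(cfg)
api = client.CoreV1Api(api_client=client.ApiClient(configuration=cfg))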
@@ -252,19 +252,25 @@ class TaskManager():
             logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(
                          task.log_format, task.execution_node, controller_node))
         elif rampart_group.is_containerized:
+            # find one real, non-containerized instance with capacity to
+            # act as the controller for k8s API interaction
+            match = None
+            for group in InstanceGroup.objects.all():
+                if group.is_containerized or group.controller_id:
+                    continue
+                match = group.find_largest_idle_instance()
+                if match:
+                    break
             task.instance_group = rampart_group
-            if not task.supports_isolation():
+            if task.supports_isolation():
+                task.controller_node = match.hostname
+            else:
                 # project updates and inventory updates don't *actually* run in pods,
                 # so just pick *any* non-isolated, non-containerized host and use it
-                for group in InstanceGroup.objects.all():
-                    if group.is_containerized or group.controller_id:
-                        continue
-                    match = group.find_largest_idle_instance()
-                    if match:
-                        task.execution_node = match.hostname
-                        logger.debug('Submitting containerized {} to queue {}.'.format(
-                                     task.log_format, task.execution_node))
-                        break
+                # as the execution node
+                task.execution_node = match.hostname
+                logger.debug('Submitting containerized {} to queue {}.'.format(
+                             task.log_format, task.execution_node))
         else:
             task.instance_group = rampart_group
             if instance is not None:
@@ -1423,7 +1423,6 @@ class BaseTask(object):
     def deploy_container_group_pod(self, task):
         from awx.main.scheduler.kubernetes import PodManager  # Avoid circular import
         pod_manager = PodManager(self.instance)
-        self.cleanup_paths.append(pod_manager.kube_config)
         try:
             log_name = task.log_format
             logger.debug(f"Launching pod for {log_name}.")
@@ -1452,7 +1451,7 @@ class BaseTask(object):
         self.update_model(task.pk, execution_node=pod_manager.pod_name)
         return pod_manager
 
@@ -1959,9 +1958,15 @@ class RunProjectUpdate(BaseTask):
         env['PROJECT_UPDATE_ID'] = str(project_update.pk)
         env['ANSIBLE_CALLBACK_PLUGINS'] = self.get_path_to('..', 'plugins', 'callback')
         env['ANSIBLE_GALAXY_IGNORE'] = True
-        # Set up the fallback server, which is the normal Ansible Galaxy by default
-        galaxy_servers = list(settings.FALLBACK_GALAXY_SERVERS)
-        # If private galaxy URL is non-blank, that means this feature is enabled
+        # Set up the public Galaxy server, if enabled
+        if settings.PUBLIC_GALAXY_ENABLED:
+            galaxy_servers = [settings.PUBLIC_GALAXY_SERVER]
+        else:
+            galaxy_servers = []
+        # Set up fallback Galaxy servers, if configured
+        if settings.FALLBACK_GALAXY_SERVERS:
+            galaxy_servers = settings.FALLBACK_GALAXY_SERVERS + galaxy_servers
+        # Set up the primary Galaxy server, if configured
         if settings.PRIMARY_GALAXY_URL:
             galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers
         for key in GALAXY_SERVER_FIELDS:
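An illustrative walk-through of the precedence that block builds up; the concrete values below are stand-ins, not real AWX defaults beyond what the diff itself shows. Earlier entries in the resulting list are consulted first when Ansible resolves collections.

PUBLIC_GALAXY_ENABLED = True
PUBLIC_GALAXY_SERVER = {'id': 'galaxy', 'url': 'https://galaxy.ansible.com'}
FALLBACK_GALAXY_SERVERS = [{'id': 'mirror', 'url': 'https://galaxy-mirror.example.com'}]   # hypothetical
PRIMARY_GALAXY_URL = 'https://automation-hub.example.com'                                  # hypothetical

galaxy_servers = [PUBLIC_GALAXY_SERVER] if PUBLIC_GALAXY_ENABLED else []
if FALLBACK_GALAXY_SERVERS:
    galaxy_servers = FALLBACK_GALAXY_SERVERS + galaxy_servers
if PRIMARY_GALAXY_URL:
    galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers

print([s['id'] for s in galaxy_servers])
# ['primary_galaxy', 'mirror', 'galaxy']  -- primary, then fallback, then public Galaxy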
@@ -2354,6 +2359,27 @@ class RunInventoryUpdate(BaseTask):
                 env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
         elif inventory_update.source == 'file':
             raise NotImplementedError('Cannot update file sources through the task system.')
+
+        if inventory_update.source == 'scm' and inventory_update.source_project_update:
+            env_key = 'ANSIBLE_COLLECTIONS_PATHS'
+            config_setting = 'collections_paths'
+            folder = 'requirements_collections'
+            default = '~/.ansible/collections:/usr/share/ansible/collections'
+
+            config_values = read_ansible_config(os.path.join(private_data_dir, 'project'), [config_setting])
+
+            paths = default.split(':')
+            if env_key in env:
+                for path in env[env_key].split(':'):
+                    if path not in paths:
+                        paths = [env[env_key]] + paths
+            elif config_setting in config_values:
+                for path in config_values[config_setting].split(':'):
+                    if path not in paths:
+                        paths = [config_values[config_setting]] + paths
+            paths = [os.path.join(private_data_dir, folder)] + paths
+            env[env_key] = os.pathsep.join(paths)
+
         return env
 
     def write_args_file(self, private_data_dir, args):
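A toy illustration of that path-building logic in isolation; the directory names are hypothetical, and read_ansible_config is stood in by a plain dict. On a Linux host os.pathsep is ':'.

import os

private_data_dir = '/tmp/awx_123_abc'                      # hypothetical job dir
env = {}                                                   # pretend no env override exists
config_values = {'collections_paths': '/opt/collections'}  # pretend ansible.cfg sets this

default = '~/.ansible/collections:/usr/share/ansible/collections'
paths = default.split(':')
if 'ANSIBLE_COLLECTIONS_PATHS' in env:
    paths = [env['ANSIBLE_COLLECTIONS_PATHS']] + paths
elif 'collections_paths' in config_values:
    paths = [config_values['collections_paths']] + paths
paths = [os.path.join(private_data_dir, 'requirements_collections')] + paths

print(os.pathsep.join(paths))
# /tmp/awx_123_abc/requirements_collections:/opt/collections:~/.ansible/collections:/usr/share/ansible/collections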
@@ -2452,7 +2478,7 @@ class RunInventoryUpdate(BaseTask):
             # Use the vendored script path
             inventory_path = self.get_path_to('..', 'plugins', 'inventory', injector.script_name)
         elif src == 'scm':
-            inventory_path = inventory_update.get_actual_source_path()
+            inventory_path = os.path.join(private_data_dir, 'project', inventory_update.source_path)
         elif src == 'custom':
             handle, inventory_path = tempfile.mkstemp(dir=private_data_dir)
             f = os.fdopen(handle, 'w')
@@ -2473,7 +2499,7 @@ class RunInventoryUpdate(BaseTask):
         '''
         src = inventory_update.source
         if src == 'scm' and inventory_update.source_project_update:
-            return inventory_update.source_project_update.get_project_path(check_if_exists=False)
+            return os.path.join(private_data_dir, 'project')
         if src in CLOUD_PROVIDERS:
             injector = None
             if src in InventorySource.injectors:
@@ -2509,8 +2535,10 @@ class RunInventoryUpdate(BaseTask):
 
             project_update_task = local_project_sync._get_task_class()
             try:
-                project_update_task().run(local_project_sync.id)
-                inventory_update.inventory_source.scm_last_revision = local_project_sync.project.scm_revision
+                sync_task = project_update_task(job_private_data_dir=private_data_dir)
+                sync_task.run(local_project_sync.id)
+                local_project_sync.refresh_from_db()
+                inventory_update.inventory_source.scm_last_revision = local_project_sync.scm_revision
                 inventory_update.inventory_source.save(update_fields=['scm_last_revision'])
             except Exception:
                 inventory_update = self.update_model(
@@ -2518,6 +2546,13 @@ class RunInventoryUpdate(BaseTask):
                     job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
                                      ('project_update', local_project_sync.name, local_project_sync.id)))
                 raise
+        elif inventory_update.source == 'scm' and inventory_update.launch_type == 'scm' and source_project:
+            # This follows update, not sync, so make copy here
+            project_path = source_project.get_project_path(check_if_exists=False)
+            RunProjectUpdate.make_local_copy(
+                project_path, os.path.join(private_data_dir, 'project'),
+                source_project.scm_type, source_project.scm_revision
+            )
 
 
 @task()
45
awx/main/tests/functional/api/test_events.py
Normal file
@@ -0,0 +1,45 @@
+import pytest
+
+from awx.api.versioning import reverse
+from awx.main.models import AdHocCommand, AdHocCommandEvent, JobEvent
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('truncate, expected', [
+    (True, False),
+    (False, True),
+])
+def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
+    objs = organization_factory("org", superusers=['admin'])
+    jt = job_template_factory("jt", organization=objs.organization,
+                              inventory='test_inv', project='test_proj').job_template
+    job = jt.create_unified_job()
+    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start',
+                              stdout='a' * 1025)
+
+    url = reverse('api:job_job_events_list', kwargs={'pk': job.pk})
+    if not truncate:
+        url += '?no_truncate=1'
+
+    response = get(url, user=objs.superusers.admin, expect=200)
+    assert (len(response.data['results'][0]['stdout']) == 1025) == expected
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('truncate, expected', [
+    (True, False),
+    (False, True),
+])
+def test_ad_hoc_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
+    objs = organization_factory("org", superusers=['admin'])
+    adhoc = AdHocCommand()
+    adhoc.save()
+    AdHocCommandEvent.create_from_data(ad_hoc_command_id=adhoc.pk, uuid='abc123', event='runner_on_start',
+                                       stdout='a' * 1025)
+
+    url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': adhoc.pk})
+    if not truncate:
+        url += '?no_truncate=1'
+
+    response = get(url, user=objs.superusers.admin, expect=200)
+    assert (len(response.data['results'][0]['stdout']) == 1025) == expected
@@ -117,3 +117,10 @@ def test_handle_content_type(post, admin):
         admin,
         content_type='text/html',
         expect=415)
+
+
+@pytest.mark.django_db
+def test_basic_not_found(get, admin_user):
+    root_url = reverse('api:api_v2_root_view')
+    r = get(root_url + 'fooooooo', user=admin_user, expect=404)
+    assert r.data.get('detail') == 'The requested resource could not be found.'
@@ -8,6 +8,8 @@ from unittest.mock import PropertyMock
 
 # Django
 from django.urls import resolve
+from django.http import Http404
+from django.core.handlers.exception import response_for_exception
 from django.contrib.auth.models import User
 from django.core.serializers.json import DjangoJSONEncoder
 from django.db.backends.sqlite3.base import SQLiteCursorWrapper
@@ -581,8 +583,12 @@ def _request(verb):
         if 'format' not in kwargs and 'content_type' not in kwargs:
             kwargs['format'] = 'json'
 
-        view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
         request = getattr(APIRequestFactory(), verb)(url, **kwargs)
+        request_error = None
+        try:
+            view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
+        except Http404 as e:
+            request_error = e
         if isinstance(kwargs.get('cookies', None), dict):
             for key, value in kwargs['cookies'].items():
                 request.COOKIES[key] = value
@@ -591,7 +597,10 @@ def _request(verb):
         if user:
             force_authenticate(request, user=user)
 
-        response = view(request, *view_args, **view_kwargs)
+        if not request_error:
+            response = view(request, *view_args, **view_kwargs)
+        else:
+            response = response_for_exception(request, request_error)
         if middleware:
             middleware.process_response(request, response)
         if expect:
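A standalone sketch of the pattern that fixture change relies on: resolve() raises Http404 for an unknown path, and response_for_exception() turns that exception into the same 404 response the middleware stack would produce. These are real Django APIs; the dispatch helper and URL usage are illustrative only and assume a configured Django project.

from django.core.handlers.exception import response_for_exception
from django.http import Http404
from django.urls import resolve


def dispatch(request, path):
    # resolve the path to a view; fall back to a proper 404 response instead of crashing
    try:
        view, args, kwargs = resolve(path)
    except Http404 as e:
        return response_for_exception(request, e)
    return view(request, *args, **kwargs)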
@@ -87,7 +87,7 @@ class TestJobNotificationMixin(object):
                              'use_fact_cache': bool,
                              'verbosity': int},
                      'job_friendly_name': str,
-                     'job_summary_dict': str,
+                     'job_metadata': str,
                      'url': str}
 
 
@@ -144,5 +144,3 @@ class TestJobNotificationMixin(object):
 
         context_stub = JobNotificationMixin.context_stub()
         check_structure_and_completeness(TestJobNotificationMixin.CONTEXT_STRUCTURE, context_stub)
-
-
@@ -1,5 +1,4 @@
 import subprocess
-import yaml
 import base64
 
 from unittest import mock  # noqa
@@ -51,6 +50,5 @@ def test_kubectl_ssl_verification(containerized_job):
     cred.inputs['ssl_ca_cert'] = cert.stdout
     cred.save()
     pm = PodManager(containerized_job)
-    config = yaml.load(open(pm.kube_config), Loader=yaml.FullLoader)
-    ca_data = config['clusters'][0]['cluster']['certificate-authority-data']
+    ca_data = pm.kube_config['clusters'][0]['cluster']['certificate-authority-data']
     assert cert.stdout == base64.b64decode(ca_data.encode())
@@ -264,6 +264,7 @@ def test_inventory_update_injected_content(this_kind, script_or_plugin, inventory_update):
     assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == ('auto' if use_plugin else 'script')
     set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0'])
     env, content = read_content(private_data_dir, envvars, inventory_update)
+    env.pop('ANSIBLE_COLLECTIONS_PATHS', None)  # collection paths not relevant to this test
     base_dir = os.path.join(DATA, script_or_plugin)
     if not os.path.exists(base_dir):
         os.mkdir(base_dir)
@@ -43,7 +43,7 @@ def test_basic_parameterization(get, post, user, organization):
     assert 'url' in response.data['notification_configuration']
     assert 'headers' in response.data['notification_configuration']
     assert 'messages' in response.data
-    assert response.data['messages'] == {'started': None, 'success': None, 'error': None}
+    assert response.data['messages'] == {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
 
 
 @pytest.mark.django_db
@@ -19,6 +19,8 @@ from awx.main.models import (
     Credential
 )
 
+from rest_framework.exceptions import PermissionDenied
+
 from crum import impersonate
 
 
@@ -252,7 +254,8 @@ class TestJobRelaunchAccess:
 
         assert 'job_var' in job.launch_config.extra_data
         assert bob.can_access(Job, 'start', job, validate_license=False)
-        assert not alice.can_access(Job, 'start', job, validate_license=False)
+        with pytest.raises(PermissionDenied):
+            alice.can_access(Job, 'start', job, validate_license=False)
 
 
 @pytest.mark.django_db
@@ -7,6 +7,8 @@ from awx.main.access import (
     # WorkflowJobNodeAccess
 )
 
+from rest_framework.exceptions import PermissionDenied
+
 from awx.main.models import InventorySource, JobLaunchConfig
 
 
@@ -169,7 +171,8 @@ class TestWorkflowJobAccess:
         wfjt.ask_inventory_on_launch = True
         wfjt.save()
         JobLaunchConfig.objects.create(job=workflow_job, inventory=inventory)
-        assert not WorkflowJobAccess(rando).can_start(workflow_job)
+        with pytest.raises(PermissionDenied):
+            WorkflowJobAccess(rando).can_start(workflow_job)
         inventory.use_role.members.add(rando)
         assert WorkflowJobAccess(rando).can_start(workflow_job)
 
@@ -26,7 +26,7 @@ class TestNotificationTemplateSerializer():
    {'started': {'message': '{{ job.id }}', 'body': '{{ job.status }}'},
     'success': {'message': None, 'body': '{{ job_friendly_name }}'},
     'error': {'message': '{{ url }}', 'body': None}},
-   {'started': {'body': '{{ job_summary_dict }}'}},
+   {'started': {'body': '{{ job_metadata }}'}},
    {'started': {'body': '{{ job.summary_fields.inventory.total_hosts }}'}},
    {'started': {'body': u'Iñtërnâtiônàlizætiøn'}}
])
@@ -288,13 +288,17 @@ class AWXProxyHandler(logging.Handler):
     '''
 
     thread_local = threading.local()
+    _auditor = None
 
     def __init__(self, **kwargs):
         # TODO: process 'level' kwarg
         super(AWXProxyHandler, self).__init__(**kwargs)
         self._handler = None
         self._old_kwargs = {}
-        if settings.LOG_AGGREGATOR_AUDIT:
+
+    @property
+    def auditor(self):
+        if not self._auditor:
             self._auditor = logging.handlers.RotatingFileHandler(
                 filename='/var/log/tower/external.log',
                 maxBytes=1024 * 1024 * 50,  # 50 MB
@@ -307,6 +311,7 @@ class AWXProxyHandler(logging.Handler):
                     return json.dumps(message)
 
             self._auditor.setFormatter(WritableLogstashFormatter())
+        return self._auditor
 
     def get_handler_class(self, protocol):
         return HANDLER_MAPPING.get(protocol, AWXNullHandler)
@@ -341,8 +346,8 @@ class AWXProxyHandler(logging.Handler):
         if AWXProxyHandler.thread_local.enabled:
             actual_handler = self.get_handler()
             if settings.LOG_AGGREGATOR_AUDIT:
-                self._auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
-                self._auditor.emit(record)
+                self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
+                self.auditor.emit(record)
             return actual_handler.emit(record)
 
     def perform_test(self, custom_settings):
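A generic sketch of the lazily-created auditor pattern that hunk introduces: the file handler is only constructed the first time it is actually needed, so instantiating the proxy handler never touches the filesystem. The file path and sizes below are placeholders, not AWX's configuration.

import logging
import logging.handlers


class AuditedHandler(logging.Handler):
    _auditor = None

    @property
    def auditor(self):
        # build the rotating file handler on first use and cache it
        if not self._auditor:
            self._auditor = logging.handlers.RotatingFileHandler(
                filename='/tmp/external.log', maxBytes=1024 * 1024, backupCount=5)
        return self._auditor

    def emit(self, record):
        self.auditor.emit(record)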
@@ -635,16 +635,18 @@ PRIMARY_GALAXY_USERNAME = ''
 PRIMARY_GALAXY_TOKEN = ''
 PRIMARY_GALAXY_PASSWORD = ''
 PRIMARY_GALAXY_AUTH_URL = ''
-# Settings for the fallback galaxy server(s), normally this is the
-# actual Ansible Galaxy site.
-# server options: 'id', 'url', 'username', 'password', 'token', 'auth_url'
-# To not use any fallback servers set this to []
-FALLBACK_GALAXY_SERVERS = [
-    {
-        'id': 'galaxy',
-        'url': 'https://galaxy.ansible.com'
-    }
-]
+
+# Settings for the public galaxy server(s).
+PUBLIC_GALAXY_ENABLED = True
+PUBLIC_GALAXY_SERVER = {
+    'id': 'galaxy',
+    'url': 'https://galaxy.ansible.com'
+}
+
+# List of dicts of fallback (additional) Galaxy servers. If configured, these
+# will be higher precedence than public Galaxy, but lower than primary Galaxy.
+# Available options: 'id', 'url', 'username', 'password', 'token', 'auth_url'
+FALLBACK_GALAXY_SERVERS = []
 
 # Enable bubblewrap support for running jobs (playbook runs only).
 # Note: This setting may be overridden by database settings.
@@ -282,10 +282,12 @@ function getLaunchedByDetails () {
         tooltip = strings.get('tooltips.SCHEDULE');
         link = `/#/templates/job_template/${jobTemplate.id}/schedules/${schedule.id}`;
         value = $filter('sanitize')(schedule.name);
-    } else {
+    } else if (schedule) {
         tooltip = null;
         link = null;
         value = $filter('sanitize')(schedule.name);
+    } else {
+        return null;
     }
 
     return { label, link, tooltip, value };
@@ -5,7 +5,7 @@
     <!-- LEFT PANE HEADER ACTIONS -->
     <div class="JobResults-panelHeaderButtonActions">
         <!-- RELAUNCH ACTION -->
-        <at-relaunch job="vm.job"></at-relaunch>
+        <at-relaunch ng-if="vm.job" job="vm.job"></at-relaunch>
 
         <!-- CANCEL ACTION -->
         <button
@@ -213,8 +213,8 @@ function JobRenderService ($q, $compile, $sce, $window) {
         const record = this.createRecord(event, lines);
 
         if (lines.length === 1 && lines[0] === '') {
-            // Some events, mainly runner_on_start events, have an actual line count of 1
-            // (stdout = '') and a claimed line count of 0 (end_line - start_line = 0).
+            // runner_on_start, runner_on_ok, and a few other events have an actual line count
+            // of 1 (stdout = '') and a claimed line count of 0 (end_line - start_line = 0).
             // Since a zero-length string has an actual line count of 1, they'll still get
             // rendered as blank lines unless we intercept them and add some special
             // handling to remove them.
@@ -208,6 +208,7 @@
     max-width: none !important;
     width: 100% !important;
     padding-right: 0px !important;
+    margin-top: 10px;
 }
 
 .Form-formGroup--checkbox{
@@ -15,7 +15,9 @@
     title="{{ label || vm.strings.get('code_mirror.label.VARIABLES') }}"
     tabindex="-1"
     ng-if="tooltip">
-    <i class="fa fa-question-circle"></i>
+    <span class="at-Popover-icon" ng-class="{ 'at-Popover-icon--defaultCursor': popover.on === 'mouseenter' && !popover.click }">
+        <i class="fa fa-question-circle"></i>
+    </span>
 </a>
 <div class="atCodeMirror-toggleContainer FormToggle-container">
     <div id="{{ name }}_parse_type" class="btn-group">
@@ -202,6 +202,7 @@
 .at-Row-toggle {
     align-self: flex-start;
     margin-right: @at-space-4x;
+    margin-left: 15px;
 }
 
 .at-Row-actions {
@@ -385,29 +386,3 @@
         margin-right: @at-margin-right-list-row-item-inline-label;
     }
 }
-
-@media screen and (max-width: @at-breakpoint-instances-wrap) {
-    .at-Row-items--instances {
-        margin-bottom: @at-padding-bottom-instances-wrap;
-    }
-}
-
-@media screen and (max-width: @at-breakpoint-compact-list) {
-    .at-Row-actions {
-        align-items: center;
-    }
-
-    .at-RowAction {
-        margin: @at-margin-list-row-action-mobile;
-    }
-
-    .at-RowItem--inline {
-        display: flex;
-        margin-right: inherit;
-
-        .at-RowItem-label {
-            width: @at-width-list-row-item-label;
-            margin-right: inherit;
-        }
-    }
-}
@@ -89,6 +89,9 @@ export default ['i18n', function(i18n) {
             type: 'text',
             reset: 'PRIMARY_GALAXY_AUTH_URL',
         },
+        PUBLIC_GALAXY_ENABLED: {
+            type: 'toggleSwitch',
+        },
         AWX_TASK_ENV: {
             type: 'textarea',
             reset: 'AWX_TASK_ENV',
@@ -1,9 +1,11 @@
 .CapacityAdjuster {
     margin-right: @at-space-4x;
+    margin-top: 15px;
+    margin-left: -10px;
     position: relative;
 
     &-valueLabel {
-        bottom: @at-space-5x;
+        top: -10px;
         color: @at-color-body-text;
         font-size: @at-font-size;
         position: absolute;
@@ -5,6 +5,8 @@ capacity-bar {
     font-size: @at-font-size;
     min-width: 100px;
     white-space: nowrap;
+    margin-top: 5px;
+    margin-bottom: 5px;
 
     .CapacityBar {
         background-color: @default-bg;
@@ -42,12 +44,4 @@ capacity-bar {
         text-align: right;
         text-transform: uppercase;
     }
-
-    .Capacity-details--percentage {
-        width: 40px;
-    }
-
-    &:only-child {
-        margin-right: 50px;
-    }
 }
@@ -12,6 +12,7 @@ function AddContainerGroupController(ToJSON, $scope, $state, models, strings, i18n) {
 
     vm.form = instanceGroup.createFormSchema('post');
     vm.form.name.required = true;
+    delete vm.form.name.help_text;
 
     vm.form.credential = {
         type: 'field',
@@ -22,6 +23,7 @@ function AddContainerGroupController(ToJSON, $scope, $state, models, strings, i18n) {
     vm.form.credential._route = "instanceGroups.addContainerGroup.credentials";
     vm.form.credential._model = credential;
     vm.form.credential._placeholder = strings.get('container.CREDENTIAL_PLACEHOLDER');
+    vm.form.credential.help_text = strings.get('container.CREDENTIAL_HELP_TEXT');
     vm.form.credential.required = true;
 
     vm.form.extraVars = {
@@ -29,6 +31,7 @@ function AddContainerGroupController(ToJSON, $scope, $state, models, strings, i18n) {
         value: DataSet.data.actions.POST.pod_spec_override.default,
         name: 'extraVars',
         toggleLabel: strings.get('container.POD_SPEC_TOGGLE'),
+        tooltip: strings.get('container.EXTRA_VARS_HELP_TEXT')
     };
 
     vm.tab = {
@@ -1,8 +1,8 @@
 <div ui-view="credentials"></div>
-<a class="containerGroups-messageBar-link" href="https://docs.ansible.com/ansible-tower/latest/html/administration/external_execution_envs.html#container-group-considerations" target="_blank" style="color: white">
+<a class="containerGroups-messageBar-link" href="https://docs.ansible.com/ansible-tower/latest/html/administration/external_execution_envs.html#container-groups" target="_blank" style="color: white">
     <div class="Section-messageBar">
         <i class="Section-messageBar-warning fa fa-warning"></i>
-        <span class="Section-messageBar-text">This feature is tech preview, and is subject to change in a future release. Click here for documentation.</span>
+        <span class="Section-messageBar-text">This feature is currently in tech preview and is subject to change in a future release. Click here for documentation.</span>
     </div>
 </a>
 <at-panel>
@@ -34,6 +34,7 @@
             variables="vm.form.extraVars.value"
             label="{{ vm.form.extraVars.label }}"
            name="{{ vm.form.extraVars.name }}"
+            tooltip="{{ vm.form.extraVars.tooltip }}"
         >
         </at-code-mirror>
     </div>
@@ -27,6 +27,7 @@ function EditContainerGroupController($rootScope, $scope, $state, models, strings, i18n) {
     vm.switchDisabled = false;
     vm.form.disabled = !instanceGroup.has('options', 'actions.PUT');
     vm.form.name.required = true;
+    delete vm.form.name.help_text;
     vm.form.credential = {
         type: 'field',
         label: i18n._('Credential'),
@@ -38,6 +39,7 @@ function EditContainerGroupController($rootScope, $scope, $state, models, strings, i18n) {
     vm.form.credential._displayValue = EditContainerGroupDataset.data.summary_fields.credential.name;
     vm.form.credential.required = true;
     vm.form.credential._value = EditContainerGroupDataset.data.summary_fields.credential.id;
+    vm.form.credential.help_text = strings.get('container.CREDENTIAL_HELP_TEXT');
 
     vm.tab = {
         details: {
@@ -59,7 +61,8 @@ function EditContainerGroupController($rootScope, $scope, $state, models, strings, i18n) {
             label: strings.get('container.POD_SPEC_LABEL'),
             value: EditContainerGroupDataset.data.pod_spec_override || "---",
             name: 'extraVars',
-            disabled: true
+            disabled: true,
+            tooltip: strings.get('container.EXTRA_VARS_HELP_TEXT')
         };
         vm.switchDisabled = true;
     } else {
@@ -67,7 +70,8 @@ function EditContainerGroupController($rootScope, $scope, $state, models, strings, i18n) {
             label: strings.get('container.POD_SPEC_LABEL'),
             value: EditContainerGroupDataset.data.pod_spec_override || instanceGroup.model.OPTIONS.actions.PUT.pod_spec_override.default,
             name: 'extraVars',
-            toggleLabel: strings.get('container.POD_SPEC_TOGGLE')
+            toggleLabel: strings.get('container.POD_SPEC_TOGGLE'),
+            tooltip: strings.get('container.EXTRA_VARS_HELP_TEXT')
         };
     }
 
@@ -1,135 +1,100 @@
-.InstanceGroups {
-    .at-Row-actions{
-        justify-content: flex-start;
-        width: 300px;
-        & > capacity-bar:only-child{
-            margin-left: 0px;
-            margin-top: 5px
-        }
-    }
-    .at-RowAction{
-        margin: 0;
-    }
-    .at-Row-links{
-        justify-content: flex-start;
-    }
-
-    .BreadCrumb-menuLinkImage:hover {
-        color: @default-link;
-    }
-
-    .List-details {
-        align-self: flex-end;
-        color: @default-interface-txt;
-        display: flex;
-        flex: 0 0 auto;
-        font-size: 12px;
-        margin-right:20px;
-        text-transform: uppercase;
-    }
-
-    .Capacity-details {
-        display: flex;
-        margin-right: 20px;
-        align-items: center;
-
-        .Capacity-details--label {
-            color: @default-interface-txt;
-            margin: 0 10px 0 0;
-            width: 100px;
-        }
-    }
-
-    .RunningJobs-details {
-        align-items: center;
-        display: flex;
-
-        .RunningJobs-details--label {
-            margin: 0 10px 0 0;
-        }
-    }
-
-    .List-tableCell--capacityColumn {
-        display: flex;
-        height: 40px;
-        align-items: center;
-    }
-
-    .List-noItems {
-        margin-top: 20px;
-    }
-
-    .List-tableRow .List-titleBadge {
-        margin: 0 0 0 5px;
-    }
-
-    .Panel-docsLink {
-        cursor: pointer;
-        display: flex;
-        align-items: center;
-        justify-content: center;
-        padding: 7px;
-        background: @at-white;
-        border-radius: @at-border-radius;
-        height: 30px;
-        width: 30px;
-        margin: 0 20px 0 auto;
-
-        i {
-            font-size: @at-font-size-icon;
-            color: @at-gray-646972;
-        }
-    }
-
-    .Panel-docsLink:hover {
-        background-color: @at-blue;
-
-        i {
-            color: @at-white;
-        }
-    }
-    .at-Row-toggle{
-        margin-top: 20px;
-        padding-left: 15px;
-    }
-
-    .ContainerGroups-codeMirror{
-        margin-bottom: 10px;
-    }
-
-    .at-Row-container{
-        flex-wrap: wrap;
-    }
-
-    .containerGroups-messageBar-link:hover{
-        text-decoration: underline;
-    }
-
-    @media screen and (max-width: 1060px) and (min-width: 769px){
-        .at-Row-links {
-            justify-content: flex-start;
-            flex-wrap: wrap;
-        }
-    }
-
-    @media screen and (min-width: 1061px){
-        .at-Row-actions{
-            justify-content: flex-end;
-            & > capacity-bar:only-child {
-                margin-right: 30px;
-            }
-        }
-        .instanceGroupsList-details{
-            display: flex;
-        }
-        .at-Row-links {
-            justify-content: flex-end;
-            display: flex;
-            width: 445px;
-        }
-    }
-}
+.at-Row--instances {
+    .at-Row-content {
+        flex-wrap: nowrap;
+    }
+
+    .at-Row-toggle {
+        align-self: auto;
+        flex: initial;
+    }
+
+    .at-Row-itemGroup {
+        display: flex;
+        flex: 1;
+        flex-wrap: wrap;
+    }
+
+    .at-Row-items--instances {
+        display: flex;
+        flex-wrap: wrap;
+        align-items: center;
+        align-content: center;
+        flex: 1;
+    }
+
+    .at-RowItem--isHeader {
+        min-width: 250px;
+    }
+
+    .at-Row-items--capacity {
+        display: flex;
+        flex-wrap: wrap;
+        align-items: center;
+    }
+
+    .CapacityAdjuster {
+        padding-bottom: 15px;
+    }
+}
+
+.at-Row--instanceGroups {
+    .at-Row-content {
+        flex-wrap: nowrap;
+    }
+
+    .at-Row-itemGroup {
+        display: flex;
+        flex: 1;
+        flex-wrap: wrap;
+    }
+
+    .at-Row-items--instanceGroups {
+        display: flex;
+        flex-wrap: wrap;
+        align-items: center;
+        flex: 1;
+        max-width: 100%;
+    }
+
+    .at-Row-itemHeaderGroup {
+        min-width: 320px;
+        display: flex;
+    }
+
+    .at-Row-items--capacity {
+        display: flex;
+        flex-wrap: wrap;
+        align-items: center;
+        margin-right: 5px;
+        min-width: 215px;
+    }
+
+    .at-Row--instanceSpacer {
+        width: 140px;
+    }
+
+    .at-Row--capacitySpacer {
+        flex: .6;
+    }
+
+    .at-Row-actions {
+        min-width: 50px;
+    }
+}
+
+@media screen and (max-width: 1260px) {
+    .at-Row--instances .at-Row-items--capacity {
+        flex: 1
+    }
+
+    .at-Row--instances .CapacityAdjuster {
+        padding-bottom: 5px;
+    }
+}
+
+@media screen and (max-width: 600px) {
+    .at-Row--instanceGroups .at-Row-itemHeaderGroup,
+    .at-Row--instanceGroups .at-Row-itemGroup {
|
||||||
|
max-width: 270px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -72,8 +72,9 @@ function InstanceGroupsStrings(BaseString) {
|
|||||||
CREDENTIAL_PLACEHOLDER: t.s('SELECT A CREDENTIAL'),
|
CREDENTIAL_PLACEHOLDER: t.s('SELECT A CREDENTIAL'),
|
||||||
POD_SPEC_LABEL: t.s('Pod Spec Override'),
|
POD_SPEC_LABEL: t.s('Pod Spec Override'),
|
||||||
BADGE_TEXT: t.s('Container Group'),
|
BADGE_TEXT: t.s('Container Group'),
|
||||||
POD_SPEC_TOGGLE: t.s('Customize Pod Spec')
|
POD_SPEC_TOGGLE: t.s('Customize Pod Spec'),
|
||||||
|
CREDENTIAL_HELP_TEXT: t.s('Credential to authenticate with Kubernetes or OpenShift. Must be of type \"Kubernetes/OpenShift API Bearer Token\".'),
|
||||||
|
EXTRA_VARS_HELP_TEXT: t.s('Field for passing a custom Kubernetes or OpenShift Pod specification.')
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -43,35 +43,45 @@
|
|||||||
</at-list-toolbar>
|
</at-list-toolbar>
|
||||||
<at-list results='vm.instances'>
|
<at-list results='vm.instances'>
|
||||||
<at-row ng-repeat="instance in vm.instances"
|
<at-row ng-repeat="instance in vm.instances"
|
||||||
ng-class="{'at-Row--active': (instance.id === vm.activeId)}">
|
ng-class="{'at-Row--active': (instance.id === vm.activeId)}"
|
||||||
|
class="at-Row--instances">
|
||||||
<div class="at-Row-toggle">
|
<div class="at-Row-toggle">
|
||||||
<at-switch on-toggle="vm.toggle(instance)" switch-on="instance.enabled" switch-disabled="vm.rowAction.toggle._disabled"></at-switch>
|
<at-switch on-toggle="vm.toggle(instance)" switch-on="instance.enabled" switch-disabled="vm.rowAction.toggle._disabled"></at-switch>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="at-Row-itemGroup">
|
||||||
<div class="at-Row-items at-Row-items--instances">
|
<div class="at-Row-items at-Row-items--instances">
|
||||||
<at-row-item
|
<at-row-item
|
||||||
header-value="{{ instance.hostname }}"
|
header-value="{{ instance.hostname }}"
|
||||||
header-tag="{{ instance.managed_by_policy ? '' : vm.strings.get('list.MANUAL') }}">
|
header-tag="{{ instance.managed_by_policy ? '' : vm.strings.get('list.MANUAL') }}">
|
||||||
</at-row-item>
|
</at-row-item>
|
||||||
<at-row-item
|
<div class="at-Row-nonHeaderItems">
|
||||||
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
|
<at-row-item
|
||||||
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}, job_search: {status__in: ['running,waiting']}})"
|
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
|
||||||
value="{{ instance.jobs_running }}"
|
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}, job_search: {status__in: ['running,waiting']}})"
|
||||||
inline="true"
|
value="{{ instance.jobs_running }}"
|
||||||
badge="true">
|
inline="true"
|
||||||
</at-row-item>
|
badge="true">
|
||||||
<at-row-item
|
</at-row-item>
|
||||||
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_TOTAL_JOBS') }}"
|
<at-row-item
|
||||||
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}})"
|
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_TOTAL_JOBS') }}"
|
||||||
value="{{ instance.jobs_total }}"
|
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}})"
|
||||||
inline="true"
|
value="{{ instance.jobs_total }}"
|
||||||
badge="true">
|
inline="true"
|
||||||
</at-row-item>
|
badge="true">
|
||||||
</div>
|
</at-row-item>
|
||||||
|
</div>
|
||||||
<div class="at-Row-actions">
|
</div>
|
||||||
<capacity-adjuster state="instance" disabled="{{vm.rowAction.capacity_adjustment._disabled}}"></capacity-adjuster>
|
<div class="at-Row-items--capacity">
|
||||||
<capacity-bar label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}" capacity="instance.consumed_capacity" total-capacity="instance.capacity"></capacity-bar>
|
<capacity-adjuster
|
||||||
|
state="instance"
|
||||||
|
disabled="{{vm.rowAction.capacity_adjustment._disabled}}">
|
||||||
|
</capacity-adjuster>
|
||||||
|
<capacity-bar
|
||||||
|
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}"
|
||||||
|
capacity="instance.consumed_capacity"
|
||||||
|
total-capacity="instance.capacity">
|
||||||
|
</capacity-bar>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</at-row>
|
</at-row>
|
||||||
</at-list>
|
</at-list>
|
||||||
|
|||||||
@@ -41,10 +41,11 @@
|
|||||||
</at-list-toolbar>
|
</at-list-toolbar>
|
||||||
<at-list results="instance_groups">
|
<at-list results="instance_groups">
|
||||||
<at-row ng-repeat="instance_group in instance_groups"
|
<at-row ng-repeat="instance_group in instance_groups"
|
||||||
ng-class="{'at-Row--active': (instance_group.id === vm.activeId)}" >
|
ng-class="{'at-Row--active': (instance_group.id === vm.activeId)}"
|
||||||
<div class="at-Row-items">
|
class="at-Row--instanceGroups">
|
||||||
<div class="at-Row-container">
|
<div class="at-Row-itemGroup">
|
||||||
<div class="at-Row-content">
|
<div class="at-Row-items at-Row-items--instanceGroups">
|
||||||
|
<div class="at-Row-itemHeaderGroup">
|
||||||
<at-row-item
|
<at-row-item
|
||||||
ng-if="!instance_group.credential"
|
ng-if="!instance_group.credential"
|
||||||
header-value="{{ instance_group.name }}"
|
header-value="{{ instance_group.name }}"
|
||||||
@@ -67,23 +68,14 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="at-RowItem--labels" ng-if="!instance_group.credential">
|
<div class="at-RowItem--labels" ng-if="!instance_group.credential">
|
||||||
<div class="LabelList-tagContainer">
|
<div class="LabelList-tagContainer">
|
||||||
<div class="LabelList-tag" ng-class="{'LabelList-tag--deletable' : (showDelete && template.summary_fields.user_capabilities.edit)}">
|
<div class="LabelList-tag" ng-class="{'LabelList-tag--deletable' : (showDelete && template.summary_fields.user_capabilities.edit)}">
|
||||||
<span class="LabelList-name">{{vm.strings.get('instance.BADGE_TEXT') }}</span>
|
<span class="LabelList-name">{{vm.strings.get('instance.BADGE_TEXT') }}</span>
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="instanceGroupsList-details">
|
<div class="at-Row-nonHeaderItems">
|
||||||
<div class="at-Row-links">
|
|
||||||
<at-row-item
|
|
||||||
ng-if="!instance_group.credential"
|
|
||||||
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_INSTANCES') }}"
|
|
||||||
label-state="instanceGroups.instances({instance_group_id: {{ instance_group.id }}})"
|
|
||||||
value="{{ instance_group.instances }}"
|
|
||||||
inline="true"
|
|
||||||
badge="true">
|
|
||||||
</at-row-item>
|
|
||||||
<at-row-item
|
<at-row-item
|
||||||
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
|
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
|
||||||
label-state="instanceGroups.jobs({instance_group_id: {{ instance_group.id }}, job_search: {status__in: ['running,waiting']}})"
|
label-state="instanceGroups.jobs({instance_group_id: {{ instance_group.id }}, job_search: {status__in: ['running,waiting']}})"
|
||||||
@@ -98,14 +90,38 @@
|
|||||||
inline="true"
|
inline="true"
|
||||||
badge="true">
|
badge="true">
|
||||||
</at-row-item>
|
</at-row-item>
|
||||||
</div>
|
<at-row-item
|
||||||
<div class="at-Row-actions" >
|
ng-if="!instance_group.credential"
|
||||||
<capacity-bar ng-show="!instance_group.credential" label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}" capacity="instance_group.consumed_capacity" total-capacity="instance_group.capacity"></capacity-bar>
|
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_INSTANCES') }}"
|
||||||
<at-row-action icon="fa-trash" ng-click="vm.deleteInstanceGroup(instance_group)" ng-if="vm.rowAction.trash(instance_group)">
|
label-state="instanceGroups.instances({instance_group_id: {{ instance_group.id }}})"
|
||||||
</at-row-action>
|
value="{{ instance_group.instances }}"
|
||||||
</div>
|
inline="true"
|
||||||
|
badge="true">
|
||||||
|
</at-row-item>
|
||||||
|
<div
|
||||||
|
ng-if="instance_group.credential"
|
||||||
|
class="at-Row--instanceSpacer">
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="at-Row-items--capacity" ng-if="!instance_group.credential">
|
||||||
|
<capacity-bar
|
||||||
|
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}"
|
||||||
|
capacity="instance_group.consumed_capacity"
|
||||||
|
total-capacity="instance_group.capacity">
|
||||||
|
</capacity-bar>
|
||||||
|
</div>
|
||||||
|
<div
|
||||||
|
ng-if="instance_group.credential"
|
||||||
|
class="at-Row--capacitySpacer">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="at-Row-actions" >
|
||||||
|
<at-row-action
|
||||||
|
icon="fa-trash"
|
||||||
|
ng-click="vm.deleteInstanceGroup(instance_group)"
|
||||||
|
ng-if="vm.rowAction.trash(instance_group)">
|
||||||
|
</at-row-action>
|
||||||
</div>
|
</div>
|
||||||
</at-row>
|
</at-row>
|
||||||
</at-list>
|
</at-list>
|
||||||
|
|||||||
@@ -671,6 +671,98 @@ export default ['i18n', function(i18n) {
|
|||||||
"|| notification_type.value == 'webhook')",
|
"|| notification_type.value == 'webhook')",
|
||||||
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
},
|
},
|
||||||
|
approved_message: {
|
||||||
|
label: i18n._('Workflow Approved Message'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && notification_type.value != 'webhook'",
|
||||||
|
rows: 2,
|
||||||
|
oneLine: 'true',
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
approved_body: {
|
||||||
|
label: i18n._('Workflow Approved Message Body'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && " +
|
||||||
|
"(notification_type.value == 'email' " +
|
||||||
|
"|| notification_type.value == 'pagerduty' " +
|
||||||
|
"|| notification_type.value == 'webhook')",
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
denied_message: {
|
||||||
|
label: i18n._('Workflow Denied Message'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && notification_type.value != 'webhook'",
|
||||||
|
rows: 2,
|
||||||
|
oneLine: 'true',
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
denied_body: {
|
||||||
|
label: i18n._('Workflow Denied Message Body'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && " +
|
||||||
|
"(notification_type.value == 'email' " +
|
||||||
|
"|| notification_type.value == 'pagerduty' " +
|
||||||
|
"|| notification_type.value == 'webhook')",
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
running_message: {
|
||||||
|
label: i18n._('Workflow Running Message'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && notification_type.value != 'webhook'",
|
||||||
|
rows: 2,
|
||||||
|
oneLine: 'true',
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
running_body: {
|
||||||
|
label: i18n._('Workflow Running Message Body'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && " +
|
||||||
|
"(notification_type.value == 'email' " +
|
||||||
|
"|| notification_type.value == 'pagerduty' " +
|
||||||
|
"|| notification_type.value == 'webhook')",
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
timed_out_message: {
|
||||||
|
label: i18n._('Workflow Timed Out Message'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && notification_type.value != 'webhook'",
|
||||||
|
rows: 2,
|
||||||
|
oneLine: 'true',
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
|
timed_out_body: {
|
||||||
|
label: i18n._('Workflow Timed Out Message Body'),
|
||||||
|
class: 'Form-formGroup--fullWidth',
|
||||||
|
type: 'syntax_highlight',
|
||||||
|
mode: 'jinja2',
|
||||||
|
default: '',
|
||||||
|
ngShow: "customize_messages && " +
|
||||||
|
"(notification_type.value == 'email' " +
|
||||||
|
"|| notification_type.value == 'pagerduty' " +
|
||||||
|
"|| notification_type.value == 'webhook')",
|
||||||
|
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
|
||||||
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
buttons: { //for now always generates <button> tags
|
buttons: { //for now always generates <button> tags
|
||||||
|
|||||||
@@ -1,19 +1,20 @@
|
|||||||
|
|
||||||
const emptyDefaults = {
|
const emptyDefaults = {
|
||||||
started: {
|
started: { message: '', body: '' },
|
||||||
message: '',
|
success: { message: '', body: '' },
|
||||||
body: '',
|
error: { message: '', body: '' },
|
||||||
},
|
workflow_approval: {
|
||||||
success: {
|
approved: { message: '', body: '' },
|
||||||
message: '',
|
denied: { message: '', body: '' },
|
||||||
body: '',
|
running: { message: '', body: '' },
|
||||||
},
|
timed_out: { message: '', body: '' },
|
||||||
error: {
|
}
|
||||||
message: '',
|
|
||||||
body: '',
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
function getMessageIfUpdated(message, defaultValue) {
|
||||||
|
return message === defaultValue ? null : message;
|
||||||
|
}
|
||||||
|
|
||||||
export default [function() {
|
export default [function() {
|
||||||
return {
|
return {
|
||||||
getMessagesObj: function ($scope, defaultMessages) {
|
getMessagesObj: function ($scope, defaultMessages) {
|
||||||
@@ -23,22 +24,34 @@ export default [function() {
|
|||||||
const defaults = defaultMessages[$scope.notification_type.value] || {};
|
const defaults = defaultMessages[$scope.notification_type.value] || {};
|
||||||
return {
|
return {
|
||||||
started: {
|
started: {
|
||||||
message: $scope.started_message === defaults.started.message ?
|
message: getMessageIfUpdated($scope.started_message, defaults.started.message),
|
||||||
null : $scope.started_message,
|
body: getMessageIfUpdated($scope.started_body, defaults.started.body),
|
||||||
body: $scope.started_body === defaults.started.body ?
|
|
||||||
null : $scope.started_body,
|
|
||||||
},
|
},
|
||||||
success: {
|
success: {
|
||||||
message: $scope.success_message === defaults.success.message ?
|
message: getMessageIfUpdated($scope.success_message, defaults.success.message),
|
||||||
null : $scope.success_message,
|
body: getMessageIfUpdated($scope.success_body, defaults.success.body),
|
||||||
body: $scope.success_body === defaults.success.body ?
|
|
||||||
null : $scope.success_body,
|
|
||||||
},
|
},
|
||||||
error: {
|
error: {
|
||||||
message: $scope.error_message === defaults.error.message ?
|
message: getMessageIfUpdated($scope.error_message, defaults.error.message),
|
||||||
null : $scope.error_message,
|
body: getMessageIfUpdated($scope.error_body, defaults.error.body),
|
||||||
body: $scope.error_body === defaults.error.body ?
|
},
|
||||||
null : $scope.error_body,
|
workflow_approval: {
|
||||||
|
approved: {
|
||||||
|
message: getMessageIfUpdated($scope.approved_message, defaults.workflow_approval.approved.message),
|
||||||
|
body: getMessageIfUpdated($scope.approved_body, defaults.workflow_approval.approved.body),
|
||||||
|
},
|
||||||
|
denied: {
|
||||||
|
message: getMessageIfUpdated($scope.denied_message, defaults.workflow_approval.denied.message),
|
||||||
|
body: getMessageIfUpdated($scope.denied_body, defaults.workflow_approval.denied.body),
|
||||||
|
},
|
||||||
|
running: {
|
||||||
|
message: getMessageIfUpdated($scope.running_message, defaults.workflow_approval.running.message),
|
||||||
|
body: getMessageIfUpdated($scope.running_body, defaults.workflow_approval.running.body),
|
||||||
|
},
|
||||||
|
timed_out: {
|
||||||
|
message: getMessageIfUpdated($scope.timed_out_message, defaults.workflow_approval.timed_out.message),
|
||||||
|
body: getMessageIfUpdated($scope.timed_out_body, defaults.workflow_approval.timed_out.body),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
@@ -56,6 +69,15 @@ export default [function() {
|
|||||||
$scope.success_body = defaults.success.body;
|
$scope.success_body = defaults.success.body;
|
||||||
$scope.error_message = defaults.error.message;
|
$scope.error_message = defaults.error.message;
|
||||||
$scope.error_body = defaults.error.body;
|
$scope.error_body = defaults.error.body;
|
||||||
|
$scope.approved_message = defaults.workflow_approval.approved.message;
|
||||||
|
$scope.approved_body = defaults.workflow_approval.approved.body;
|
||||||
|
$scope.denied_message = defaults.workflow_approval.denied.message;
|
||||||
|
$scope.denied_body = defaults.workflow_approval.denied.body;
|
||||||
|
$scope.running_message = defaults.workflow_approval.running.message;
|
||||||
|
$scope.running_body = defaults.workflow_approval.running.body;
|
||||||
|
$scope.timed_out_message = defaults.workflow_approval.timed_out.message;
|
||||||
|
$scope.timed_out_body = defaults.workflow_approval.timed_out.body;
|
||||||
|
|
||||||
if (!messages) {
|
if (!messages) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -84,6 +106,48 @@ export default [function() {
|
|||||||
isCustomized = true;
|
isCustomized = true;
|
||||||
$scope.error_body = messages.error.body;
|
$scope.error_body = messages.error.body;
|
||||||
}
|
}
|
||||||
|
if (messages.workflow_approval) {
|
||||||
|
if (messages.workflow_approval.approved &&
|
||||||
|
messages.workflow_approval.approved.message) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.approved_message = messages.workflow_approval.approved.message;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.approved &&
|
||||||
|
messages.workflow_approval.approved.body) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.approved_body = messages.workflow_approval.approved.body;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.denied &&
|
||||||
|
messages.workflow_approval.denied.message) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.denied_message = messages.workflow_approval.denied.message;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.denied &&
|
||||||
|
messages.workflow_approval.denied.body) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.denied_body = messages.workflow_approval.denied.body;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.running &&
|
||||||
|
messages.workflow_approval.running.message) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.running_message = messages.workflow_approval.running.message;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.running &&
|
||||||
|
messages.workflow_approval.running.body) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.running_body = messages.workflow_approval.running.body;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.timed_out &&
|
||||||
|
messages.workflow_approval.timed_out.message) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.timed_out_message = messages.workflow_approval.timed_out.message;
|
||||||
|
}
|
||||||
|
if (messages.workflow_approval.timed_out &&
|
||||||
|
messages.workflow_approval.timed_out.body) {
|
||||||
|
isCustomized = true;
|
||||||
|
$scope.timed_out_body = messages.workflow_approval.timed_out.body;
|
||||||
|
}
|
||||||
|
}
|
||||||
$scope.customize_messages = isCustomized;
|
$scope.customize_messages = isCustomized;
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -110,6 +174,30 @@ export default [function() {
|
|||||||
if ($scope.error_body === oldDefaults.error.body) {
|
if ($scope.error_body === oldDefaults.error.body) {
|
||||||
$scope.error_body = newDefaults.error.body;
|
$scope.error_body = newDefaults.error.body;
|
||||||
}
|
}
|
||||||
|
if ($scope.approved_message === oldDefaults.workflow_approval.approved.message) {
|
||||||
|
$scope.approved_message = newDefaults.workflow_approval.approved.message;
|
||||||
|
}
|
||||||
|
if ($scope.approved_body === oldDefaults.workflow_approval.approved.body) {
|
||||||
|
$scope.approved_body = newDefaults.workflow_approval.approved.body;
|
||||||
|
}
|
||||||
|
if ($scope.denied_message === oldDefaults.workflow_approval.denied.message) {
|
||||||
|
$scope.denied_message = newDefaults.workflow_approval.denied.message;
|
||||||
|
}
|
||||||
|
if ($scope.denied_body === oldDefaults.workflow_approval.denied.body) {
|
||||||
|
$scope.denied_body = newDefaults.workflow_approval.denied.body;
|
||||||
|
}
|
||||||
|
if ($scope.running_message === oldDefaults.workflow_approval.running.message) {
|
||||||
|
$scope.running_message = newDefaults.workflow_approval.running.message;
|
||||||
|
}
|
||||||
|
if ($scope.running_body === oldDefaults.workflow_approval.running.body) {
|
||||||
|
$scope.running_body = newDefaults.workflow_approval.running.body;
|
||||||
|
}
|
||||||
|
if ($scope.timed_out_message === oldDefaults.workflow_approval.timed_out.message) {
|
||||||
|
$scope.timed_out_message = newDefaults.workflow_approval.timed_out.message;
|
||||||
|
}
|
||||||
|
if ($scope.timed_out_body === oldDefaults.workflow_approval.timed_out.body) {
|
||||||
|
$scope.timed_out_body = newDefaults.workflow_approval.timed_out.body;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}];
|
}];
|
||||||
|
|||||||
@@ -233,6 +233,38 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
|
|||||||
}, true);
|
}, true);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
function getSelectedTags(tagId) {
|
||||||
|
const selectedTags = [];
|
||||||
|
const choiceElements = $(tagId).siblings(".select2").first()
|
||||||
|
.find(".select2-selection__choice");
|
||||||
|
choiceElements.each((index, option) => {
|
||||||
|
selectedTags.push({
|
||||||
|
value: option.title,
|
||||||
|
name: option.title,
|
||||||
|
label: option.title
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return selectedTags;
|
||||||
|
}
|
||||||
|
|
||||||
|
function consolidateTags (tags, otherTags) {
|
||||||
|
const seen = [];
|
||||||
|
const consolidated = [];
|
||||||
|
tags.forEach(tag => {
|
||||||
|
if (!seen.includes(tag.value)) {
|
||||||
|
seen.push(tag.value);
|
||||||
|
consolidated.push(tag);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
otherTags.forEach(tag => {
|
||||||
|
if (!seen.includes(tag.value)) {
|
||||||
|
seen.push(tag.value);
|
||||||
|
consolidated.push(tag);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return consolidated;
|
||||||
|
}
|
||||||
|
|
||||||
vm.next = (currentTab) => {
|
vm.next = (currentTab) => {
|
||||||
if(_.has(vm, 'steps.other_prompts.tab._active') && vm.steps.other_prompts.tab._active === true){
|
if(_.has(vm, 'steps.other_prompts.tab._active') && vm.steps.other_prompts.tab._active === true){
|
||||||
try {
|
try {
|
||||||
@@ -243,6 +275,22 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
|
|||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The current tag input state lives somewhere in the associated select2
|
||||||
|
// widgetry and isn't directly tied to the vm, so extract the tag values
|
||||||
|
// and update the vm to keep it in sync.
|
||||||
|
if (vm.promptDataClone.launchConf.ask_tags_on_launch) {
|
||||||
|
vm.promptDataClone.prompts.tags.value = consolidateTags(
|
||||||
|
angular.copy(vm.promptDataClone.prompts.tags.value),
|
||||||
|
getSelectedTags("#job_launch_job_tags")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (vm.promptDataClone.launchConf.ask_skip_tags_on_launch) {
|
||||||
|
vm.promptDataClone.prompts.skipTags.value = consolidateTags(
|
||||||
|
angular.copy(vm.promptDataClone.prompts.skipTags.value),
|
||||||
|
getSelectedTags("#job_launch_skip_tags")
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let nextStep;
|
let nextStep;
|
||||||
|
|||||||
@@ -12,19 +12,6 @@ export default
|
|||||||
|
|
||||||
let scope;
|
let scope;
|
||||||
|
|
||||||
let consolidateTags = (tagModel, tagId) => {
|
|
||||||
let tags = angular.copy(tagModel);
|
|
||||||
$(tagId).siblings(".select2").first().find(".select2-selection__choice").each((optionIndex, option) => {
|
|
||||||
tags.push({
|
|
||||||
value: option.title,
|
|
||||||
name: option.title,
|
|
||||||
label: option.title
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
return [...tags.reduce((map, tag) => map.has(tag.value) ? map : map.set(tag.value, tag), new Map()).values()];
|
|
||||||
};
|
|
||||||
|
|
||||||
vm.init = (_scope_) => {
|
vm.init = (_scope_) => {
|
||||||
scope = _scope_;
|
scope = _scope_;
|
||||||
|
|
||||||
@@ -35,14 +22,6 @@ export default
|
|||||||
|
|
||||||
const surveyPasswords = {};
|
const surveyPasswords = {};
|
||||||
|
|
||||||
if (scope.promptData.launchConf.ask_tags_on_launch) {
|
|
||||||
scope.promptData.prompts.tags.value = consolidateTags(scope.promptData.prompts.tags.value, "#job_launch_job_tags");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (scope.promptData.launchConf.ask_skip_tags_on_launch) {
|
|
||||||
scope.promptData.prompts.skipTags.value = consolidateTags(scope.promptData.prompts.skipTags.value, "#job_launch_skip_tags");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (scope.promptData.launchConf.survey_enabled){
|
if (scope.promptData.launchConf.survey_enabled){
|
||||||
scope.promptData.extraVars = ToJSON(scope.parseType, scope.promptData.prompts.variables.value, false);
|
scope.promptData.extraVars = ToJSON(scope.parseType, scope.promptData.prompts.variables.value, false);
|
||||||
scope.promptData.surveyQuestions.forEach(surveyQuestion => {
|
scope.promptData.surveyQuestions.forEach(surveyQuestion => {
|
||||||
|
|||||||
@@ -8,6 +8,17 @@ inside the folder `lib/ansible/modules/web_infrastructure/ansible_tower`
|
|||||||
as well as other folders for the inventory plugin, module utils, and
|
as well as other folders for the inventory plugin, module utils, and
|
||||||
doc fragment.
|
doc fragment.
|
||||||
|
|
||||||
|
## Release and Upgrade Notes
|
||||||
|
|
||||||
|
The release 7.0.0 of the `awx.awx` collection is intended to be identical
|
||||||
|
to the content prior to the migration, aside from changes necessary to
|
||||||
|
have it function as a collection.
|
||||||
|
|
||||||
|
The following notes describe changes that may require updates to playbooks.
|
||||||
|
|
||||||
|
- Specifying `inputs` or `injectors` as strings in the
|
||||||
|
`tower_credential_type` module is no longer supported. Provide them as dictionaries instead, as in the example below.
|
||||||
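For reference, a minimal sketch of the dictionary form (the credential
type name, field id, and environment variable below are illustrative
placeholders, not taken from this changeset):

```
- name: Add a custom credential type with dict-style inputs and injectors
  tower_credential_type:
    name: MyCloudCredential          # hypothetical credential type name
    kind: cloud
    inputs:
      fields:
        - id: api_token              # illustrative input field
          type: string
          label: API Token
          secret: true
    injectors:
      env:
        MY_CLOUD_API_TOKEN: "{{ api_token }}"
    state: present
```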
|
|
||||||
## Running
|
## Running
|
||||||
|
|
||||||
To use this collection, the "old" tower-cli needs to be installed
|
To use this collection, the "old" tower-cli needs to be installed
|
||||||
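How tower-cli gets installed is up to your environment; as a minimal
sketch (assuming the package is published on PyPI as `ansible-tower-cli`):

```
- name: Install tower-cli so the awx.awx modules can import it
  pip:
    name: ansible-tower-cli   # PyPI name assumed; install however suits your setup
```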
@@ -29,12 +40,31 @@ in `awx_collection/test/awx`. These tests require that python packages
|
|||||||
are available for all of `awx`, `ansible`, `tower_cli`, and the collection
|
are available for all of `awx`, `ansible`, `tower_cli`, and the collection
|
||||||
itself.
|
itself.
|
||||||
|
|
||||||
|
### Inside Development Container
|
||||||
|
|
||||||
The target `make prepare_collection_venv` will prepare some requirements
|
The target `make prepare_collection_venv` will prepare some requirements
|
||||||
in the `awx_collection_test_venv` folder so that `make test_collection` can
|
in the `awx_collection_test_venv` folder so that `make test_collection` can
|
||||||
be run to actually run the tests. A single test can be run via:
|
be run to actually run the tests. A single test can be run via:
|
||||||
|
|
||||||
```
|
```
|
||||||
make test_collection MODULE_TEST_DIRS=awx_collection/test/awx/test_organization.py
|
make test_collection COLLECTION_TEST_DIRS=awx_collection/test/awx/test_organization.py
|
||||||
|
```
|
||||||
|
|
||||||
|
### Manually
|
||||||
|
|
||||||
|
As a faster alternative, if you do not want to use the container or you
|
||||||
|
want to run against Ansible or tower-cli from source, it is possible to
|
||||||
|
set up a working environment yourself.
|
||||||
|
|
||||||
|
```
|
||||||
|
mkvirtualenv my_new_venv
|
||||||
|
# may need to replace psycopg2 with psycopg2-binary in requirements/requirements.txt
|
||||||
|
pip install -r requirements/requirements.txt -r requirements/requirements_dev.txt -r requirements/requirements_git.txt
|
||||||
|
make clean-api
|
||||||
|
pip install -e <path to your Ansible>
|
||||||
|
pip install -e <path to your tower-cli>
|
||||||
|
pip install -e .
|
||||||
|
PYTHONPATH=awx_collection:$PYTHONPATH py.test awx_collection/test/awx/
|
||||||
```
|
```
|
||||||
|
|
||||||
## Building
|
## Building
|
||||||
|
|||||||
@@ -98,8 +98,8 @@ class TowerModule(AnsibleModule):
|
|||||||
)
|
)
|
||||||
args.update(argument_spec)
|
args.update(argument_spec)
|
||||||
|
|
||||||
mutually_exclusive = kwargs.get('mutually_exclusive', [])
|
kwargs.setdefault('mutually_exclusive', [])
|
||||||
kwargs['mutually_exclusive'] = mutually_exclusive.extend((
|
kwargs['mutually_exclusive'].extend((
|
||||||
('tower_config_file', 'tower_host'),
|
('tower_config_file', 'tower_host'),
|
||||||
('tower_config_file', 'tower_username'),
|
('tower_config_file', 'tower_username'),
|
||||||
('tower_config_file', 'tower_password'),
|
('tower_config_file', 'tower_password'),
|
||||||
|
|||||||
@@ -53,9 +53,23 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Type of credential being added.
|
- Type of credential being added.
|
||||||
- The ssh choice refers to a Tower Machine credential.
|
- The ssh choice refers to a Tower Machine credential.
|
||||||
required: True
|
required: False
|
||||||
type: str
|
type: str
|
||||||
choices: ["ssh", "vault", "net", "scm", "aws", "vmware", "satellite6", "cloudforms", "gce", "azure_rm", "openstack", "rhv", "insights", "tower"]
|
choices: ["ssh", "vault", "net", "scm", "aws", "vmware", "satellite6", "cloudforms", "gce", "azure_rm", "openstack", "rhv", "insights", "tower"]
|
||||||
|
credential_type:
|
||||||
|
description:
|
||||||
|
- Name of credential type.
|
||||||
|
required: False
|
||||||
|
version_added: "2.10"
|
||||||
|
type: str
|
||||||
|
inputs:
|
||||||
|
description:
|
||||||
|
- >-
|
||||||
|
Credential inputs where the keys are var names used in templating.
|
||||||
|
Refer to the Ansible Tower documentation for example syntax.
|
||||||
|
required: False
|
||||||
|
version_added: "2.9"
|
||||||
|
type: dict
|
||||||
host:
|
host:
|
||||||
description:
|
description:
|
||||||
- Host for this credential.
|
- Host for this credential.
|
||||||
@@ -116,7 +130,8 @@ options:
|
|||||||
become_method:
|
become_method:
|
||||||
description:
|
description:
|
||||||
- Become method to use for privilege escalation.
|
- Become method to use for privilege escalation.
|
||||||
choices: ["None", "sudo", "su", "pbrun", "pfexec", "pmrun"]
|
- Some examples are "None", "sudo", "su", "pbrun"
|
||||||
|
- Due to become plugins, these can be arbitrary
|
||||||
type: str
|
type: str
|
||||||
become_username:
|
become_username:
|
||||||
description:
|
description:
|
||||||
@@ -185,6 +200,15 @@ EXAMPLES = '''
|
|||||||
tower_host: https://localhost
|
tower_host: https://localhost
|
||||||
run_once: true
|
run_once: true
|
||||||
delegate_to: localhost
|
delegate_to: localhost
|
||||||
|
|
||||||
|
- name: Add Credential with Custom Credential Type
|
||||||
|
tower_credential:
|
||||||
|
name: Workshop Credential
|
||||||
|
credential_type: MyCloudCredential
|
||||||
|
organization: Default
|
||||||
|
tower_username: admin
|
||||||
|
tower_password: ansible
|
||||||
|
tower_host: https://localhost
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import os
|
import os
|
||||||
@@ -219,7 +243,17 @@ KIND_CHOICES = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def credential_type_for_v1_kind(params, module):
|
OLD_INPUT_NAMES = (
|
||||||
|
'authorize', 'authorize_password', 'client',
|
||||||
|
'security_token', 'secret', 'tenant', 'subscription',
|
||||||
|
'domain', 'become_method', 'become_username',
|
||||||
|
'become_password', 'vault_password', 'project', 'host',
|
||||||
|
'username', 'password', 'ssh_key_data', 'vault_id',
|
||||||
|
'ssh_key_unlock'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def credential_type_for_kind(params):
|
||||||
credential_type_res = tower_cli.get_resource('credential_type')
|
credential_type_res = tower_cli.get_resource('credential_type')
|
||||||
kind = params.pop('kind')
|
kind = params.pop('kind')
|
||||||
arguments = {'managed_by_tower': True}
|
arguments = {'managed_by_tower': True}
|
||||||
@@ -244,8 +278,9 @@ def main():
|
|||||||
name=dict(required=True),
|
name=dict(required=True),
|
||||||
user=dict(),
|
user=dict(),
|
||||||
team=dict(),
|
team=dict(),
|
||||||
kind=dict(required=True,
|
kind=dict(choices=KIND_CHOICES.keys()),
|
||||||
choices=KIND_CHOICES.keys()),
|
credential_type=dict(),
|
||||||
|
inputs=dict(type='dict'),
|
||||||
host=dict(),
|
host=dict(),
|
||||||
username=dict(),
|
username=dict(),
|
||||||
password=dict(no_log=True),
|
password=dict(no_log=True),
|
||||||
@@ -270,7 +305,14 @@ def main():
|
|||||||
vault_id=dict(),
|
vault_id=dict(),
|
||||||
)
|
)
|
||||||
|
|
||||||
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
|
mutually_exclusive = [
|
||||||
|
('kind', 'credential_type')
|
||||||
|
]
|
||||||
|
for input_name in OLD_INPUT_NAMES:
|
||||||
|
mutually_exclusive.append(('inputs', input_name))
|
||||||
|
|
||||||
|
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True,
|
||||||
|
mutually_exclusive=mutually_exclusive)
|
||||||
|
|
||||||
name = module.params.get('name')
|
name = module.params.get('name')
|
||||||
organization = module.params.get('organization')
|
organization = module.params.get('organization')
|
||||||
@@ -298,10 +340,26 @@ def main():
|
|||||||
# /api/v1/ backwards compat
|
# /api/v1/ backwards compat
|
||||||
# older versions of tower-cli don't *have* a credential_type
|
# older versions of tower-cli don't *have* a credential_type
|
||||||
# resource
|
# resource
|
||||||
params['kind'] = module.params['kind']
|
params['kind'] = module.params.get('kind')
|
||||||
else:
|
else:
|
||||||
credential_type = credential_type_for_v1_kind(module.params, module)
|
if module.params.get('credential_type'):
|
||||||
params['credential_type'] = credential_type['id']
|
credential_type_res = tower_cli.get_resource('credential_type')
|
||||||
|
try:
|
||||||
|
credential_type = credential_type_res.get(name=module.params['credential_type'])
|
||||||
|
except (exc.NotFound) as excinfo:
|
||||||
|
module.fail_json(msg=(
|
||||||
|
'Failed to update credential, credential_type not found: {0}'
|
||||||
|
).format(excinfo), changed=False)
|
||||||
|
params['credential_type'] = credential_type['id']
|
||||||
|
|
||||||
|
if module.params.get('inputs'):
|
||||||
|
params['inputs'] = module.params.get('inputs')
|
||||||
|
|
||||||
|
elif module.params.get('kind'):
|
||||||
|
credential_type = credential_type_for_kind(module.params)
|
||||||
|
params['credential_type'] = credential_type['id']
|
||||||
|
else:
|
||||||
|
module.fail_json(msg='must either specify credential_type or kind', changed=False)
|
||||||
|
|
||||||
if module.params.get('description'):
|
if module.params.get('description'):
|
||||||
params['description'] = module.params.get('description')
|
params['description'] = module.params.get('description')
|
||||||
@@ -333,12 +391,7 @@ def main():
|
|||||||
if module.params.get('vault_id', None) and module.params.get('kind') != 'vault':
|
if module.params.get('vault_id', None) and module.params.get('kind') != 'vault':
|
||||||
module.fail_json(msg="Parameter 'vault_id' is only valid if parameter 'kind' is specified as 'vault'")
|
module.fail_json(msg="Parameter 'vault_id' is only valid if parameter 'kind' is specified as 'vault'")
|
||||||
|
|
||||||
for key in ('authorize', 'authorize_password', 'client',
|
for key in OLD_INPUT_NAMES:
|
||||||
'security_token', 'secret', 'tenant', 'subscription',
|
|
||||||
'domain', 'become_method', 'become_username',
|
|
||||||
'become_password', 'vault_password', 'project', 'host',
|
|
||||||
'username', 'password', 'ssh_key_data', 'vault_id',
|
|
||||||
'ssh_key_unlock'):
|
|
||||||
if 'kind' in params:
|
if 'kind' in params:
|
||||||
params[key] = module.params.get(key)
|
params[key] = module.params.get(key)
|
||||||
elif module.params.get(key):
|
elif module.params.get(key):
|
||||||
|
|||||||
@@ -28,10 +28,12 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The name of the credential type.
|
- The name of the credential type.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
description:
|
description:
|
||||||
description:
|
description:
|
||||||
- The description of the credential type to give more detail about it.
|
- The description of the credential type to give more detail about it.
|
||||||
required: False
|
required: False
|
||||||
|
type: str
|
||||||
kind:
|
kind:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
@@ -40,24 +42,28 @@ options:
|
|||||||
for more information.
|
for more information.
|
||||||
choices: [ 'ssh', 'vault', 'net', 'scm', 'cloud', 'insights' ]
|
choices: [ 'ssh', 'vault', 'net', 'scm', 'cloud', 'insights' ]
|
||||||
required: False
|
required: False
|
||||||
|
type: str
|
||||||
inputs:
|
inputs:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
Enter inputs using either JSON or YAML syntax. Refer to the Ansible
|
Enter inputs using either JSON or YAML syntax. Refer to the Ansible
|
||||||
Tower documentation for example syntax.
|
Tower documentation for example syntax.
|
||||||
required: False
|
required: False
|
||||||
|
type: dict
|
||||||
injectors:
|
injectors:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
Enter injectors using either JSON or YAML syntax. Refer to the
|
Enter injectors using either JSON or YAML syntax. Refer to the
|
||||||
Ansible Tower documentation for example syntax.
|
Ansible Tower documentation for example syntax.
|
||||||
required: False
|
required: False
|
||||||
|
type: dict
|
||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- Desired state of the resource.
|
- Desired state of the resource.
|
||||||
required: False
|
required: False
|
||||||
default: "present"
|
default: "present"
|
||||||
choices: ["present", "absent"]
|
choices: ["present", "absent"]
|
||||||
|
type: str
|
||||||
validate_certs:
|
validate_certs:
|
||||||
description:
|
description:
|
||||||
- Tower option to avoid certificates check.
|
- Tower option to avoid certificates check.
|
||||||
|
|||||||
@@ -27,38 +27,50 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The name to use for the group.
|
- The name to use for the group.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
description:
|
description:
|
||||||
description:
|
description:
|
||||||
- The description to use for the group.
|
- The description to use for the group.
|
||||||
|
type: str
|
||||||
inventory:
|
inventory:
|
||||||
description:
|
description:
|
||||||
- Inventory the group should be made a member of.
|
- Inventory the group should be made a member of.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
variables:
|
variables:
|
||||||
description:
|
description:
|
||||||
- Variables to use for the group, use C(@) for a file.
|
- Variables to use for the group, use C(@) for a file.
|
||||||
|
type: str
|
||||||
credential:
|
credential:
|
||||||
description:
|
description:
|
||||||
- Credential to use for the group.
|
- Credential to use for the group.
|
||||||
|
type: str
|
||||||
source:
|
source:
|
||||||
description:
|
description:
|
||||||
- The source to use for this group.
|
- The source to use for this group.
|
||||||
choices: ["manual", "file", "ec2", "rax", "vmware", "gce", "azure", "azure_rm", "openstack", "satellite6" , "cloudforms", "custom"]
|
choices: ["manual", "file", "ec2", "rax", "vmware", "gce", "azure", "azure_rm", "openstack", "satellite6" , "cloudforms", "custom"]
|
||||||
|
default: manual
|
||||||
|
type: str
|
||||||
source_regions:
|
source_regions:
|
||||||
description:
|
description:
|
||||||
- Regions for cloud provider.
|
- Regions for cloud provider.
|
||||||
|
type: str
|
||||||
source_vars:
|
source_vars:
|
||||||
description:
|
description:
|
||||||
- Override variables from source with variables from this field.
|
- Override variables from source with variables from this field.
|
||||||
|
type: str
|
||||||
instance_filters:
|
instance_filters:
|
||||||
description:
|
description:
|
||||||
- Comma-separated list of filter expressions for matching hosts.
|
- Comma-separated list of filter expressions for matching hosts.
|
||||||
|
type: str
|
||||||
group_by:
|
group_by:
|
||||||
description:
|
description:
|
||||||
- Limit groups automatically created from inventory source.
|
- Limit groups automatically created from inventory source.
|
||||||
|
type: str
|
||||||
source_script:
|
source_script:
|
||||||
description:
|
description:
|
||||||
- Inventory script to be used when group type is C(custom).
|
- Inventory script to be used when group type is C(custom).
|
||||||
|
type: str
|
||||||
overwrite:
|
overwrite:
|
||||||
description:
|
description:
|
||||||
- Delete child groups and hosts not found in source.
|
- Delete child groups and hosts not found in source.
|
||||||
@@ -67,6 +79,7 @@ options:
|
|||||||
overwrite_vars:
|
overwrite_vars:
|
||||||
description:
|
description:
|
||||||
- Override vars in child groups and hosts with those from external source.
|
- Override vars in child groups and hosts with those from external source.
|
||||||
|
type: bool
|
||||||
update_on_launch:
|
update_on_launch:
|
||||||
description:
|
description:
|
||||||
- Refresh inventory data from its source each time a job is run.
|
- Refresh inventory data from its source each time a job is run.
|
||||||
@@ -77,6 +90,7 @@ options:
|
|||||||
- Desired state of the resource.
|
- Desired state of the resource.
|
||||||
default: "present"
|
default: "present"
|
||||||
choices: ["present", "absent"]
|
choices: ["present", "absent"]
|
||||||
|
type: str
|
||||||
extends_documentation_fragment: awx.awx.auth
|
extends_documentation_fragment: awx.awx.auth
|
||||||
'''
|
'''
|
||||||
|
|
||||||
@@ -120,7 +134,7 @@ def main():
|
|||||||
group_by=dict(),
|
group_by=dict(),
|
||||||
source_script=dict(),
|
source_script=dict(),
|
||||||
overwrite=dict(type='bool', default=False),
|
overwrite=dict(type='bool', default=False),
|
||||||
overwrite_vars=dict(),
|
overwrite_vars=dict(type='bool', default=False),
|
||||||
update_on_launch=dict(type='bool', default=False),
|
update_on_launch=dict(type='bool', default=False),
|
||||||
state=dict(choices=['present', 'absent'], default='present'),
|
state=dict(choices=['present', 'absent'], default='present'),
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -27,13 +27,16 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The name to use for the host.
|
- The name to use for the host.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
description:
|
description:
|
||||||
description:
|
description:
|
||||||
- The description to use for the host.
|
- The description to use for the host.
|
||||||
|
type: str
|
||||||
inventory:
|
inventory:
|
||||||
description:
|
description:
|
||||||
- Inventory the host should be made a member of.
|
- Inventory the host should be made a member of.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
enabled:
|
enabled:
|
||||||
description:
|
description:
|
||||||
- If the host should be enabled.
|
- If the host should be enabled.
|
||||||
@@ -42,11 +45,13 @@ options:
|
|||||||
variables:
|
variables:
|
||||||
description:
|
description:
|
||||||
- Variables to use for the host. Use C(@) for a file.
|
- Variables to use for the host. Use C(@) for a file.
|
||||||
|
type: str
|
||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- Desired state of the resource.
|
- Desired state of the resource.
|
||||||
choices: ["present", "absent"]
|
choices: ["present", "absent"]
|
||||||
default: "present"
|
default: "present"
|
||||||
|
type: str
|
||||||
extends_documentation_fragment: awx.awx.auth
|
extends_documentation_fragment: awx.awx.auth
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
|||||||
@@ -27,31 +27,38 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The name to use for the inventory.
|
- The name to use for the inventory.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
description:
|
description:
|
||||||
description:
|
description:
|
||||||
- The description to use for the inventory.
|
- The description to use for the inventory.
|
||||||
|
type: str
|
||||||
organization:
|
organization:
|
||||||
description:
|
description:
|
||||||
- Organization the inventory belongs to.
|
- Organization the inventory belongs to.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
variables:
|
variables:
|
||||||
description:
|
description:
|
||||||
- Inventory variables. Use C(@) to get from file.
|
- Inventory variables. Use C(@) to get from file.
|
||||||
|
type: str
|
||||||
kind:
|
kind:
|
||||||
description:
|
description:
|
||||||
- The kind field. Cannot be modified after creation.
|
- The kind field. Cannot be modified after creation.
|
||||||
default: ""
|
default: ""
|
||||||
choices: ["", "smart"]
|
choices: ["", "smart"]
|
||||||
version_added: "2.7"
|
version_added: "2.7"
|
||||||
|
type: str
|
||||||
host_filter:
|
host_filter:
|
||||||
description:
|
description:
|
||||||
- The host_filter field. Only useful when C(kind=smart).
|
- The host_filter field. Only useful when C(kind=smart).
|
||||||
version_added: "2.7"
|
version_added: "2.7"
|
||||||
|
type: str
|
||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- Desired state of the resource.
|
- Desired state of the resource.
|
||||||
default: "present"
|
default: "present"
|
||||||
choices: ["present", "absent"]
|
choices: ["present", "absent"]
|
||||||
|
type: str
|
||||||
extends_documentation_fragment: awx.awx.auth
|
extends_documentation_fragment: awx.awx.auth
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
|||||||
@@ -27,13 +27,16 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The name to use for the inventory source.
|
- The name to use for the inventory source.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
description:
|
description:
|
||||||
description:
|
description:
|
||||||
- The description to use for the inventory source.
|
- The description to use for the inventory source.
|
||||||
|
type: str
|
||||||
inventory:
|
inventory:
|
||||||
description:
|
description:
|
||||||
- The inventory the source is linked to.
|
- The inventory the source is linked to.
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
source:
|
source:
|
||||||
description:
|
description:
|
||||||
- Types of inventory source.
|
- Types of inventory source.
|
||||||
@@ -52,9 +55,11 @@ options:
|
|||||||
- tower
|
- tower
|
||||||
- custom
|
- custom
|
||||||
required: True
|
required: True
|
||||||
|
type: str
|
||||||
credential:
|
credential:
|
||||||
description:
|
description:
|
||||||
- Credential to use to retrieve the inventory from.
|
- Credential to use to retrieve the inventory from.
|
||||||
|
type: str
|
||||||
source_vars:
|
source_vars:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
@@ -62,15 +67,19 @@ options:
|
|||||||
file. For example with Openstack, specifying *private: false* would
|
file. For example with Openstack, specifying *private: false* would
|
||||||
change the output of the openstack.py script. It has to be YAML or
|
change the output of the openstack.py script. It has to be YAML or
|
||||||
JSON.
|
JSON.
|
||||||
|
type: str
|
||||||
timeout:
|
timeout:
|
||||||
description:
|
description:
|
||||||
- Number in seconds after which the Tower API methods will time out.
|
- Number in seconds after which the Tower API methods will time out.
|
||||||
|
type: int
|
||||||
source_project:
|
source_project:
|
||||||
description:
|
description:
|
||||||
- Use a *project* as a source for the *inventory*.
|
- Use a *project* as a source for the *inventory*.
|
||||||
|
type: str
|
||||||
source_path:
|
source_path:
|
||||||
description:
|
description:
|
||||||
- Path to the file to use as a source in the selected *project*.
|
- Path to the file to use as a source in the selected *project*.
|
||||||
|
type: str
|
||||||
update_on_project_update:
|
update_on_project_update:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
@@ -83,23 +92,27 @@ options:
|
|||||||
List of regions for your cloud provider. You can include multiple regions or all
|
List of regions for your cloud provider. You can include multiple regions or all
|
||||||
regions. Only Hosts associated with the selected regions will be
|
regions. Only Hosts associated with the selected regions will be
|
||||||
updated. Refer to Ansible Tower documentation for more detail.
|
updated. Refer to Ansible Tower documentation for more detail.
|
||||||
|
type: str
|
||||||
instance_filters:
|
instance_filters:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
Provide a comma-separated list of filter expressions. Hosts are
|
Provide a comma-separated list of filter expressions. Hosts are
|
||||||
imported when all of the filters match. Refer to Ansible Tower
|
imported when all of the filters match. Refer to Ansible Tower
|
||||||
documentation for more detail.
|
documentation for more detail.
|
||||||
|
type: str
|
||||||
group_by:
|
group_by:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
Specify which groups to create automatically. Group names will be
|
Specify which groups to create automatically. Group names will be
|
||||||
created similar to the options selected. If blank, all groups above
|
created similar to the options selected. If blank, all groups above
|
||||||
are created. Refer to Ansible Tower documentation for more detail.
|
are created. Refer to Ansible Tower documentation for more detail.
|
||||||
|
type: str
|
||||||
source_script:
|
source_script:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
The source custom script to use to build the inventory. It needs to
|
The source custom script to use to build the inventory. It needs to
|
||||||
exist.
|
exist.
|
||||||
|
type: str
|
||||||
overwrite:
|
overwrite:
|
||||||
description:
|
description:
|
||||||
- >-
|
- >-
|
||||||
@@ -133,16 +146,13 @@ options:
|
|||||||
job runs and callbacks the task system will evaluate the timestamp of
|
job runs and callbacks the task system will evaluate the timestamp of
|
||||||
the latest sync. If it is older than Cache Timeout, it is not
|
the latest sync. If it is older than Cache Timeout, it is not
|
||||||
considered current, and a new inventory sync will be performed.
|
considered current, and a new inventory sync will be performed.
|
||||||
|
type: int
|
||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- Desired state of the resource.
|
- Desired state of the resource.
|
||||||
default: "present"
|
default: "present"
|
||||||
choices: ["present", "absent"]
|
choices: ["present", "absent"]
|
||||||
validate_certs:
|
type: str
|
||||||
description:
|
|
||||||
- Tower option to avoid certificates check.
|
|
||||||
type: bool
|
|
||||||
aliases: [ tower_verify_ssl ]
|
|
||||||
extends_documentation_fragment: awx.awx.auth
|
extends_documentation_fragment: awx.awx.auth
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
|||||||
@@ -27,6 +27,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
- ID of the job to cancel
|
- ID of the job to cancel
|
||||||
required: True
|
required: True
|
||||||
|
type: int
|
||||||
fail_if_not_running:
|
fail_if_not_running:
|
||||||
description:
|
description:
|
||||||
- Fail loudly if the I(job_id) does not reference a running job.
|
- Fail loudly if the I(job_id) does not reference a running job.
|
||||||
|
|||||||
@@ -27,33 +27,32 @@ options:
    description:
      - Name of the job template to use.
    required: True
+    type: str
-  job_explanation:
-    description:
-      - Job explanation field.
  job_type:
    description:
      - Job_type to use for the job, only used if prompt for job_type is set.
    choices: ["run", "check", "scan"]
+    type: str
  inventory:
    description:
      - Inventory to use for the job, only used if prompt for inventory is set.
+    type: str
  credential:
    description:
      - Credential to use for job, only used if prompt for credential is set.
+    type: str
  extra_vars:
    description:
      - Extra_vars to use for the job_template. Prepend C(@) if a file.
+    type: list
  limit:
    description:
      - Limit to use for the I(job_template).
+    type: str
  tags:
    description:
      - Specific tags to use for from playbook.
+    type: list
-  use_job_endpoint:
-    description:
-      - Disable launching jobs from job template.
-    type: bool
-    default: 'no'
extends_documentation_fragment: awx.awx.auth
'''
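As a quick orientation for the launch options documented above, a task using them might look like the sketch below. This is not part of this commit; the module name tower_job_launch, the job_template parameter name, and all values are assumptions for illustration only.

- name: Launch a job from an existing job template
  tower_job_launch:
    job_template: "Demo Job Template"   # parameter name assumed
    inventory: "Demo Inventory"
    limit: webservers
    tags:
      - deploy
  register: launched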
@@ -27,9 +27,11 @@ options:
    description:
      - Only list jobs with this status.
    choices: ['pending', 'waiting', 'running', 'error', 'failed', 'canceled', 'successful']
+    type: str
  page:
    description:
      - Page number of the results to fetch.
+    type: int
  all_pages:
    description:
      - Fetch all the pages and return a single result.
@@ -38,6 +40,7 @@ options:
  query:
    description:
      - Query used to further filter the list of jobs. C({"foo":"bar"}) will be passed at C(?foo=bar)
+    type: dict
extends_documentation_fragment: awx.awx.auth
'''
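A hedged usage sketch for the listing options above (the module name tower_job_list and all values are assumed for illustration, not taken from this commit):

- name: List successful jobs, filtered with an extra query
  tower_job_list:
    status: successful
    page: 1
    query: {"foo": "bar"}
  register: job_list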
@@ -27,50 +27,63 @@ options:
    description:
      - Name to use for the job template.
    required: True
+    type: str
  description:
    description:
      - Description to use for the job template.
+    type: str
  job_type:
    description:
      - The job type to use for the job template.
    required: True
    choices: ["run", "check", "scan"]
+    type: str
  inventory:
    description:
      - Name of the inventory to use for the job template.
+    type: str
  project:
    description:
      - Name of the project to use for the job template.
    required: True
+    type: str
  playbook:
    description:
      - Path to the playbook to use for the job template within the project provided.
    required: True
+    type: str
  credential:
    description:
      - Name of the credential to use for the job template.
    version_added: 2.7
+    type: str
  vault_credential:
    description:
      - Name of the vault credential to use for the job template.
    version_added: 2.7
+    type: str
  forks:
    description:
      - The number of parallel or simultaneous processes to use while executing the playbook.
+    type: int
  limit:
    description:
      - A host pattern to further constrain the list of hosts managed or affected by the playbook
+    type: str
  verbosity:
    description:
      - Control the output level Ansible produces as the playbook runs. 0 - Normal, 1 - Verbose, 2 - More Verbose, 3 - Debug, 4 - Connection Debug.
    choices: [0, 1, 2, 3, 4]
    default: 0
+    type: int
  extra_vars_path:
    description:
      - Path to the C(extra_vars) YAML file.
+    type: path
  job_tags:
    description:
      - Comma separated list of the tags to use for the job template.
+    type: str
  force_handlers_enabled:
    description:
      - Enable forcing playbook handlers to run even if a task fails.
@@ -80,10 +93,12 @@ options:
  skip_tags:
    description:
      - Comma separated list of the tags to skip for the job template.
+    type: str
  start_at_task:
    description:
      - Start the playbook at the task matching this name.
    version_added: 2.7
+    type: str
  diff_mode_enabled:
    description:
      - Enable diff mode for the job template.
@@ -99,6 +114,7 @@ options:
  host_config_key:
    description:
      - Allow provisioning callbacks using this host config key.
+    type: str
  ask_diff_mode:
    description:
      - Prompt user to enable diff mode (show changes) to files when supported by modules.
@@ -171,11 +187,16 @@ options:
    version_added: 2.7
    type: bool
    default: 'no'
+  timeout:
+    description:
+      - Maximum time in seconds to wait for a job to finish (server-side).
+    type: int
  state:
    description:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
notes:
  - JSON for survey_spec can be found in Tower API Documentation. See
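For context, a minimal task exercising the options above might look like this sketch (assuming the tower_job_template module; names and values are illustrative and not taken from this commit):

- name: Ensure a job template exists
  tower_job_template:
    name: "Demo Job Template"
    job_type: run
    inventory: "Demo Inventory"
    project: "Demo Project"
    playbook: hello_world.yml
    credential: "Demo Credential"
    verbosity: 1
    state: present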
@@ -27,17 +27,21 @@ options:
    description:
      - ID of the job to monitor.
    required: True
+    type: int
  min_interval:
    description:
      - Minimum interval in seconds, to request an update from Tower.
    default: 1
+    type: float
  max_interval:
    description:
      - Maximum interval in seconds, to request an update from Tower.
    default: 30
+    type: float
  timeout:
    description:
      - Maximum time in seconds to wait for a job to finish.
+    type: int
extends_documentation_fragment: awx.awx.auth
'''
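A small sketch of how the polling options above combine (assuming the tower_job_wait module; the job ID, intervals, and timeout are illustrative placeholders):

- name: Wait for a job to finish, polling between 5 and 30 seconds
  tower_job_wait:
    job_id: 123
    min_interval: 5
    max_interval: 30
    timeout: 600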
@@ -27,15 +27,18 @@ options:
    description:
      - Name to use for the label.
    required: True
+    type: str
  organization:
    description:
      - Organization the label should be applied to.
    required: True
+    type: str
  state:
    description:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -27,35 +27,43 @@ options:
    description:
      - The name of the notification.
    required: True
+    type: str
  description:
    description:
      - The description of the notification.
    required: False
+    type: str
  organization:
    description:
      - The organization the notification belongs to.
    required: False
+    type: str
  notification_type:
    description:
      - The type of notification to be sent.
    required: True
    choices: ["email", "slack", "twilio", "pagerduty", "hipchat", "webhook", "irc"]
+    type: str
  notification_configuration:
    description:
      - The notification configuration file. Note providing this field would disable all notification-configuration-related fields.
    required: False
+    type: str
  username:
    description:
      - The mail server username. Required if I(notification_type=email).
    required: False
+    type: str
  sender:
    description:
      - The sender email address. Required if I(notification_type=email).
    required: False
+    type: str
  recipients:
    description:
      - The recipients email addresses. Required if I(notification_type=email).
    required: False
+    type: list
  use_tls:
    description:
      - The TLS trigger. Required if I(notification_type=email).
@@ -65,6 +73,7 @@ options:
    description:
      - The mail server host. Required if I(notification_type=email).
    required: False
+    type: str
  use_ssl:
    description:
      - The SSL trigger. Required if I(notification_type=email) or if I(notification_type=irc).
@@ -74,10 +83,12 @@ options:
    description:
      - The mail server password. Required if I(notification_type=email) or if I(notification_type=irc).
    required: False
+    type: str
  port:
    description:
      - The mail server port. Required if I(notification_type=email) or if I(notification_type=irc).
    required: False
+    type: int
  channels:
    description:
      - The destination Slack channels. Required if I(notification_type=slack).
@@ -87,47 +98,58 @@ options:
    description:
      - The access token. Required if I(notification_type=slack), if I(notification_type=pagerduty) or if I(notification_type=hipchat).
    required: False
+    type: str
  account_token:
    description:
      - The Twillio account token. Required if I(notification_type=twillio).
    required: False
+    type: str
  from_number:
    description:
      - The source phone number. Required if I(notification_type=twillio).
    required: False
+    type: str
  to_numbers:
    description:
      - The destination phone numbers. Required if I(notification_type=twillio).
    required: False
+    type: list
  account_sid:
    description:
      - The Twillio account SID. Required if I(notification_type=twillio).
    required: False
+    type: str
  subdomain:
    description:
      - The PagerDuty subdomain. Required if I(notification_type=pagerduty).
    required: False
+    type: str
  service_key:
    description:
      - The PagerDuty service/integration API key. Required if I(notification_type=pagerduty).
    required: False
+    type: str
  client_name:
    description:
      - The PagerDuty client identifier. Required if I(notification_type=pagerduty).
    required: False
+    type: str
  message_from:
    description:
      - The label to be shown with the notification. Required if I(notification_type=hipchat).
    required: False
+    type: str
  api_url:
    description:
      - The HipChat API URL. Required if I(notification_type=hipchat).
    required: False
+    type: str
  color:
    description:
      - The notification color. Required if I(notification_type=hipchat).
    required: False
    choices: ["yellow", "green", "red", "purple", "gray", "random"]
+    type: str
  rooms:
    description:
      - HipChat rooms to send the notification to. Required if I(notification_type=hipchat).
@@ -142,18 +164,22 @@ options:
    description:
      - The target URL. Required if I(notification_type=webhook).
    required: False
+    type: str
  headers:
    description:
      - The HTTP headers as JSON string. Required if I(notification_type=webhook).
    required: False
+    type: dict
  server:
    description:
      - The IRC server address. Required if I(notification_type=irc).
    required: False
+    type: str
  nickname:
    description:
      - The IRC nickname. Required if I(notification_type=irc).
    required: False
+    type: str
  targets:
    description:
      - The destination channels or users. Required if I(notification_type=irc).
@@ -164,6 +190,7 @@ options:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
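To make the email-specific options above concrete, a hedged example follows (assuming the tower_notification module; the SMTP server, credentials, and addresses are placeholders, not from this commit):

- name: Create an email notification template
  tower_notification:
    name: "Email the ops team"
    organization: Default
    notification_type: email
    username: smtp-user
    password: smtp-password
    sender: tower@example.com
    recipients:
      - ops@example.com
    host: smtp.example.com
    port: 25
    use_tls: yes
    state: present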
@@ -27,14 +27,17 @@ options:
    description:
      - Name to use for the organization.
    required: True
+    type: str
  description:
    description:
      - The description to use for the organization.
+    type: str
  state:
    description:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -27,26 +27,33 @@ options:
    description:
      - Name to use for the project.
    required: True
+    type: str
  description:
    description:
      - Description to use for the project.
+    type: str
  scm_type:
    description:
      - Type of SCM resource.
    choices: ["manual", "git", "hg", "svn"]
    default: "manual"
+    type: str
  scm_url:
    description:
      - URL of SCM resource.
+    type: str
  local_path:
    description:
      - The server playbook directory for manual projects.
+    type: str
  scm_branch:
    description:
      - The branch to use for the SCM resource.
+    type: str
  scm_credential:
    description:
      - Name of the credential to use with this SCM resource.
+    type: str
  scm_clean:
    description:
      - Remove local modifications before updating.
@@ -68,23 +75,28 @@ options:
      - Cache Timeout to cache prior project syncs for a certain number of seconds.
        Only valid if scm_update_on_launch is to True, otherwise ignored.
    default: 0
+    type: int
  job_timeout:
    version_added: "2.8"
    description:
      - The amount of time (in seconds) to run before the SCM Update is canceled. A value of 0 means no timeout.
    default: 0
+    type: int
  custom_virtualenv:
    version_added: "2.8"
    description:
      - Local absolute file path containing a custom Python virtualenv to use
+    type: str
  organization:
    description:
      - Primary key of organization for project.
+    type: str
  state:
    description:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -32,46 +32,57 @@ options:
    description:
      - List of organization names to export
    default: []
+    type: list
  user:
    description:
      - List of user names to export
    default: []
+    type: list
  team:
    description:
      - List of team names to export
    default: []
+    type: list
  credential_type:
    description:
      - List of credential type names to export
    default: []
+    type: list
  credential:
    description:
      - List of credential names to export
    default: []
+    type: list
  notification_template:
    description:
      - List of notification template names to export
    default: []
+    type: list
  inventory_script:
    description:
      - List of inventory script names to export
    default: []
+    type: list
  inventory:
    description:
      - List of inventory names to export
    default: []
+    type: list
  project:
    description:
      - List of project names to export
    default: []
+    type: list
  job_template:
    description:
      - List of job template names to export
    default: []
+    type: list
  workflow:
    description:
      - List of workflow names to export
    default: []
+    type: list

requirements:
  - "ansible-tower-cli >= 3.3.0"
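A usage sketch for the export lists above (assuming the tower_receive module; the asset names are illustrative, not part of this commit):

- name: Export a job template and its supporting credential
  tower_receive:
    job_template:
      - "Demo Job Template"
    credential:
      - "Demo Credential"
  register: exported_assets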
@@ -26,38 +26,48 @@ options:
  user:
    description:
      - User that receives the permissions specified by the role.
+    type: str
  team:
    description:
      - Team that receives the permissions specified by the role.
+    type: str
  role:
    description:
      - The role type to grant/revoke.
    required: True
    choices: ["admin", "read", "member", "execute", "adhoc", "update", "use", "auditor", "project_admin", "inventory_admin", "credential_admin",
              "workflow_admin", "notification_admin", "job_template_admin"]
+    type: str
  target_team:
    description:
      - Team that the role acts on.
+    type: str
  inventory:
    description:
      - Inventory the role acts on.
+    type: str
  job_template:
    description:
      - The job template the role acts on.
+    type: str
  credential:
    description:
      - Credential the role acts on.
+    type: str
  organization:
    description:
      - Organization the role acts on.
+    type: str
  project:
    description:
      - Project the role acts on.
+    type: str
  state:
    description:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
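For illustration of the grant/revoke options above (assuming the tower_role module; the user and object names are placeholders):

- name: Give a user execute permission on a job template
  tower_role:
    user: jdoe
    role: execute
    job_template: "Demo Job Template"
    state: present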
@@ -28,16 +28,19 @@ options:
      - The assets to import.
      - This can be the output of tower_receive or loaded from a file
    required: False
+    type: str
  files:
    description:
      - List of files to import.
    required: False
    default: []
+    type: list
  prevent:
    description:
      - A list of asset types to prevent import for
    required: false
    default: []
+    type: list
  password_management:
    description:
      - The password management option to use.
@@ -45,6 +48,7 @@ options:
    required: false
    default: 'default'
    choices: ["default", "random"]
+    type: str

notes:
  - One of assets or files needs to be passed in
@@ -27,10 +27,12 @@ options:
    description:
      - Name of setting to modify
    required: True
+    type: str
  value:
    description:
      - Value to be modified for given setting.
    required: True
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
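A minimal sketch for the name/value pair above (assuming the tower_settings module; the setting name and value shown are only examples and may not apply to every installation):

- name: Change a Tower configuration setting
  tower_settings:
    name: AWX_PROOT_BASE_PATH
    value: "/opt/tmp"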
@@ -27,15 +27,22 @@ options:
    description:
      - Name to use for the team.
    required: True
+    type: str
+  description:
+    description:
+      - The description to use for the team.
+    type: str
  organization:
    description:
      - Organization the team should be made a member of.
    required: True
+    type: str
  state:
    description:
      - Desired state of the resource.
    choices: ["present", "absent"]
    default: "present"
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
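A usage sketch for the team options above (assuming the tower_team module; the names are illustrative):

- name: Ensure a team exists in an organization
  tower_team:
    name: "Platform Team"
    description: "Owns platform automation"
    organization: Default
    state: present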
@@ -27,19 +27,24 @@ options:
    description:
      - The username of the user.
    required: True
+    type: str
  first_name:
    description:
      - First name of the user.
+    type: str
  last_name:
    description:
      - Last name of the user.
+    type: str
  email:
    description:
      - Email address of the user.
    required: True
+    type: str
  password:
    description:
      - Password of the user.
+    type: str
  superuser:
    description:
      - User is a system wide administrator.
@@ -55,6 +60,7 @@ options:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str

requirements:
  - ansible-tower-cli >= 3.2.0
@@ -24,10 +24,12 @@ options:
    description:
      - The name of the workflow template to run.
    required: True
+    type: str
  extra_vars:
    description:
      - Any extra vars required to launch the job.
    required: False
+    type: str
  wait:
    description:
      - Wait for the workflow to complete.
@@ -38,6 +40,7 @@ options:
    description:
      - If waiting for the workflow to complete this will abort after this
        amount of seconds
+    type: int

requirements:
  - "python >= 2.6"
@@ -40,26 +40,32 @@ options:
  description:
    description:
      - The description to use for the workflow.
+    type: str
  extra_vars:
    description:
      - Extra variables used by Ansible in YAML or key=value format.
+    type: str
  inventory:
    description:
      - Name of the inventory to use for the job template.
    version_added: "2.9"
+    type: str
  name:
    description:
      - The name to use for the workflow.
    required: True
+    type: str
  organization:
    description:
      - The organization the workflow is linked to.
+    type: str
  schema:
    description:
      - >
        The schema is a JSON- or YAML-formatted string defining the
        hierarchy structure that connects the nodes. Refer to Tower
        documentation for more information.
+    type: str
  survey_enabled:
    description:
      - Setting that variable will prompt the user for job type on the
@@ -68,11 +74,13 @@ options:
  survey:
    description:
      - The definition of the survey associated to the workflow.
+    type: str
  state:
    description:
      - Desired state of the resource.
    default: "present"
    choices: ["present", "absent"]
+    type: str
extends_documentation_fragment: awx.awx.auth
'''
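To tie the schema option above to something concrete, a hedged sketch (assuming the tower_workflow_template module; the file lookup and names are illustrative, not from this commit):

- name: Create a workflow template from a schema file
  tower_workflow_template:
    name: "Nightly pipeline"
    organization: Default
    schema: "{{ lookup('file', 'workflow_schema.yml') }}"
    state: present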
@@ -62,6 +62,9 @@ def run_module():
    # We should consider supporting that in the future
    resource_module = importlib.import_module('plugins.modules.{}'.format(module_name))

+    if not isinstance(module_params, dict):
+        raise RuntimeError('Module params must be dict, got {}'.format(type(module_params)))
+
    # Ansible params can be passed as an invocation argument or over stdin
    # this short circuits within the AnsibleModule interface
    def mock_load_params(self):
awx_collection/test/awx/test_credential.py (new file, 151 lines)
@@ -0,0 +1,151 @@
import pytest

from awx.main.models import Credential, CredentialType, Organization


@pytest.mark.django_db
def test_create_machine_credential(run_module, admin_user):
    Organization.objects.create(name='test-org')
    # create the ssh credential type
    CredentialType.defaults['ssh']().save()
    # Example from docs
    result = run_module('tower_credential', dict(
        name='Team Name',
        description='Team Description',
        organization='test-org',
        kind='ssh',
        state='present'
    ), admin_user)

    cred = Credential.objects.get(name='Team Name')
    result.pop('invocation')
    assert result == {
        "credential": "Team Name",
        "state": "present",
        "id": cred.pk,
        "changed": True
    }


@pytest.mark.django_db
def test_create_custom_credential_type(run_module, admin_user):
    # Example from docs
    result = run_module('tower_credential_type', dict(
        name='Nexus',
        description='Credentials type for Nexus',
        kind='cloud',
        inputs={"fields": [{"id": "server", "type": "string", "default": "", "label": ""}], "required": []},
        injectors={'extra_vars': {'nexus_credential': 'test'}},
        state='present',
        validate_certs='false'
    ), admin_user)

    ct = CredentialType.objects.get(name='Nexus')
    result.pop('invocation')
    assert result == {
        "credential_type": "Nexus",
        "state": "present",
        "id": ct.pk,
        "changed": True
    }

    assert ct.inputs == {"fields": [{"id": "server", "type": "string", "default": "", "label": ""}], "required": []}
    assert ct.injectors == {'extra_vars': {'nexus_credential': 'test'}}


@pytest.mark.django_db
def test_kind_ct_exclusivity(run_module, admin_user):
    result = run_module('tower_credential', dict(
        name='A credential',
        organization='test-org',
        kind='ssh',
        credential_type='foobar',  # cannot specify if kind is also specified
        state='present'
    ), admin_user)

    result.pop('invocation')
    assert result == {
        'failed': True,
        'msg': 'parameters are mutually exclusive: kind|credential_type'
    }


@pytest.mark.django_db
def test_input_exclusivity(run_module, admin_user):
    result = run_module('tower_credential', dict(
        name='A credential',
        organization='test-org',
        kind='ssh',
        inputs={'token': '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'},
        security_token='7rEZK38DJl58A7RxA6EC7lLvUHbBQ1',
        state='present'
    ), admin_user)

    result.pop('invocation')
    assert result == {
        'failed': True,
        'msg': 'parameters are mutually exclusive: inputs|security_token'
    }


@pytest.mark.django_db
def test_missing_credential_type(run_module, admin_user):
    Organization.objects.create(name='test-org')
    result = run_module('tower_credential', dict(
        name='A credential',
        organization='test-org',
        credential_type='foobar',
        state='present'
    ), admin_user)

    result.pop('invocation')
    assert result == {
        "changed": False,
        "failed": True,
        'msg': 'Failed to update credential, credential_type not found: The requested object could not be found.'
    }


@pytest.mark.django_db
def test_make_use_of_custom_credential_type(run_module, admin_user):
    Organization.objects.create(name='test-org')
    # Make a credential type which will be used by the credential
    ct = CredentialType.objects.create(
        name='Ansible Galaxy Token',
        inputs={
            "fields": [
                {
                    "id": "token",
                    "type": "string",
                    "secret": True,
                    "label": "Ansible Galaxy Secret Token Value"
                }
            ],
            "required": ["token"]
        },
        injectors={
            "extra_vars": {
                "galaxy_token": "{{token}}",
            }
        }
    )
    result = run_module('tower_credential', dict(
        name='Galaxy Token for Steve',
        organization='test-org',
        credential_type='Ansible Galaxy Token',
        inputs={'token': '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'},
        state='present'
    ), admin_user)

    cred = Credential.objects.get(name='Galaxy Token for Steve')
    assert cred.credential_type_id == ct.id
    assert list(cred.inputs.keys()) == ['token']
    assert cred.inputs['token'].startswith('$encrypted$')
    assert len(cred.inputs['token']) >= len('$encrypted$') + len('7rEZK38DJl58A7RxA6EC7lLvUHbBQ1')
    result.pop('invocation')
    assert result == {
        "credential": "Galaxy Token for Steve",
        "state": "present",
        "id": cred.pk,
        "changed": True
    }
@@ -32,7 +32,6 @@ def test_create_job_template(run_module, admin_user, project, inventory):


@pytest.mark.django_db
-@pytest.mark.xfail(reason='Known limitation and needs to be fixed.')
def test_create_job_template_with_old_machine_cred(run_module, admin_user, project, inventory, machine_credential):

    module_args = {
awx_collection/test/awx/test_send_receive.py (new file, 72 lines)
@@ -0,0 +1,72 @@
import pytest
import json

from awx.main.models import (
    Organization,
    Project,
    Inventory,
    Host,
    CredentialType,
    Credential,
    JobTemplate
)


@pytest.mark.django_db
def test_receive_send_jt(run_module, admin_user, mocker):
    org = Organization.objects.create(name='SRtest')
    proj = Project.objects.create(
        name='SRtest',
        playbook_files=['debug.yml'],
        scm_type='git',
        scm_url='https://github.com/ansible/test-playbooks.git',
        organization=org,
        allow_override=True  # so we do not require playbooks populated
    )
    inv = Inventory.objects.create(name='SRtest', organization=org)
    Host.objects.create(name='SRtest', inventory=inv)
    ct = CredentialType.defaults['ssh']()
    ct.save()
    cred = Credential.objects.create(
        name='SRtest',
        credential_type=ct,
        organization=org
    )
    jt = JobTemplate.objects.create(
        name='SRtest',
        project=proj,
        inventory=inv,
        playbook='helloworld.yml'
    )
    jt.credentials.add(cred)
    jt.admin_role.members.add(admin_user)  # work around send/receive bug

    # receive everything
    result = run_module('tower_receive', dict(all=True), admin_user)

    assert 'assets' in result, result
    assets = result['assets']
    assert not result.get('changed', True)
    assert set(a['asset_type'] for a in assets) == set((
        'organization', 'inventory', 'job_template', 'credential', 'project',
        'user'
    ))

    # delete everything
    for obj in (jt, inv, proj, cred, org):
        obj.delete()

    def fake_wait(self, pk, parent_pk=None, **kwargs):
        return {"changed": True}

    # recreate everything
    with mocker.patch('sys.stdin.isatty', return_value=True):
        with mocker.patch('tower_cli.models.base.MonitorableResource.wait'):
            result = run_module('tower_send', dict(assets=json.dumps(assets)), admin_user)

    assert not result.get('failed'), result

    new = JobTemplate.objects.get(name='SRtest')
    assert new.project.name == 'SRtest'
    assert new.inventory.name == 'SRtest'
    assert [cred.name for cred in new.credentials.all()] == ['SRtest']
awx_collection/tests/sanity/ignore-2.10.txt (new file, 44 lines)
@@ -0,0 +1,44 @@
plugins/modules/tower_credential_type.py validate-modules:missing-module-utils-import
plugins/modules/tower_group.py validate-modules:missing-module-utils-import
plugins/modules/tower_host.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory_source.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_cancel.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_list.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_label.py validate-modules:missing-module-utils-import
plugins/modules/tower_notification.py validate-modules:missing-module-utils-import
plugins/modules/tower_organization.py validate-modules:missing-module-utils-import
plugins/modules/tower_project.py validate-modules:missing-module-utils-import
plugins/modules/tower_receive.py validate-modules:missing-module-utils-import
plugins/modules/tower_role.py validate-modules:missing-module-utils-import
plugins/modules/tower_settings.py validate-modules:missing-module-utils-import
plugins/modules/tower_team.py validate-modules:missing-module-utils-import
plugins/modules/tower_user.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_credential_type.py validate-modules:import-error
plugins/modules/tower_credential.py validate-modules:import-error
plugins/modules/tower_group.py validate-modules:import-error
plugins/modules/tower_host.py validate-modules:import-error
plugins/modules/tower_inventory.py validate-modules:import-error
plugins/modules/tower_inventory_source.py validate-modules:import-error
plugins/modules/tower_job_cancel.py validate-modules:import-error
plugins/modules/tower_job_launch.py validate-modules:import-error
plugins/modules/tower_job_list.py validate-modules:import-error
plugins/modules/tower_job_wait.py validate-modules:import-error
plugins/modules/tower_job_template.py validate-modules:import-error
plugins/modules/tower_label.py validate-modules:import-error
plugins/modules/tower_notification.py validate-modules:import-error
plugins/modules/tower_organization.py validate-modules:import-error
plugins/modules/tower_project.py validate-modules:import-error
plugins/modules/tower_receive.py validate-modules:import-error
plugins/modules/tower_role.py validate-modules:import-error
plugins/modules/tower_settings.py validate-modules:import-error
plugins/modules/tower_send.py validate-modules:import-error
plugins/modules/tower_team.py validate-modules:import-error
plugins/modules/tower_user.py validate-modules:import-error
plugins/modules/tower_workflow_launch.py validate-modules:import-error
plugins/modules/tower_workflow_template.py validate-modules:import-error
plugins/modules/tower_workflow_job_template.py validate-modules:import-error
awx_collection/tests/sanity/ignore-2.9.txt (new file, 44 lines)
@@ -0,0 +1,44 @@
plugins/modules/tower_credential_type.py validate-modules:missing-module-utils-import
plugins/modules/tower_group.py validate-modules:missing-module-utils-import
plugins/modules/tower_host.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory_source.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_cancel.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_list.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_label.py validate-modules:missing-module-utils-import
plugins/modules/tower_notification.py validate-modules:missing-module-utils-import
plugins/modules/tower_organization.py validate-modules:missing-module-utils-import
plugins/modules/tower_project.py validate-modules:missing-module-utils-import
plugins/modules/tower_receive.py validate-modules:missing-module-utils-import
plugins/modules/tower_role.py validate-modules:missing-module-utils-import
plugins/modules/tower_settings.py validate-modules:missing-module-utils-import
plugins/modules/tower_team.py validate-modules:missing-module-utils-import
plugins/modules/tower_user.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_credential_type.py validate-modules:import-error
plugins/modules/tower_credential.py validate-modules:import-error
plugins/modules/tower_group.py validate-modules:import-error
plugins/modules/tower_host.py validate-modules:import-error
plugins/modules/tower_inventory.py validate-modules:import-error
plugins/modules/tower_inventory_source.py validate-modules:import-error
plugins/modules/tower_job_cancel.py validate-modules:import-error
plugins/modules/tower_job_launch.py validate-modules:import-error
plugins/modules/tower_job_list.py validate-modules:import-error
plugins/modules/tower_job_wait.py validate-modules:import-error
plugins/modules/tower_job_template.py validate-modules:import-error
plugins/modules/tower_label.py validate-modules:import-error
plugins/modules/tower_notification.py validate-modules:import-error
plugins/modules/tower_organization.py validate-modules:import-error
plugins/modules/tower_project.py validate-modules:import-error
plugins/modules/tower_receive.py validate-modules:import-error
plugins/modules/tower_role.py validate-modules:import-error
plugins/modules/tower_settings.py validate-modules:import-error
plugins/modules/tower_send.py validate-modules:import-error
plugins/modules/tower_team.py validate-modules:import-error
plugins/modules/tower_user.py validate-modules:import-error
plugins/modules/tower_workflow_launch.py validate-modules:import-error
plugins/modules/tower_workflow_template.py validate-modules:import-error
plugins/modules/tower_workflow_job_template.py validate-modules:import-error
@@ -4,19 +4,26 @@ import awxkit.exceptions as exc

notification_endpoints = ("notification_templates", "notification_templates_started", "notification_templates_error",
                          "notification_templates_success")
+wfjt_notification_endpoints = notification_endpoints + ('notification_templates_approvals',)


class HasNotifications(object):

    def add_notification_template(self, notification_template, endpoint="notification_templates_success"):
-        if endpoint not in notification_endpoints:
+        from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
+        supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) \
+            else notification_endpoints
+        if endpoint not in supported_endpoints:
            raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
                             .format(endpoint, notification_endpoints))
        with suppress(exc.NoContent):
            self.related[endpoint].post(dict(id=notification_template.id))

    def remove_notification_template(self, notification_template, endpoint="notification_templates_success"):
-        if endpoint not in notification_endpoints:
+        from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
+        supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) \
+            else notification_endpoints
+        if endpoint not in supported_endpoints:
            raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
                             .format(endpoint, notification_endpoints))
        with suppress(exc.NoContent):