Merge pull request #5140 from ryanpetrello/downstream-hardening

merge a variety of downstream bug fixes

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
This commit is contained in:
softwarefactory-project-zuul[bot] 2019-10-29 18:19:10 +00:00 committed by GitHub
commit 8cb7b388dc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
93 changed files with 1589 additions and 596 deletions

7
.gitignore vendored
View File

@@ -135,9 +135,10 @@ use_dev_supervisor.txt
# Ansible module tests
awx_collection_test_venv/
awx_collection/*.tar.gz
awx_collection/galaxy.yml
/awx_collection_test_venv/
/awx_collection/*.tar.gz
/awx_collection/galaxy.yml
/sanity/
.idea/*
*.unison.tmp

View File

@ -399,6 +399,13 @@ flake8_collection:
test_collection_all: prepare_collection_venv test_collection flake8_collection
test_collection_sanity:
rm -rf sanity
mkdir -p sanity/ansible_collections/awx
cp -Ra awx_collection sanity/ansible_collections/awx/awx # symlinks do not work
cd sanity/ansible_collections/awx/awx && git init && git add . # requires both this file structure and a git repo, so there you go
cd sanity/ansible_collections/awx/awx && ansible-test sanity --test validate-modules
build_collection:
ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
ansible-galaxy collection build awx_collection --output-path=awx_collection

View File

@ -4338,13 +4338,30 @@ class NotificationTemplateSerializer(BaseSerializer):
error_list = []
collected_messages = []
def check_messages(messages):
for message_type in messages:
if message_type not in ('message', 'body'):
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
continue
message = messages[message_type]
if message is None:
continue
if not isinstance(message, str):
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
continue
if message_type == 'message':
if '\n' in message:
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
continue
collected_messages.append(message)
# Validate structure / content types
if not isinstance(messages, dict):
error_list.append(_("Expected dict for 'messages' field, found {}".format(type(messages))))
else:
for event in messages:
if event not in ['started', 'success', 'error']:
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', or 'error'").format(event))
if event not in ('started', 'success', 'error', 'workflow_approval'):
error_list.append(_("Event '{}' invalid, must be one of 'started', 'success', 'error', or 'workflow_approval'").format(event))
continue
event_messages = messages[event]
if event_messages is None:
@ -4352,21 +4369,21 @@ class NotificationTemplateSerializer(BaseSerializer):
if not isinstance(event_messages, dict):
error_list.append(_("Expected dict for event '{}', found {}").format(event, type(event_messages)))
continue
for message_type in event_messages:
if message_type not in ['message', 'body']:
error_list.append(_("Message type '{}' invalid, must be either 'message' or 'body'").format(message_type))
continue
message = event_messages[message_type]
if message is None:
continue
if not isinstance(message, str):
error_list.append(_("Expected string for '{}', found {}, ").format(message_type, type(message)))
continue
if message_type == 'message':
if '\n' in message:
error_list.append(_("Messages cannot contain newlines (found newline in {} event)".format(event)))
if event == 'workflow_approval':
for subevent in event_messages:
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
error_list.append(_("Workflow Approval event '{}' invalid, must be one of "
"'running', 'approved', 'timed_out', or 'denied'").format(subevent))
continue
collected_messages.append(message)
subevent_messages = event_messages[subevent]
if subevent_messages is None:
continue
if not isinstance(subevent_messages, dict):
error_list.append(_("Expected dict for workflow approval event '{}', found {}").format(subevent, type(subevent_messages)))
continue
check_messages(subevent_messages)
else:
check_messages(event_messages)
# Subclass to return name of undefined field
class DescriptiveUndefined(StrictUndefined):
@ -4497,8 +4514,18 @@ class NotificationSerializer(BaseSerializer):
'notification_type', 'recipients', 'subject', 'body')
def get_body(self, obj):
if obj.notification_type == 'webhook' and 'body' in obj.body:
return obj.body['body']
if obj.notification_type in ('webhook', 'pagerduty'):
if isinstance(obj.body, dict):
if 'body' in obj.body:
return obj.body['body']
elif isinstance(obj.body, str):
# attempt to load json string
try:
potential_body = json.loads(obj.body)
if isinstance(potential_body, dict):
return potential_body
except json.JSONDecodeError:
pass
return obj.body
def get_related(self, obj):
@ -4774,6 +4801,18 @@ class InstanceGroupSerializer(BaseSerializer):
raise serializers.ValidationError(_('Isolated instances may not be added or removed from instances groups via the API.'))
if self.instance and self.instance.controller_id is not None:
raise serializers.ValidationError(_('Isolated instance group membership may not be managed via the API.'))
if value and self.instance and self.instance.is_containerized:
raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
return value
def validate_policy_instance_percentage(self, value):
    """Reject a truthy percentage policy when the bound instance group is
    containerized; such groups may not be managed via the API."""
    if not value:
        return value
    if self.instance and self.instance.is_containerized:
        raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
    return value
def validate_policy_instance_minimum(self, value):
    """Reject a truthy minimum-instance policy when the bound instance group
    is containerized; such groups may not be managed via the API."""
    if not value:
        return value
    if self.instance and self.instance.is_containerized:
        raise serializers.ValidationError(_('Containerized instances may not be managed via the API'))
    return value
def validate_name(self, value):

View File

@ -102,7 +102,7 @@ from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.api.views.mixin import (
ControlledByScmMixin, InstanceGroupMembershipMixin,
OrganizationCountsMixin, RelatedJobsPreventDeleteMixin,
UnifiedJobDeletionMixin,
UnifiedJobDeletionMixin, NoTruncateMixin,
)
from awx.api.views.organization import ( # noqa
OrganizationList,
@ -383,6 +383,13 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP
serializer_class = serializers.InstanceGroupSerializer
permission_classes = (InstanceGroupTowerPermission,)
def update_raw_data(self, data):
    """Strip instance-policy fields from the browsable API's example payload
    for containerized instance groups (their policies are not API-managed)."""
    if self.get_object().is_containerized:
        for policy_field in ('policy_instance_percentage',
                             'policy_instance_minimum',
                             'policy_instance_list'):
            data.pop(policy_field, None)
    return super(InstanceGroupDetail, self).update_raw_data(data)
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if instance.controller is not None:
@ -2136,12 +2143,21 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
def perform_list_destroy(self, instance_list):
inv_source = self.get_parent_object()
with ignore_inventory_computed_fields():
# Activity stream doesn't record disassociation here anyway
# no signals-related reason to not bulk-delete
models.Host.groups.through.objects.filter(
host__inventory_sources=inv_source
).delete()
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
from awx.main.signals import disable_activity_stream
with disable_activity_stream():
# job host summary deletion necessary to avoid deadlock
models.JobHostSummary.objects.filter(host__inventory_sources=inv_source).update(host=None)
models.Host.objects.filter(inventory_sources=inv_source).delete()
r = super(InventorySourceHostsList, self).perform_list_destroy([])
else:
# Advance delete of group-host memberships to prevent deadlock
# Activity stream doesn't record disassociation here anyway
# no signals-related reason to not bulk-delete
models.Host.groups.through.objects.filter(
host__inventory_sources=inv_source
).delete()
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
return r
@ -2157,11 +2173,18 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
def perform_list_destroy(self, instance_list):
inv_source = self.get_parent_object()
with ignore_inventory_computed_fields():
# Same arguments for bulk delete as with host list
models.Group.hosts.through.objects.filter(
group__inventory_sources=inv_source
).delete()
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
from awx.main.signals import disable_activity_stream
with disable_activity_stream():
models.Group.objects.filter(inventory_sources=inv_source).delete()
r = super(InventorySourceGroupsList, self).perform_list_destroy([])
else:
# Advance delete of group-host memberships to prevent deadlock
# Same arguments for bulk delete as with host list
models.Group.hosts.through.objects.filter(
group__inventory_sources=inv_source
).delete()
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
return r
@ -3762,18 +3785,12 @@ class JobHostSummaryDetail(RetrieveAPIView):
serializer_class = serializers.JobHostSummarySerializer
class JobEventList(ListAPIView):
class JobEventList(NoTruncateMixin, ListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
search_fields = ('stdout',)
def get_serializer_context(self):
context = super().get_serializer_context()
if self.request.query_params.get('no_truncate'):
context.update(no_truncate=True)
return context
class JobEventDetail(RetrieveAPIView):
@ -3786,7 +3803,7 @@ class JobEventDetail(RetrieveAPIView):
return context
class JobEventChildrenList(SubListAPIView):
class JobEventChildrenList(NoTruncateMixin, SubListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
@ -3811,7 +3828,7 @@ class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
name = _('Job Event Hosts List')
class BaseJobEventsList(SubListAPIView):
class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
@ -4007,18 +4024,12 @@ class AdHocCommandRelaunch(GenericAPIView):
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
class AdHocCommandEventList(ListAPIView):
class AdHocCommandEventList(NoTruncateMixin, ListAPIView):
model = models.AdHocCommandEvent
serializer_class = serializers.AdHocCommandEventSerializer
search_fields = ('stdout',)
def get_serializer_context(self):
context = super().get_serializer_context()
if self.request.query_params.get('no_truncate'):
context.update(no_truncate=True)
return context
class AdHocCommandEventDetail(RetrieveAPIView):
@ -4031,7 +4042,7 @@ class AdHocCommandEventDetail(RetrieveAPIView):
return context
class BaseAdHocCommandEventsList(SubListAPIView):
class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
model = models.AdHocCommandEvent
serializer_class = serializers.AdHocCommandEventSerializer
@ -4297,8 +4308,15 @@ class NotificationTemplateTest(GenericAPIView):
def post(self, request, *args, **kwargs):
obj = self.get_object()
notification = obj.generate_notification("Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE),
{"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)})
msg = "Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE)
if obj.notification_type in ('email', 'pagerduty'):
body = "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)
elif obj.notification_type == 'webhook':
body = '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(obj.id, settings.TOWER_URL_BASE)
else:
body = {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)}
notification = obj.generate_notification(msg, body)
if not notification:
return Response({}, status=status.HTTP_400_BAD_REQUEST)
else:

View File

@ -270,3 +270,11 @@ class ControlledByScmMixin(object):
obj = super(ControlledByScmMixin, self).get_parent_object()
self._reset_inv_src_rev(obj)
return obj
class NoTruncateMixin(object):
    """View mixin that forwards the ``no_truncate`` query parameter into the
    serializer context, so event serializers can skip stdout truncation."""

    def get_serializer_context(self):
        ctx = super().get_serializer_context()
        # Any non-empty ?no_truncate value switches the flag on.
        if self.request.query_params.get('no_truncate'):
            ctx.update(no_truncate=True)
        return ctx

View File

@ -1,6 +1,5 @@
from hashlib import sha1
import hmac
import json
import logging
import urllib.parse
@ -151,13 +150,13 @@ class WebhookReceiverBase(APIView):
'webhook_credential': obj.webhook_credential,
'webhook_guid': event_guid,
},
'extra_vars': json.dumps({
'extra_vars': {
'tower_webhook_event_type': event_type,
'tower_webhook_event_guid': event_guid,
'tower_webhook_event_ref': event_ref,
'tower_webhook_status_api': status_api,
'tower_webhook_payload': request.data,
})
}
}
new_job = obj.create_unified_job(**kwargs)

View File

@ -465,7 +465,7 @@ class BaseAccess(object):
else:
relationship = 'members'
return access_method(obj, parent_obj, relationship, skip_sub_obj_read_check=True, data={})
except (ParseError, ObjectDoesNotExist):
except (ParseError, ObjectDoesNotExist, PermissionDenied):
return False
return False
@ -1660,26 +1660,19 @@ class JobAccess(BaseAccess):
except JobLaunchConfig.DoesNotExist:
config = None
if obj.job_template and (self.user not in obj.job_template.execute_role):
return False
# Check if JT execute access (and related prompts) is sufficient
if obj.job_template is not None:
if config is None:
prompts_access = False
elif not config.has_user_prompts(obj.job_template):
prompts_access = True
elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
prompts_access = False
if self.save_messages:
self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
else:
prompts_access = (
JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}) and
not config.has_unprompted(obj.job_template)
)
jt_access = self.user in obj.job_template.execute_role
if prompts_access and jt_access:
if config and obj.job_template:
if not config.has_user_prompts(obj.job_template):
return True
elif not jt_access:
return False
elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
# never allowed, not even for org admins
raise PermissionDenied(_('Job was launched with secret prompts provided by another user.'))
elif not config.has_unprompted(obj.job_template):
if JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
return True
org_access = bool(obj.inventory) and self.user in obj.inventory.organization.inventory_admin_role
project_access = obj.project is None or self.user in obj.project.admin_role
@ -2098,23 +2091,20 @@ class WorkflowJobAccess(BaseAccess):
self.messages['detail'] = _('Workflow Job was launched with unknown prompts.')
return False
# execute permission to WFJT is mandatory for any relaunch
if self.user not in template.execute_role:
return False
# Check if access to prompts to prevent relaunch
if config.prompts_dict():
if obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
if self.save_messages:
self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
return False
raise PermissionDenied(_("Job was launched with secret prompts provided by another user."))
if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
if self.save_messages:
self.messages['detail'] = _('Job was launched with prompts you lack access to.')
return False
raise PermissionDenied(_('Job was launched with prompts you lack access to.'))
if config.has_unprompted(template):
if self.save_messages:
self.messages['detail'] = _('Job was launched with prompts no longer accepted.')
return False
raise PermissionDenied(_('Job was launched with prompts no longer accepted.'))
# execute permission to WFJT is mandatory for any relaunch
return (self.user in template.execute_role)
return True # passed config checks
def can_recreate(self, obj):
node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credentials', 'unified_job_template')

View File

@ -513,6 +513,16 @@ register(
category_slug='jobs'
)
register(
'PUBLIC_GALAXY_ENABLED',
field_class=fields.BooleanField,
default=True,
label=_('Allow Access to Public Galaxy'),
help_text=_('Allow or deny access to the public Ansible Galaxy during project updates.'),
category=_('Jobs'),
category_slug='jobs'
)
register(
'STDOUT_MAX_BYTES_DISPLAY',
field_class=fields.IntegerField,

View File

@ -4,6 +4,7 @@ import importlib
import sys
import traceback
from kubernetes.config import kube_config
from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
@ -107,6 +108,14 @@ class TaskWorker(BaseWorker):
for callback in body.get('errbacks', []) or []:
callback['uuid'] = body['uuid']
self.perform_work(callback)
finally:
# It's frustrating that we have to do this, but the python k8s
# client leaves behind cacert files in /tmp, so we must clean up
# the tmpdir per-dispatcher process every time a new task comes in
try:
kube_config._cleanup_temp_files()
except Exception:
logger.exception('failed to cleanup k8s client tmp files')
for callback in body.get('callbacks', []) or []:
callback['uuid'] = body['uuid']

View File

@ -6,6 +6,7 @@ import stat
import tempfile
import time
import logging
import yaml
from django.conf import settings
import ansible_runner
@ -48,10 +49,17 @@ class IsolatedManager(object):
def build_inventory(self, hosts):
if self.instance and self.instance.is_containerized:
inventory = {'all': {'hosts': {}}}
fd, path = tempfile.mkstemp(
prefix='.kubeconfig', dir=self.private_data_dir
)
with open(path, 'wb') as temp:
temp.write(yaml.dump(self.pod_manager.kube_config).encode())
temp.flush()
os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
for host in hosts:
inventory['all']['hosts'][host] = {
"ansible_connection": "kubectl",
"ansible_kubectl_config": self.pod_manager.kube_config
"ansible_kubectl_config": path,
}
else:
inventory = '\n'.join([
@ -143,6 +151,8 @@ class IsolatedManager(object):
'- /artifacts/job_events/*-partial.json.tmp',
# don't rsync the ssh_key FIFO
'- /env/ssh_key',
# don't rsync kube config files
'- .kubeconfig*'
]
for filename, data in (

View File

@ -295,7 +295,10 @@ class PrimordialModel(HasEditsMixin, CreatedModifiedModel):
def __init__(self, *args, **kwargs):
r = super(PrimordialModel, self).__init__(*args, **kwargs)
self._prior_values_store = self._get_fields_snapshot()
if self.pk:
self._prior_values_store = self._get_fields_snapshot()
else:
self._prior_values_store = {}
return r
def save(self, *args, **kwargs):

View File

@ -73,7 +73,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
notification_configuration = prevent_search(JSONField(blank=False))
def default_messages():
return {'started': None, 'success': None, 'error': None}
return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
messages = JSONField(
null=True,
@ -92,25 +92,6 @@ class NotificationTemplate(CommonModelNameNotUnique):
def get_message(self, condition):
return self.messages.get(condition, {})
def build_notification_message(self, event_type, context):
env = sandbox.ImmutableSandboxedEnvironment()
templates = self.get_message(event_type)
msg_template = templates.get('message', {})
try:
notification_subject = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_subject = ''
msg_body = templates.get('body', {})
try:
notification_body = env.from_string(msg_body).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_body = ''
return (notification_subject, notification_body)
def get_absolute_url(self, request=None):
return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)
@ -128,19 +109,34 @@ class NotificationTemplate(CommonModelNameNotUnique):
old_messages = old_nt.messages
new_messages = self.messages
def merge_messages(local_old_messages, local_new_messages, local_event):
if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
local_old_event_msgs = local_old_messages[local_event]
local_new_event_msgs = local_new_messages[local_event]
for msg_type in ['message', 'body']:
if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
if old_messages is not None and new_messages is not None:
for event in ['started', 'success', 'error']:
for event in ('started', 'success', 'error', 'workflow_approval'):
if not new_messages.get(event, {}) and old_messages.get(event, {}):
new_messages[event] = old_messages[event]
continue
if new_messages.get(event, {}) and old_messages.get(event, {}):
old_event_msgs = old_messages[event]
new_event_msgs = new_messages[event]
for msg_type in ['message', 'body']:
if msg_type not in new_event_msgs and old_event_msgs.get(msg_type, None):
new_event_msgs[msg_type] = old_event_msgs[msg_type]
if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
new_messages.setdefault('workflow_approval', {})
for subevent in ('running', 'approved', 'timed_out', 'denied'):
old_wfa_messages = old_messages['workflow_approval']
new_wfa_messages = new_messages['workflow_approval']
if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
new_wfa_messages[subevent] = old_wfa_messages[subevent]
continue
if old_wfa_messages:
merge_messages(old_wfa_messages, new_wfa_messages, subevent)
else:
merge_messages(old_messages, new_messages, event)
new_messages.setdefault(event, None)
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if self.notification_configuration[field].startswith("$encrypted$"):
@ -169,12 +165,12 @@ class NotificationTemplate(CommonModelNameNotUnique):
def recipients(self):
return self.notification_configuration[self.notification_class.recipient_parameter]
def generate_notification(self, subject, message):
def generate_notification(self, msg, body):
notification = Notification(notification_template=self,
notification_type=self.notification_type,
recipients=smart_str(self.recipients),
subject=subject,
body=message)
subject=msg,
body=body)
notification.save()
return notification
@ -370,7 +366,7 @@ class JobNotificationMixin(object):
'verbosity': 0},
'job_friendly_name': 'Job',
'url': 'https://towerhost/#/jobs/playbook/1010',
'job_summary_dict': """{'url': 'https://towerhost/$/jobs/playbook/13',
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
@ -389,14 +385,14 @@ class JobNotificationMixin(object):
return context
def context(self, serialized_job):
"""Returns a context that can be used for rendering notification messages.
Context contains whitelisted content retrieved from a serialized job object
"""Returns a dictionary that can be used for rendering notification messages.
The context will contain whitelisted content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
and a url to the job run."""
context = {'job': {},
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_summary_dict': json.dumps(self.notification_data(), indent=4)}
'job_metadata': json.dumps(self.notification_data(), indent=4)}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
@ -434,32 +430,33 @@ class JobNotificationMixin(object):
context = self.context(job_serialization)
msg_template = body_template = None
msg = body = ''
# Use custom template if available
if nt.messages:
templates = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = templates.get('message', {})
body_template = templates.get('body', {})
template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = template.get('message', None)
body_template = template.get('body', None)
# If custom template not provided, look up default template
default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)
if msg_template:
try:
notification_subject = env.from_string(msg_template).render(**context)
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_subject = ''
else:
notification_subject = u"{} #{} '{}' {}: {}".format(self.get_notification_friendly_name(),
self.id,
self.name,
status,
self.get_ui_url())
notification_body = self.notification_data()
notification_body['friendly_name'] = self.get_notification_friendly_name()
msg = ''
if body_template:
try:
notification_body['body'] = env.from_string(body_template).render(**context)
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
notification_body['body'] = ''
body = ''
return (notification_subject, notification_body)
return (msg, body)
def send_notification_templates(self, status):
from awx.main.tasks import send_notifications # avoid circular import
@ -475,16 +472,13 @@ class JobNotificationMixin(object):
return
for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
try:
(notification_subject, notification_body) = self.build_notification_message(nt, status)
except AttributeError:
raise NotImplementedError("build_notification_message() does not exist" % status)
(msg, body) = self.build_notification_message(nt, status)
# Use kwargs to force late-binding
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())

View File

@ -2,6 +2,7 @@
# All Rights Reserved.
# Python
import json
import logging
from copy import copy
from urllib.parse import urljoin
@ -16,6 +17,9 @@ from django.core.exceptions import ObjectDoesNotExist
# Django-CRUM
from crum import get_current_user
from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
# AWX
from awx.api.versioning import reverse
from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,
@ -763,22 +767,45 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
connection.on_commit(send_it())
def build_approval_notification_message(self, nt, approval_status):
subject = []
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
subject.append(('The approval node "{}"').format(self.workflow_approval_template.name))
if approval_status == 'running':
subject.append(('needs review. This node can be viewed at: {}').format(workflow_url))
if approval_status == 'approved':
subject.append(('was approved. {}').format(workflow_url))
if approval_status == 'timed_out':
subject.append(('has timed out. {}').format(workflow_url))
elif approval_status == 'denied':
subject.append(('was denied. {}').format(workflow_url))
subject = " ".join(subject)
body = self.notification_data()
body['body'] = subject
env = sandbox.ImmutableSandboxedEnvironment()
return subject, body
context = self.context(approval_status)
msg_template = body_template = None
msg = body = ''
# Use custom template if available
if nt.messages and nt.messages.get('workflow_approval', None):
template = nt.messages['workflow_approval'].get(approval_status, {})
msg_template = template.get('message', None)
body_template = template.get('body', None)
# If custom template not provided, look up default template
default_template = nt.notification_class.default_messages['workflow_approval'][approval_status]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)
if msg_template:
try:
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = ''
if body_template:
try:
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
body = ''
return (msg, body)
def context(self, approval_status):
    """Return the template-rendering context for a workflow-approval
    notification: status, node name, workflow URL, and serialized metadata."""
    job_metadata = json.dumps(self.notification_data(), indent=4)
    approval_url = urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.workflow_job.id))
    return {
        'approval_status': approval_status,
        'approval_node_name': self.workflow_approval_template.name,
        'workflow_url': approval_url,
        'job_metadata': job_metadata,
    }
@property
def workflow_job_template(self):

View File

@ -1,21 +1,10 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
from django.utils.translation import ugettext_lazy as _
class AWXBaseEmailBackend(BaseEmailBackend):
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
return body

View File

@@ -0,0 +1,20 @@
# Copyright (c) 2019 Ansible, Inc.
# All Rights Reserved.
class CustomNotificationBase(object):
    """Default message/body templates shared by the notification backends.

    ``default_messages`` maps each notification event to its default Jinja
    templates; the ``workflow_approval`` event nests one template set per
    approval subevent.
    """

    DEFAULT_MSG = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
    DEFAULT_BODY = "{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_metadata }}"

    default_messages = {
        "started": {"message": DEFAULT_MSG, "body": None},
        "success": {"message": DEFAULT_MSG, "body": None},
        "error": {"message": DEFAULT_MSG, "body": None},
        "workflow_approval": {
            "running": {
                "message": ('The approval node "{{ approval_node_name }}" needs review. '
                            'This node can be viewed at: {{ workflow_url }}'),
                "body": None,
            },
            "approved": {
                "message": 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}',
                "body": None,
            },
            "timed_out": {
                "message": 'The approval node "{{ approval_node_name }}" has timed out. {{ workflow_url }}',
                "body": None,
            },
            "denied": {
                "message": 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}',
                "body": None,
            },
        },
    }

View File

@ -1,14 +1,15 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.custom_notification_base import CustomNotificationBase
DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
class CustomEmailBackend(EmailBackend):
class CustomEmailBackend(EmailBackend, CustomNotificationBase):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
@ -19,22 +20,17 @@ class CustomEmailBackend(EmailBackend):
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"},
"timeout": {"label": "Timeout", "type": "int", "default": 30}}
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
DEFAULT_BODY = smart_text(_("{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_summary_dict }}"))
default_messages = {"started": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
"success": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
"error": {"message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
recipient_parameter = "recipients"
sender_parameter = "sender"
default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"approved": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
# leave body unchanged (expect a string)
return body

View File

@ -8,24 +8,21 @@ import dateutil.parser as dp
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.grafana_backend')
class GrafanaBackend(AWXBaseEmailBackend):
class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"},
"grafana_key": {"label": "Grafana API Key", "type": "password"}}
recipient_parameter = "grafana_url"
sender_parameter = None
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, grafana_key,dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
fail_silently=False, **kwargs):
super(GrafanaBackend, self).__init__(fail_silently=fail_silently)

View File

@ -7,12 +7,14 @@ import requests
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.hipchat_backend')
class HipChatBackend(AWXBaseEmailBackend):
class HipChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"token": {"label": "Token", "type": "password"},
"rooms": {"label": "Destination Rooms", "type": "list"},
@ -23,11 +25,6 @@ class HipChatBackend(AWXBaseEmailBackend):
recipient_parameter = "rooms"
sender_parameter = "message_from"
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, token, color, api_url, notify, fail_silently=False, **kwargs):
super(HipChatBackend, self).__init__(fail_silently=fail_silently)
self.token = token

View File

@ -9,12 +9,14 @@ import irc.client
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.irc_backend')
class IrcBackend(AWXBaseEmailBackend):
class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
"port": {"label": "IRC Server Port", "type": "int"},
@ -25,11 +27,6 @@ class IrcBackend(AWXBaseEmailBackend):
recipient_parameter = "targets"
sender_parameter = None
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, server, port, nickname, password, use_ssl, fail_silently=False, **kwargs):
super(IrcBackend, self).__init__(fail_silently=fail_silently)
self.server = server

View File

@ -7,23 +7,20 @@ import json
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.mattermost_backend')
class MattermostBackend(AWXBaseEmailBackend):
class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"mattermost_url": {"label": "Target URL", "type": "string"},
"mattermost_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
recipient_parameter = "mattermost_url"
sender_parameter = None
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, mattermost_no_verify_ssl=False, mattermost_channel=None, mattermost_username=None,
mattermost_icon_url=None, fail_silently=False, **kwargs):
super(MattermostBackend, self).__init__(fail_silently=fail_silently)

View File

@ -1,17 +1,23 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
import logging
import pygerduty
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
DEFAULT_BODY = CustomNotificationBase.DEFAULT_BODY
DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
class PagerDutyBackend(AWXBaseEmailBackend):
class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
"token": {"label": "API Token", "type": "password"},
@ -20,11 +26,14 @@ class PagerDutyBackend(AWXBaseEmailBackend):
recipient_parameter = "service_key"
sender_parameter = "client_name"
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
DEFAULT_BODY = "{{ job_summary_dict }}"
default_messages = {"started": { "message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
"success": { "message": DEFAULT_SUBJECT, "body": DEFAULT_BODY},
"error": { "message": DEFAULT_SUBJECT, "body": DEFAULT_BODY}}
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"workflow_approval": {"running": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"approved": {"message": DEFAULT_MSG,"body": DEFAULT_BODY},
"timed_out": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
"denied": {"message": DEFAULT_MSG, "body": DEFAULT_BODY}}}
def __init__(self, subdomain, token, fail_silently=False, **kwargs):
super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
@ -32,6 +41,16 @@ class PagerDutyBackend(AWXBaseEmailBackend):
self.token = token
def format_body(self, body):
    """Normalize a notification body for PagerDuty.

    ``body`` may arrive either as a dict or as a string (possibly a
    JSON-encoded dict). If it is a string that parses to a dict, return
    the parsed dict; otherwise return the value unchanged.
    """
    # Only attempt JSON parsing on strings: json.loads() raises TypeError
    # (not JSONDecodeError) on a dict, which the original code failed to
    # handle despite documenting that body can already be a dict.
    if isinstance(body, str):
        try:
            potential_body = json.loads(body)
            if isinstance(potential_body, dict):
                body = potential_body
        except json.JSONDecodeError:
            # Not JSON at all -- a plain string body is also acceptable.
            pass
    return body
def send_messages(self, messages):

View File

@ -7,22 +7,20 @@ import json
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.rocketchat_backend')
class RocketChatBackend(AWXBaseEmailBackend):
class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"rocketchat_url": {"label": "Target URL", "type": "string"},
"rocketchat_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
recipient_parameter = "rocketchat_url"
sender_parameter = None
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, rocketchat_no_verify_ssl=False, rocketchat_username=None, rocketchat_icon_url=None, fail_silently=False, **kwargs):
super(RocketChatBackend, self).__init__(fail_silently=fail_silently)

View File

@ -6,24 +6,21 @@ from slackclient import SlackClient
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.slack_backend')
WEBSOCKET_TIMEOUT = 30
class SlackBackend(AWXBaseEmailBackend):
class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"token": {"label": "Token", "type": "password"},
"channels": {"label": "Destination Channels", "type": "list"}}
recipient_parameter = "channels"
sender_parameter = None
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, token, hex_color="", fail_silently=False, **kwargs):
super(SlackBackend, self).__init__(fail_silently=fail_silently)
self.token = token

View File

@ -7,12 +7,14 @@ from twilio.rest import Client
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.twilio_backend')
class TwilioBackend(AWXBaseEmailBackend):
class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
"account_token": {"label": "Account Token", "type": "password"},
@ -21,11 +23,6 @@ class TwilioBackend(AWXBaseEmailBackend):
recipient_parameter = "to_numbers"
sender_parameter = "from_number"
DEFAULT_SUBJECT = "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}"
default_messages = {"started": {"message": DEFAULT_SUBJECT},
"success": {"message": DEFAULT_SUBJECT},
"error": {"message": DEFAULT_SUBJECT}}
def __init__(self, account_sid, account_token, fail_silently=False, **kwargs):
super(TwilioBackend, self).__init__(fail_silently=fail_silently)
self.account_sid = account_sid

View File

@ -7,13 +7,15 @@ import requests
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.utils import get_awx_version
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(AWXBaseEmailBackend):
class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
@ -24,10 +26,16 @@ class WebhookBackend(AWXBaseEmailBackend):
recipient_parameter = "url"
sender_parameter = None
DEFAULT_BODY = "{{ job_summary_dict }}"
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = {"started": {"body": DEFAULT_BODY},
"success": {"body": DEFAULT_BODY},
"error": {"body": DEFAULT_BODY}}
"error": {"body": DEFAULT_BODY},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. '
'This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'}}}
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
self.http_method = http_method
@ -38,15 +46,13 @@ class WebhookBackend(AWXBaseEmailBackend):
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
# If `body` has body field, attempt to use this as the main body,
# otherwise, leave it as a sub-field
if isinstance(body, dict) and 'body' in body and isinstance(body['body'], str):
try:
potential_body = json.loads(body['body'])
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
pass
# expect body to be a string representing a dict
try:
potential_body = json.loads(body)
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
body = {}
return body
def send_messages(self, messages):

View File

@ -12,10 +12,12 @@ class UriCleaner(object):
@staticmethod
def remove_sensitive(cleartext):
# exclude_list contains the items that will _not_ be redacted
exclude_list = [settings.PUBLIC_GALAXY_SERVER['url']]
if settings.PRIMARY_GALAXY_URL:
exclude_list = [settings.PRIMARY_GALAXY_URL] + [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
else:
exclude_list = [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
exclude_list += [settings.PRIMARY_GALAXY_URL]
if settings.FALLBACK_GALAXY_SERVERS:
exclude_list += [server['url'] for server in settings.FALLBACK_GALAXY_SERVERS]
redactedtext = cleartext
text_index = 0
while True:

View File

@ -1,9 +1,5 @@
import collections
import os
import stat
import time
import yaml
import tempfile
import logging
from base64 import b64encode
@ -88,8 +84,17 @@ class PodManager(object):
@cached_property
def kube_api(self):
my_client = config.new_client_from_config(config_file=self.kube_config)
return client.CoreV1Api(api_client=my_client)
# this feels a little janky, but it's what k8s' own code does
# internally when it reads kube config files from disk:
# https://github.com/kubernetes-client/python-base/blob/0b208334ef0247aad9afcaae8003954423b61a0d/config/kube_config.py#L643
loader = config.kube_config.KubeConfigLoader(
config_dict=self.kube_config
)
cfg = type.__call__(client.Configuration)
loader.load_and_set(cfg)
return client.CoreV1Api(api_client=client.ApiClient(
configuration=cfg
))
@property
def pod_name(self):
@ -174,10 +179,4 @@ def generate_tmp_kube_config(credential, namespace):
).decode() # decode the base64 data into a str
else:
config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True
fd, path = tempfile.mkstemp(prefix='kubeconfig')
with open(path, 'wb') as temp:
temp.write(yaml.dump(config).encode())
temp.flush()
os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
return path
return config

View File

@ -252,19 +252,25 @@ class TaskManager():
logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(
task.log_format, task.execution_node, controller_node))
elif rampart_group.is_containerized:
# find one real, non-containerized instance with capacity to
# act as the controller for k8s API interaction
match = None
for group in InstanceGroup.objects.all():
if group.is_containerized or group.controller_id:
continue
match = group.find_largest_idle_instance()
if match:
break
task.instance_group = rampart_group
if not task.supports_isolation():
if task.supports_isolation():
task.controller_node = match.hostname
else:
# project updates and inventory updates don't *actually* run in pods,
# so just pick *any* non-isolated, non-containerized host and use it
for group in InstanceGroup.objects.all():
if group.is_containerized or group.controller_id:
continue
match = group.find_largest_idle_instance()
if match:
task.execution_node = match.hostname
logger.debug('Submitting containerized {} to queue {}.'.format(
task.log_format, task.execution_node))
break
# as the execution node
task.execution_node = match.hostname
logger.debug('Submitting containerized {} to queue {}.'.format(
task.log_format, task.execution_node))
else:
task.instance_group = rampart_group
if instance is not None:

View File

@ -1423,7 +1423,6 @@ class BaseTask(object):
def deploy_container_group_pod(self, task):
from awx.main.scheduler.kubernetes import PodManager # Avoid circular import
pod_manager = PodManager(self.instance)
self.cleanup_paths.append(pod_manager.kube_config)
try:
log_name = task.log_format
logger.debug(f"Launching pod for {log_name}.")
@ -1452,7 +1451,7 @@ class BaseTask(object):
self.update_model(task.pk, execution_node=pod_manager.pod_name)
return pod_manager
@ -1959,9 +1958,15 @@ class RunProjectUpdate(BaseTask):
env['PROJECT_UPDATE_ID'] = str(project_update.pk)
env['ANSIBLE_CALLBACK_PLUGINS'] = self.get_path_to('..', 'plugins', 'callback')
env['ANSIBLE_GALAXY_IGNORE'] = True
# Set up the fallback server, which is the normal Ansible Galaxy by default
galaxy_servers = list(settings.FALLBACK_GALAXY_SERVERS)
# If private galaxy URL is non-blank, that means this feature is enabled
# Set up the public Galaxy server, if enabled
if settings.PUBLIC_GALAXY_ENABLED:
galaxy_servers = [settings.PUBLIC_GALAXY_SERVER]
else:
galaxy_servers = []
# Set up fallback Galaxy servers, if configured
if settings.FALLBACK_GALAXY_SERVERS:
galaxy_servers = settings.FALLBACK_GALAXY_SERVERS + galaxy_servers
# Set up the primary Galaxy server, if configured
if settings.PRIMARY_GALAXY_URL:
galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers
for key in GALAXY_SERVER_FIELDS:
@ -2354,6 +2359,27 @@ class RunInventoryUpdate(BaseTask):
env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
elif inventory_update.source == 'file':
raise NotImplementedError('Cannot update file sources through the task system.')
if inventory_update.source == 'scm' and inventory_update.source_project_update:
env_key = 'ANSIBLE_COLLECTIONS_PATHS'
config_setting = 'collections_paths'
folder = 'requirements_collections'
default = '~/.ansible/collections:/usr/share/ansible/collections'
config_values = read_ansible_config(os.path.join(private_data_dir, 'project'), [config_setting])
paths = default.split(':')
if env_key in env:
for path in env[env_key].split(':'):
if path not in paths:
paths = [env[env_key]] + paths
elif config_setting in config_values:
for path in config_values[config_setting].split(':'):
if path not in paths:
paths = [config_values[config_setting]] + paths
paths = [os.path.join(private_data_dir, folder)] + paths
env[env_key] = os.pathsep.join(paths)
return env
def write_args_file(self, private_data_dir, args):
@ -2452,7 +2478,7 @@ class RunInventoryUpdate(BaseTask):
# Use the vendored script path
inventory_path = self.get_path_to('..', 'plugins', 'inventory', injector.script_name)
elif src == 'scm':
inventory_path = inventory_update.get_actual_source_path()
inventory_path = os.path.join(private_data_dir, 'project', inventory_update.source_path)
elif src == 'custom':
handle, inventory_path = tempfile.mkstemp(dir=private_data_dir)
f = os.fdopen(handle, 'w')
@ -2473,7 +2499,7 @@ class RunInventoryUpdate(BaseTask):
'''
src = inventory_update.source
if src == 'scm' and inventory_update.source_project_update:
return inventory_update.source_project_update.get_project_path(check_if_exists=False)
return os.path.join(private_data_dir, 'project')
if src in CLOUD_PROVIDERS:
injector = None
if src in InventorySource.injectors:
@ -2509,8 +2535,10 @@ class RunInventoryUpdate(BaseTask):
project_update_task = local_project_sync._get_task_class()
try:
project_update_task().run(local_project_sync.id)
inventory_update.inventory_source.scm_last_revision = local_project_sync.project.scm_revision
sync_task = project_update_task(job_private_data_dir=private_data_dir)
sync_task.run(local_project_sync.id)
local_project_sync.refresh_from_db()
inventory_update.inventory_source.scm_last_revision = local_project_sync.scm_revision
inventory_update.inventory_source.save(update_fields=['scm_last_revision'])
except Exception:
inventory_update = self.update_model(
@ -2518,6 +2546,13 @@ class RunInventoryUpdate(BaseTask):
job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
('project_update', local_project_sync.name, local_project_sync.id)))
raise
elif inventory_update.source == 'scm' and inventory_update.launch_type == 'scm' and source_project:
# This follows update, not sync, so make copy here
project_path = source_project.get_project_path(check_if_exists=False)
RunProjectUpdate.make_local_copy(
project_path, os.path.join(private_data_dir, 'project'),
source_project.scm_type, source_project.scm_revision
)
@task()

View File

@ -0,0 +1,45 @@
import pytest
from awx.api.versioning import reverse
from awx.main.models import AdHocCommand, AdHocCommandEvent, JobEvent
@pytest.mark.django_db
@pytest.mark.parametrize('truncate, expected', [
    (True, False),
    (False, True),
])
def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
    # Event stdout longer than 1KB is truncated on the sublist view by
    # default; the ?no_truncate=1 query param opts out of truncation.
    org_objects = organization_factory("org", superusers=['admin'])
    job_template = job_template_factory(
        "jt", organization=org_objects.organization,
        inventory='test_inv', project='test_proj',
    ).job_template
    unified_job = job_template.create_unified_job()
    JobEvent.create_from_data(
        job_id=unified_job.pk, uuid='abc123', event='runner_on_start',
        stdout='a' * 1025,
    )
    events_url = reverse('api:job_job_events_list', kwargs={'pk': unified_job.pk})
    if not truncate:
        events_url += '?no_truncate=1'
    response = get(events_url, user=org_objects.superusers.admin, expect=200)
    full_length_returned = len(response.data['results'][0]['stdout']) == 1025
    assert full_length_returned == expected
@pytest.mark.django_db
@pytest.mark.parametrize('truncate, expected', [
    (True, False),
    (False, True),
])
def test_ad_hoc_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
    # Same truncation contract as job events, but for ad hoc command events.
    org_objects = organization_factory("org", superusers=['admin'])
    command = AdHocCommand()
    command.save()
    AdHocCommandEvent.create_from_data(
        ad_hoc_command_id=command.pk, uuid='abc123', event='runner_on_start',
        stdout='a' * 1025,
    )
    events_url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': command.pk})
    if not truncate:
        events_url += '?no_truncate=1'
    response = get(events_url, user=org_objects.superusers.admin, expect=200)
    full_length_returned = len(response.data['results'][0]['stdout']) == 1025
    assert full_length_returned == expected

View File

@ -117,3 +117,10 @@ def test_handle_content_type(post, admin):
admin,
content_type='text/html',
expect=415)
@pytest.mark.django_db
def test_basic_not_found(get, admin_user):
    # A nonsense path under the API root should 404 with a JSON detail message.
    bogus_url = reverse('api:api_v2_root_view') + 'fooooooo'
    response = get(bogus_url, user=admin_user, expect=404)
    assert response.data.get('detail') == 'The requested resource could not be found.'

View File

@ -8,6 +8,8 @@ from unittest.mock import PropertyMock
# Django
from django.urls import resolve
from django.http import Http404
from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from django.db.backends.sqlite3.base import SQLiteCursorWrapper
@ -581,8 +583,12 @@ def _request(verb):
if 'format' not in kwargs and 'content_type' not in kwargs:
kwargs['format'] = 'json'
view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
request = getattr(APIRequestFactory(), verb)(url, **kwargs)
request_error = None
try:
view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
except Http404 as e:
request_error = e
if isinstance(kwargs.get('cookies', None), dict):
for key, value in kwargs['cookies'].items():
request.COOKIES[key] = value
@ -591,7 +597,10 @@ def _request(verb):
if user:
force_authenticate(request, user=user)
response = view(request, *view_args, **view_kwargs)
if not request_error:
response = view(request, *view_args, **view_kwargs)
else:
response = response_for_exception(request, request_error)
if middleware:
middleware.process_response(request, response)
if expect:

View File

@ -87,7 +87,7 @@ class TestJobNotificationMixin(object):
'use_fact_cache': bool,
'verbosity': int},
'job_friendly_name': str,
'job_summary_dict': str,
'job_metadata': str,
'url': str}
@ -144,5 +144,3 @@ class TestJobNotificationMixin(object):
context_stub = JobNotificationMixin.context_stub()
check_structure_and_completeness(TestJobNotificationMixin.CONTEXT_STRUCTURE, context_stub)

View File

@ -1,5 +1,4 @@
import subprocess
import yaml
import base64
from unittest import mock # noqa
@ -51,6 +50,5 @@ def test_kubectl_ssl_verification(containerized_job):
cred.inputs['ssl_ca_cert'] = cert.stdout
cred.save()
pm = PodManager(containerized_job)
config = yaml.load(open(pm.kube_config), Loader=yaml.FullLoader)
ca_data = config['clusters'][0]['cluster']['certificate-authority-data']
ca_data = pm.kube_config['clusters'][0]['cluster']['certificate-authority-data']
assert cert.stdout == base64.b64decode(ca_data.encode())

View File

@ -264,6 +264,7 @@ def test_inventory_update_injected_content(this_kind, script_or_plugin, inventor
assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == ('auto' if use_plugin else 'script')
set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0'])
env, content = read_content(private_data_dir, envvars, inventory_update)
env.pop('ANSIBLE_COLLECTIONS_PATHS', None) # collection paths not relevant to this test
base_dir = os.path.join(DATA, script_or_plugin)
if not os.path.exists(base_dir):
os.mkdir(base_dir)

View File

@ -43,7 +43,7 @@ def test_basic_parameterization(get, post, user, organization):
assert 'url' in response.data['notification_configuration']
assert 'headers' in response.data['notification_configuration']
assert 'messages' in response.data
assert response.data['messages'] == {'started': None, 'success': None, 'error': None}
assert response.data['messages'] == {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
@pytest.mark.django_db

View File

@ -19,6 +19,8 @@ from awx.main.models import (
Credential
)
from rest_framework.exceptions import PermissionDenied
from crum import impersonate
@ -252,7 +254,8 @@ class TestJobRelaunchAccess:
assert 'job_var' in job.launch_config.extra_data
assert bob.can_access(Job, 'start', job, validate_license=False)
assert not alice.can_access(Job, 'start', job, validate_license=False)
with pytest.raises(PermissionDenied):
alice.can_access(Job, 'start', job, validate_license=False)
@pytest.mark.django_db

View File

@ -7,6 +7,8 @@ from awx.main.access import (
# WorkflowJobNodeAccess
)
from rest_framework.exceptions import PermissionDenied
from awx.main.models import InventorySource, JobLaunchConfig
@ -169,7 +171,8 @@ class TestWorkflowJobAccess:
wfjt.ask_inventory_on_launch = True
wfjt.save()
JobLaunchConfig.objects.create(job=workflow_job, inventory=inventory)
assert not WorkflowJobAccess(rando).can_start(workflow_job)
with pytest.raises(PermissionDenied):
WorkflowJobAccess(rando).can_start(workflow_job)
inventory.use_role.members.add(rando)
assert WorkflowJobAccess(rando).can_start(workflow_job)

View File

@ -26,7 +26,7 @@ class TestNotificationTemplateSerializer():
{'started': {'message': '{{ job.id }}', 'body': '{{ job.status }}'},
'success': {'message': None, 'body': '{{ job_friendly_name }}'},
'error': {'message': '{{ url }}', 'body': None}},
{'started': {'body': '{{ job_summary_dict }}'}},
{'started': {'body': '{{ job_metadata }}'}},
{'started': {'body': '{{ job.summary_fields.inventory.total_hosts }}'}},
{'started': {'body': u'Iñtërnâtiônàlizætiøn'}}
])

View File

@ -288,13 +288,17 @@ class AWXProxyHandler(logging.Handler):
'''
thread_local = threading.local()
_auditor = None
def __init__(self, **kwargs):
# TODO: process 'level' kwarg
super(AWXProxyHandler, self).__init__(**kwargs)
self._handler = None
self._old_kwargs = {}
if settings.LOG_AGGREGATOR_AUDIT:
@property
def auditor(self):
if not self._auditor:
self._auditor = logging.handlers.RotatingFileHandler(
filename='/var/log/tower/external.log',
maxBytes=1024 * 1024 * 50, # 50 MB
@ -307,6 +311,7 @@ class AWXProxyHandler(logging.Handler):
return json.dumps(message)
self._auditor.setFormatter(WritableLogstashFormatter())
return self._auditor
def get_handler_class(self, protocol):
return HANDLER_MAPPING.get(protocol, AWXNullHandler)
@ -341,8 +346,8 @@ class AWXProxyHandler(logging.Handler):
if AWXProxyHandler.thread_local.enabled:
actual_handler = self.get_handler()
if settings.LOG_AGGREGATOR_AUDIT:
self._auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
self._auditor.emit(record)
self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
self.auditor.emit(record)
return actual_handler.emit(record)
def perform_test(self, custom_settings):

View File

@ -635,16 +635,18 @@ PRIMARY_GALAXY_USERNAME = ''
PRIMARY_GALAXY_TOKEN = ''
PRIMARY_GALAXY_PASSWORD = ''
PRIMARY_GALAXY_AUTH_URL = ''
# Settings for the fallback galaxy server(s), normally this is the
# actual Ansible Galaxy site.
# server options: 'id', 'url', 'username', 'password', 'token', 'auth_url'
# To not use any fallback servers set this to []
FALLBACK_GALAXY_SERVERS = [
{
'id': 'galaxy',
'url': 'https://galaxy.ansible.com'
}
]
# Settings for the public galaxy server(s).
PUBLIC_GALAXY_ENABLED = True
PUBLIC_GALAXY_SERVER = {
'id': 'galaxy',
'url': 'https://galaxy.ansible.com'
}
# List of dicts of fallback (additional) Galaxy servers. If configured, these
# will be higher precedence than public Galaxy, but lower than primary Galaxy.
# Available options: 'id', 'url', 'username', 'password', 'token', 'auth_url'
FALLBACK_GALAXY_SERVERS = []
# Enable bubblewrap support for running jobs (playbook runs only).
# Note: This setting may be overridden by database settings.

View File

@ -282,10 +282,12 @@ function getLaunchedByDetails () {
tooltip = strings.get('tooltips.SCHEDULE');
link = `/#/templates/job_template/${jobTemplate.id}/schedules/${schedule.id}`;
value = $filter('sanitize')(schedule.name);
} else {
} else if (schedule) {
tooltip = null;
link = null;
value = $filter('sanitize')(schedule.name);
} else {
return null;
}
return { label, link, tooltip, value };

View File

@ -5,7 +5,7 @@
<!-- LEFT PANE HEADER ACTIONS -->
<div class="JobResults-panelHeaderButtonActions">
<!-- RELAUNCH ACTION -->
<at-relaunch job="vm.job"></at-relaunch>
<at-relaunch ng-if="vm.job" job="vm.job"></at-relaunch>
<!-- CANCEL ACTION -->
<button

View File

@ -213,8 +213,8 @@ function JobRenderService ($q, $compile, $sce, $window) {
const record = this.createRecord(event, lines);
if (lines.length === 1 && lines[0] === '') {
// Some events, mainly runner_on_start events, have an actual line count of 1
// (stdout = '') and a claimed line count of 0 (end_line - start_line = 0).
// runner_on_start, runner_on_ok, and a few other events have an actual line count
// of 1 (stdout = '') and a claimed line count of 0 (end_line - start_line = 0).
// Since a zero-length string has an actual line count of 1, they'll still get
// rendered as blank lines unless we intercept them and add some special
// handling to remove them.

View File

@ -208,6 +208,7 @@
max-width: none !important;
width: 100% !important;
padding-right: 0px !important;
margin-top: 10px;
}
.Form-formGroup--checkbox{

View File

@ -15,7 +15,9 @@
title="{{ label || vm.strings.get('code_mirror.label.VARIABLES') }}"
tabindex="-1"
ng-if="tooltip">
<i class="fa fa-question-circle"></i>
<span class="at-Popover-icon" ng-class="{ 'at-Popover-icon--defaultCursor': popover.on === 'mouseenter' && !popover.click }">
<i class="fa fa-question-circle"></i>
</span>
</a>
<div class="atCodeMirror-toggleContainer FormToggle-container">
<div id="{{ name }}_parse_type" class="btn-group">

View File

@ -202,6 +202,7 @@
.at-Row-toggle {
align-self: flex-start;
margin-right: @at-space-4x;
margin-left: 15px;
}
.at-Row-actions {
@ -385,29 +386,3 @@
margin-right: @at-margin-right-list-row-item-inline-label;
}
}
@media screen and (max-width: @at-breakpoint-instances-wrap) {
.at-Row-items--instances {
margin-bottom: @at-padding-bottom-instances-wrap;
}
}
@media screen and (max-width: @at-breakpoint-compact-list) {
.at-Row-actions {
align-items: center;
}
.at-RowAction {
margin: @at-margin-list-row-action-mobile;
}
.at-RowItem--inline {
display: flex;
margin-right: inherit;
.at-RowItem-label {
width: @at-width-list-row-item-label;
margin-right: inherit;
}
}
}

View File

@ -89,6 +89,9 @@ export default ['i18n', function(i18n) {
type: 'text',
reset: 'PRIMARY_GALAXY_AUTH_URL',
},
PUBLIC_GALAXY_ENABLED: {
type: 'toggleSwitch',
},
AWX_TASK_ENV: {
type: 'textarea',
reset: 'AWX_TASK_ENV',

View File

@ -1,9 +1,11 @@
.CapacityAdjuster {
margin-right: @at-space-4x;
margin-top: 15px;
margin-left: -10px;
position: relative;
&-valueLabel {
bottom: @at-space-5x;
top: -10px;
color: @at-color-body-text;
font-size: @at-font-size;
position: absolute;

View File

@ -5,6 +5,8 @@ capacity-bar {
font-size: @at-font-size;
min-width: 100px;
white-space: nowrap;
margin-top: 5px;
margin-bottom: 5px;
.CapacityBar {
background-color: @default-bg;
@ -42,12 +44,4 @@ capacity-bar {
text-align: right;
text-transform: uppercase;
}
.Capacity-details--percentage {
width: 40px;
}
&:only-child {
margin-right: 50px;
}
}

View File

@ -12,6 +12,7 @@ function AddContainerGroupController(ToJSON, $scope, $state, models, strings, i1
vm.form = instanceGroup.createFormSchema('post');
vm.form.name.required = true;
delete vm.form.name.help_text;
vm.form.credential = {
type: 'field',
@ -22,6 +23,7 @@ function AddContainerGroupController(ToJSON, $scope, $state, models, strings, i1
vm.form.credential._route = "instanceGroups.addContainerGroup.credentials";
vm.form.credential._model = credential;
vm.form.credential._placeholder = strings.get('container.CREDENTIAL_PLACEHOLDER');
vm.form.credential.help_text = strings.get('container.CREDENTIAL_HELP_TEXT');
vm.form.credential.required = true;
vm.form.extraVars = {
@ -29,6 +31,7 @@ function AddContainerGroupController(ToJSON, $scope, $state, models, strings, i1
value: DataSet.data.actions.POST.pod_spec_override.default,
name: 'extraVars',
toggleLabel: strings.get('container.POD_SPEC_TOGGLE'),
tooltip: strings.get('container.EXTRA_VARS_HELP_TEXT')
};
vm.tab = {

View File

@ -1,8 +1,8 @@
<div ui-view="credentials"></div>
<a class="containerGroups-messageBar-link" href="https://docs.ansible.com/ansible-tower/latest/html/administration/external_execution_envs.html#container-group-considerations" target="_blank" style="color: white">
<a class="containerGroups-messageBar-link" href="https://docs.ansible.com/ansible-tower/latest/html/administration/external_execution_envs.html#container-groups" target="_blank" style="color: white">
<div class="Section-messageBar">
<i class="Section-messageBar-warning fa fa-warning"></i>
<span class="Section-messageBar-text">This feature is tech preview, and is subject to change in a future release. Click here for documentation.</span>
<span class="Section-messageBar-text">This feature is currently in tech preview and is subject to change in a future release. Click here for documentation.</span>
</div>
</a>
<at-panel>
@ -34,6 +34,7 @@
variables="vm.form.extraVars.value"
label="{{ vm.form.extraVars.label }}"
name="{{ vm.form.extraVars.name }}"
tooltip="{{ vm.form.extraVars.tooltip }}"
>
</at-code-mirror>
</div>

View File

@ -27,6 +27,7 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
vm.switchDisabled = false;
vm.form.disabled = !instanceGroup.has('options', 'actions.PUT');
vm.form.name.required = true;
delete vm.form.name.help_text;
vm.form.credential = {
type: 'field',
label: i18n._('Credential'),
@ -38,6 +39,7 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
vm.form.credential._displayValue = EditContainerGroupDataset.data.summary_fields.credential.name;
vm.form.credential.required = true;
vm.form.credential._value = EditContainerGroupDataset.data.summary_fields.credential.id;
vm.form.credential.help_text = strings.get('container.CREDENTIAL_HELP_TEXT');
vm.tab = {
details: {
@ -59,7 +61,8 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
label: strings.get('container.POD_SPEC_LABEL'),
value: EditContainerGroupDataset.data.pod_spec_override || "---",
name: 'extraVars',
disabled: true
disabled: true,
tooltip: strings.get('container.EXTRA_VARS_HELP_TEXT')
};
vm.switchDisabled = true;
} else {
@ -67,7 +70,8 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
label: strings.get('container.POD_SPEC_LABEL'),
value: EditContainerGroupDataset.data.pod_spec_override || instanceGroup.model.OPTIONS.actions.PUT.pod_spec_override.default,
name: 'extraVars',
toggleLabel: strings.get('container.POD_SPEC_TOGGLE')
toggleLabel: strings.get('container.POD_SPEC_TOGGLE'),
tooltip: strings.get('container.EXTRA_VARS_HELP_TEXT')
};
}

View File

@ -1,135 +1,100 @@
.InstanceGroups {
.at-Row-actions{
justify-content: flex-start;
width: 300px;
& > capacity-bar:only-child{
margin-left: 0px;
margin-top: 5px
}
}
.at-RowAction{
margin: 0;
}
.at-Row-links{
justify-content: flex-start;
.at-Row--instances {
.at-Row-content {
flex-wrap: nowrap;
}
.BreadCrumb-menuLinkImage:hover {
color: @default-link;
.at-Row-toggle {
align-self: auto;
flex: initial;
}
.List-details {
align-self: flex-end;
color: @default-interface-txt;
.at-Row-itemGroup {
display: flex;
flex: 0 0 auto;
font-size: 12px;
margin-right:20px;
text-transform: uppercase;
flex: 1;
flex-wrap: wrap;
}
.Capacity-details {
.at-Row-items--instances {
display: flex;
margin-right: 20px;
flex-wrap: wrap;
align-items: center;
.Capacity-details--label {
color: @default-interface-txt;
margin: 0 10px 0 0;
width: 100px;
}
align-content: center;
flex: 1;
}
.RunningJobs-details {
align-items: center;
display: flex;
.RunningJobs-details--label {
margin: 0 10px 0 0;
}
.at-RowItem--isHeader {
min-width: 250px;
}
.List-tableCell--capacityColumn {
.at-Row-items--capacity {
display: flex;
height: 40px;
flex-wrap: wrap;
align-items: center;
}
.List-noItems {
margin-top: 20px;
}
.List-tableRow .List-titleBadge {
margin: 0 0 0 5px;
}
.Panel-docsLink {
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
padding: 7px;
background: @at-white;
border-radius: @at-border-radius;
height: 30px;
width: 30px;
margin: 0 20px 0 auto;
i {
font-size: @at-font-size-icon;
color: @at-gray-646972;
}
}
.Panel-docsLink:hover {
background-color: @at-blue;
i {
color: @at-white;
}
}
.at-Row-toggle{
margin-top: 20px;
padding-left: 15px;
}
.ContainerGroups-codeMirror{
margin-bottom: 10px;
}
.at-Row-container{
flex-wrap: wrap;
}
.containerGroups-messageBar-link:hover{
text-decoration: underline;
}
@media screen and (max-width: 1060px) and (min-width: 769px){
.at-Row-links {
justify-content: flex-start;
flex-wrap: wrap;
}
}
@media screen and (min-width: 1061px){
.at-Row-actions{
justify-content: flex-end;
& > capacity-bar:only-child {
margin-right: 30px;
}
}
.instanceGroupsList-details{
display: flex;
}
.at-Row-links {
justify-content: flex-end;
display: flex;
width: 445px;
}
.CapacityAdjuster {
padding-bottom: 15px;
}
}
.at-Row--instanceGroups {
.at-Row-content {
flex-wrap: nowrap;
}
.at-Row-itemGroup {
display: flex;
flex: 1;
flex-wrap: wrap;
}
.at-Row-items--instanceGroups {
display: flex;
flex-wrap: wrap;
align-items: center;
flex: 1;
max-width: 100%;
}
.at-Row-itemHeaderGroup {
min-width: 320px;
display: flex;
}
.at-Row-items--capacity {
display: flex;
flex-wrap: wrap;
align-items: center;
margin-right: 5px;
min-width: 215px;
}
.at-Row--instanceSpacer {
width: 140px;
}
.at-Row--capacitySpacer {
flex: .6;
}
.at-Row-actions {
min-width: 50px;
}
}
@media screen and (max-width: 1260px) {
.at-Row--instances .at-Row-items--capacity {
flex: 1
}
.at-Row--instances .CapacityAdjuster {
padding-bottom: 5px;
}
}
@media screen and (max-width: 600px) {
.at-Row--instanceGroups .at-Row-itemHeaderGroup,
.at-Row--instanceGroups .at-Row-itemGroup {
max-width: 270px;
}
}

View File

@ -72,8 +72,9 @@ function InstanceGroupsStrings(BaseString) {
CREDENTIAL_PLACEHOLDER: t.s('SELECT A CREDENTIAL'),
POD_SPEC_LABEL: t.s('Pod Spec Override'),
BADGE_TEXT: t.s('Container Group'),
POD_SPEC_TOGGLE: t.s('Customize Pod Spec')
POD_SPEC_TOGGLE: t.s('Customize Pod Spec'),
CREDENTIAL_HELP_TEXT: t.s('Credential to authenticate with Kubernetes or OpenShift.  Must be of type \"Kubernetes/OpenShift API Bearer Token\”.'),
EXTRA_VARS_HELP_TEXT: t.s('Field for passing a custom Kubernetes or OpenShift Pod specification.')
};
}

View File

@ -43,35 +43,45 @@
</at-list-toolbar>
<at-list results='vm.instances'>
<at-row ng-repeat="instance in vm.instances"
ng-class="{'at-Row--active': (instance.id === vm.activeId)}">
ng-class="{'at-Row--active': (instance.id === vm.activeId)}"
class="at-Row--instances">
<div class="at-Row-toggle">
<at-switch on-toggle="vm.toggle(instance)" switch-on="instance.enabled" switch-disabled="vm.rowAction.toggle._disabled"></at-switch>
</div>
<div class="at-Row-items at-Row-items--instances">
<at-row-item
header-value="{{ instance.hostname }}"
header-tag="{{ instance.managed_by_policy ? '' : vm.strings.get('list.MANUAL') }}">
</at-row-item>
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}, job_search: {status__in: ['running,waiting']}})"
value="{{ instance.jobs_running }}"
inline="true"
badge="true">
</at-row-item>
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_TOTAL_JOBS') }}"
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}})"
value="{{ instance.jobs_total }}"
inline="true"
badge="true">
</at-row-item>
</div>
<div class="at-Row-actions">
<capacity-adjuster state="instance" disabled="{{vm.rowAction.capacity_adjustment._disabled}}"></capacity-adjuster>
<capacity-bar label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}" capacity="instance.consumed_capacity" total-capacity="instance.capacity"></capacity-bar>
<div class="at-Row-itemGroup">
<div class="at-Row-items at-Row-items--instances">
<at-row-item
header-value="{{ instance.hostname }}"
header-tag="{{ instance.managed_by_policy ? '' : vm.strings.get('list.MANUAL') }}">
</at-row-item>
<div class="at-Row-nonHeaderItems">
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}, job_search: {status__in: ['running,waiting']}})"
value="{{ instance.jobs_running }}"
inline="true"
badge="true">
</at-row-item>
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_TOTAL_JOBS') }}"
label-state="instanceGroups.instanceJobs({instance_group_id: {{vm.instance_group_id}}, instance_id: {{instance.id}}})"
value="{{ instance.jobs_total }}"
inline="true"
badge="true">
</at-row-item>
</div>
</div>
<div class="at-Row-items--capacity">
<capacity-adjuster
state="instance"
disabled="{{vm.rowAction.capacity_adjustment._disabled}}">
</capacity-adjuster>
<capacity-bar
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}"
capacity="instance.consumed_capacity"
total-capacity="instance.capacity">
</capacity-bar>
</div>
</div>
</at-row>
</at-list>

View File

@ -41,10 +41,11 @@
</at-list-toolbar>
<at-list results="instance_groups">
<at-row ng-repeat="instance_group in instance_groups"
ng-class="{'at-Row--active': (instance_group.id === vm.activeId)}" >
<div class="at-Row-items">
<div class="at-Row-container">
<div class="at-Row-content">
ng-class="{'at-Row--active': (instance_group.id === vm.activeId)}"
class="at-Row--instanceGroups">
<div class="at-Row-itemGroup">
<div class="at-Row-items at-Row-items--instanceGroups">
<div class="at-Row-itemHeaderGroup">
<at-row-item
ng-if="!instance_group.credential"
header-value="{{ instance_group.name }}"
@ -67,23 +68,14 @@
</div>
</div>
<div class="at-RowItem--labels" ng-if="!instance_group.credential">
<div class="LabelList-tagContainer">
<div class="LabelList-tag" ng-class="{'LabelList-tag--deletable' : (showDelete && template.summary_fields.user_capabilities.edit)}">
<span class="LabelList-name">{{vm.strings.get('instance.BADGE_TEXT') }}</span>
</div>
<div class="LabelList-tagContainer">
<div class="LabelList-tag" ng-class="{'LabelList-tag--deletable' : (showDelete && template.summary_fields.user_capabilities.edit)}">
<span class="LabelList-name">{{vm.strings.get('instance.BADGE_TEXT') }}</span>
</div>
</div>
</div>
</div>
<div class="instanceGroupsList-details">
<div class="at-Row-links">
<at-row-item
ng-if="!instance_group.credential"
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_INSTANCES') }}"
label-state="instanceGroups.instances({instance_group_id: {{ instance_group.id }}})"
value="{{ instance_group.instances }}"
inline="true"
badge="true">
</at-row-item>
<div class="at-Row-nonHeaderItems">
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RUNNING_JOBS') }}"
label-state="instanceGroups.jobs({instance_group_id: {{ instance_group.id }}, job_search: {status__in: ['running,waiting']}})"
@ -98,14 +90,38 @@
inline="true"
badge="true">
</at-row-item>
</div>
<div class="at-Row-actions" >
<capacity-bar ng-show="!instance_group.credential" label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}" capacity="instance_group.consumed_capacity" total-capacity="instance_group.capacity"></capacity-bar>
<at-row-action icon="fa-trash" ng-click="vm.deleteInstanceGroup(instance_group)" ng-if="vm.rowAction.trash(instance_group)">
</at-row-action>
</div>
<at-row-item
ng-if="!instance_group.credential"
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_INSTANCES') }}"
label-state="instanceGroups.instances({instance_group_id: {{ instance_group.id }}})"
value="{{ instance_group.instances }}"
inline="true"
badge="true">
</at-row-item>
<div
ng-if="instance_group.credential"
class="at-Row--instanceSpacer">
</div>
</div>
</div>
<div class="at-Row-items--capacity" ng-if="!instance_group.credential">
<capacity-bar
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}"
capacity="instance_group.consumed_capacity"
total-capacity="instance_group.capacity">
</capacity-bar>
</div>
<div
ng-if="instance_group.credential"
class="at-Row--capacitySpacer">
</div>
</div>
<div class="at-Row-actions" >
<at-row-action
icon="fa-trash"
ng-click="vm.deleteInstanceGroup(instance_group)"
ng-if="vm.rowAction.trash(instance_group)">
</at-row-action>
</div>
</at-row>
</at-list>

View File

@ -671,6 +671,98 @@ export default ['i18n', function(i18n) {
"|| notification_type.value == 'webhook')",
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
approved_message: {
label: i18n._('Workflow Approved Message'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && notification_type.value != 'webhook'",
rows: 2,
oneLine: 'true',
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
approved_body: {
label: i18n._('Workflow Approved Message Body'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && " +
"(notification_type.value == 'email' " +
"|| notification_type.value == 'pagerduty' " +
"|| notification_type.value == 'webhook')",
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
denied_message: {
label: i18n._('Workflow Denied Message'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && notification_type.value != 'webhook'",
rows: 2,
oneLine: 'true',
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
denied_body: {
label: i18n._('Workflow Denied Message Body'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && " +
"(notification_type.value == 'email' " +
"|| notification_type.value == 'pagerduty' " +
"|| notification_type.value == 'webhook')",
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
running_message: {
label: i18n._('Workflow Running Message'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && notification_type.value != 'webhook'",
rows: 2,
oneLine: 'true',
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
running_body: {
label: i18n._('Workflow Running Message Body'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && " +
"(notification_type.value == 'email' " +
"|| notification_type.value == 'pagerduty' " +
"|| notification_type.value == 'webhook')",
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
timed_out_message: {
label: i18n._('Workflow Timed Out Message'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && notification_type.value != 'webhook'",
rows: 2,
oneLine: 'true',
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
timed_out_body: {
label: i18n._('Workflow Timed Out Message Body'),
class: 'Form-formGroup--fullWidth',
type: 'syntax_highlight',
mode: 'jinja2',
default: '',
ngShow: "customize_messages && " +
"(notification_type.value == 'email' " +
"|| notification_type.value == 'pagerduty' " +
"|| notification_type.value == 'webhook')",
ngDisabled: '!(notification_template.summary_fields.user_capabilities.edit || canAdd)',
},
},
buttons: { //for now always generates <button> tags

View File

@ -1,19 +1,20 @@
const emptyDefaults = {
started: {
message: '',
body: '',
},
success: {
message: '',
body: '',
},
error: {
message: '',
body: '',
},
started: { message: '', body: '' },
success: { message: '', body: '' },
error: { message: '', body: '' },
workflow_approval: {
approved: { message: '', body: '' },
denied: { message: '', body: '' },
running: { message: '', body: '' },
timed_out: { message: '', body: '' },
}
};
// Return the message only when the user has customized it away from the
// notification type's default; a default (unchanged) message maps to null.
function getMessageIfUpdated(message, defaultValue) {
    if (message === defaultValue) {
        return null;
    }
    return message;
}
export default [function() {
return {
getMessagesObj: function ($scope, defaultMessages) {
@ -23,22 +24,34 @@ export default [function() {
const defaults = defaultMessages[$scope.notification_type.value] || {};
return {
started: {
message: $scope.started_message === defaults.started.message ?
null : $scope.started_message,
body: $scope.started_body === defaults.started.body ?
null : $scope.started_body,
message: getMessageIfUpdated($scope.started_message, defaults.started.message),
body: getMessageIfUpdated($scope.started_body, defaults.started.body),
},
success: {
message: $scope.success_message === defaults.success.message ?
null : $scope.success_message,
body: $scope.success_body === defaults.success.body ?
null : $scope.success_body,
message: getMessageIfUpdated($scope.success_message, defaults.success.message),
body: getMessageIfUpdated($scope.success_body, defaults.success.body),
},
error: {
message: $scope.error_message === defaults.error.message ?
null : $scope.error_message,
body: $scope.error_body === defaults.error.body ?
null : $scope.error_body,
message: getMessageIfUpdated($scope.error_message, defaults.error.message),
body: getMessageIfUpdated($scope.error_body, defaults.error.body),
},
workflow_approval: {
approved: {
message: getMessageIfUpdated($scope.approved_message, defaults.workflow_approval.approved.message),
body: getMessageIfUpdated($scope.approved_body, defaults.workflow_approval.approved.body),
},
denied: {
message: getMessageIfUpdated($scope.denied_message, defaults.workflow_approval.denied.message),
body: getMessageIfUpdated($scope.denied_body, defaults.workflow_approval.denied.body),
},
running: {
message: getMessageIfUpdated($scope.running_message, defaults.workflow_approval.running.message),
body: getMessageIfUpdated($scope.running_body, defaults.workflow_approval.running.body),
},
timed_out: {
message: getMessageIfUpdated($scope.timed_out_message, defaults.workflow_approval.timed_out.message),
body: getMessageIfUpdated($scope.timed_out_body, defaults.workflow_approval.timed_out.body),
},
}
};
},
@ -56,6 +69,15 @@ export default [function() {
$scope.success_body = defaults.success.body;
$scope.error_message = defaults.error.message;
$scope.error_body = defaults.error.body;
$scope.approved_message = defaults.workflow_approval.approved.message;
$scope.approved_body = defaults.workflow_approval.approved.body;
$scope.denied_message = defaults.workflow_approval.denied.message;
$scope.denied_body = defaults.workflow_approval.denied.body;
$scope.running_message = defaults.workflow_approval.running.message;
$scope.running_body = defaults.workflow_approval.running.body;
$scope.timed_out_message = defaults.workflow_approval.timed_out.message;
$scope.timed_out_body = defaults.workflow_approval.timed_out.body;
if (!messages) {
return;
}
@ -84,6 +106,48 @@ export default [function() {
isCustomized = true;
$scope.error_body = messages.error.body;
}
if (messages.workflow_approval) {
if (messages.workflow_approval.approved &&
messages.workflow_approval.approved.message) {
isCustomized = true;
$scope.approved_message = messages.workflow_approval.approved.message;
}
if (messages.workflow_approval.approved &&
messages.workflow_approval.approved.body) {
isCustomized = true;
$scope.approved_body = messages.workflow_approval.approved.body;
}
if (messages.workflow_approval.denied &&
messages.workflow_approval.denied.message) {
isCustomized = true;
$scope.denied_message = messages.workflow_approval.denied.message;
}
if (messages.workflow_approval.denied &&
messages.workflow_approval.denied.body) {
isCustomized = true;
$scope.denied_body = messages.workflow_approval.denied.body;
}
if (messages.workflow_approval.running &&
messages.workflow_approval.running.message) {
isCustomized = true;
$scope.running_message = messages.workflow_approval.running.message;
}
if (messages.workflow_approval.running &&
messages.workflow_approval.running.body) {
isCustomized = true;
$scope.running_body = messages.workflow_approval.running.body;
}
if (messages.workflow_approval.timed_out &&
messages.workflow_approval.timed_out.message) {
isCustomized = true;
$scope.timed_out_message = messages.workflow_approval.timed_out.message;
}
if (messages.workflow_approval.timed_out &&
messages.workflow_approval.timed_out.body) {
isCustomized = true;
$scope.timed_out_body = messages.workflow_approval.timed_out.body;
}
}
$scope.customize_messages = isCustomized;
},
@ -110,6 +174,30 @@ export default [function() {
if ($scope.error_body === oldDefaults.error.body) {
$scope.error_body = newDefaults.error.body;
}
if ($scope.approved_message === oldDefaults.workflow_approval.approved.message) {
$scope.approved_message = newDefaults.workflow_approval.approved.message;
}
if ($scope.approved_body === oldDefaults.workflow_approval.approved.body) {
$scope.approved_body = newDefaults.workflow_approval.approved.body;
}
if ($scope.denied_message === oldDefaults.workflow_approval.denied.message) {
$scope.denied_message = newDefaults.workflow_approval.denied.message;
}
if ($scope.denied_body === oldDefaults.workflow_approval.denied.body) {
$scope.denied_body = newDefaults.workflow_approval.denied.body;
}
if ($scope.running_message === oldDefaults.workflow_approval.running.message) {
$scope.running_message = newDefaults.workflow_approval.running.message;
}
if ($scope.running_body === oldDefaults.workflow_approval.running.body) {
$scope.running_body = newDefaults.workflow_approval.running.body;
}
if ($scope.timed_out_message === oldDefaults.workflow_approval.timed_out.message) {
$scope.timed_out_message = newDefaults.workflow_approval.timed_out.message;
}
if ($scope.timed_out_body === oldDefaults.workflow_approval.timed_out.body) {
$scope.timed_out_body = newDefaults.workflow_approval.timed_out.body;
}
}
};
}];

View File

@ -233,6 +233,38 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
}, true);
};
// Collect the tag values currently chosen in the select2 widget that
// decorates the input identified by tagId. select2 keeps its state in
// sibling DOM nodes rather than on the scope, so we scrape the rendered
// ".select2-selection__choice" elements and normalize each title into
// the { value, name, label } shape the prompt models expect.
function getSelectedTags(tagId) {
    const tags = [];
    const chosen = $(tagId).siblings(".select2").first()
        .find(".select2-selection__choice");
    chosen.each((i, option) => {
        const title = option.title;
        tags.push({
            value: title,
            name: title,
            label: title
        });
    });
    return tags;
}
// Merge two tag arrays into one, de-duplicating on the tag's `value`.
// The first occurrence wins, so entries from `tags` take precedence
// over duplicates in `otherTags`; relative order is preserved.
function consolidateTags (tags, otherTags) {
    const byValue = new Map();
    tags.concat(otherTags).forEach(tag => {
        if (!byValue.has(tag.value)) {
            byValue.set(tag.value, tag);
        }
    });
    return Array.from(byValue.values());
}
vm.next = (currentTab) => {
if(_.has(vm, 'steps.other_prompts.tab._active') && vm.steps.other_prompts.tab._active === true){
try {
@ -243,6 +275,22 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
event.preventDefault();
return;
}
// The current tag input state lives somewhere in the associated select2
// widgetry and isn't directly tied to the vm, so extract the tag values
// and update the vm to keep it in sync.
if (vm.promptDataClone.launchConf.ask_tags_on_launch) {
vm.promptDataClone.prompts.tags.value = consolidateTags(
angular.copy(vm.promptDataClone.prompts.tags.value),
getSelectedTags("#job_launch_job_tags")
);
}
if (vm.promptDataClone.launchConf.ask_skip_tags_on_launch) {
vm.promptDataClone.prompts.skipTags.value = consolidateTags(
angular.copy(vm.promptDataClone.prompts.skipTags.value),
getSelectedTags("#job_launch_skip_tags")
);
}
}
let nextStep;

View File

@ -12,19 +12,6 @@ export default
let scope;
let consolidateTags = (tagModel, tagId) => {
let tags = angular.copy(tagModel);
$(tagId).siblings(".select2").first().find(".select2-selection__choice").each((optionIndex, option) => {
tags.push({
value: option.title,
name: option.title,
label: option.title
});
});
return [...tags.reduce((map, tag) => map.has(tag.value) ? map : map.set(tag.value, tag), new Map()).values()];
};
vm.init = (_scope_) => {
scope = _scope_;
@ -35,14 +22,6 @@ export default
const surveyPasswords = {};
if (scope.promptData.launchConf.ask_tags_on_launch) {
scope.promptData.prompts.tags.value = consolidateTags(scope.promptData.prompts.tags.value, "#job_launch_job_tags");
}
if (scope.promptData.launchConf.ask_skip_tags_on_launch) {
scope.promptData.prompts.skipTags.value = consolidateTags(scope.promptData.prompts.skipTags.value, "#job_launch_skip_tags");
}
if (scope.promptData.launchConf.survey_enabled){
scope.promptData.extraVars = ToJSON(scope.parseType, scope.promptData.prompts.variables.value, false);
scope.promptData.surveyQuestions.forEach(surveyQuestion => {

View File

@ -8,6 +8,17 @@ inside the folder `lib/ansible/modules/web_infrastructure/ansible_tower`
as well as other folders for the inventory plugin, module utils, and
doc fragment.
## Release and Upgrade Notes
The release 7.0.0 of the `awx.awx` collection is intended to be identical
to the content prior to the migration, aside from changes necessary to
have it function as a collection.
The following notes describe changes that may require updates to playbooks.
- Specifying `inputs` or `injectors` as strings in the
`tower_credential_type` module is no longer supported. Provide as dictionaries instead.
## Running
To use this collection, the "old" tower-cli needs to be installed
@ -29,12 +40,31 @@ in `awx_collection/test/awx`. These tests require that python packages
are available for all of `awx`, `ansible`, `tower_cli`, and the collection
itself.
### Inside Development Container
The target `make prepare_collection_venv` will prepare some requirements
in the `awx_collection_test_venv` folder so that `make test_collection` can
be run to actually run the tests. A single test can be run via:
```
make test_collection MODULE_TEST_DIRS=awx_collection/test/awx/test_organization.py
make test_collection COLLECTION_TEST_DIRS=awx_collection/test/awx/test_organization.py
```
### Manually
As a faster alternative if you do not want to use the container, or
run against Ansible or tower-cli source, it is possible to set up a
working environment yourself.
```
mkvirtualenv my_new_venv
# may need to replace psycopg2 with psycopg2-binary in requirements/requirements.txt
pip install -r requirements/requirements.txt -r requirements/requirements_dev.txt -r requirements/requirements_git.txt
make clean-api
pip install -e <path to your Ansible>
pip install -e <path to your tower-cli>
pip install -e .
PYTHONPATH=awx_collection:$PYTHONPATH py.test awx_collection/test/awx/
```
## Building

View File

@ -98,8 +98,8 @@ class TowerModule(AnsibleModule):
)
args.update(argument_spec)
mutually_exclusive = kwargs.get('mutually_exclusive', [])
kwargs['mutually_exclusive'] = mutually_exclusive.extend((
kwargs.setdefault('mutually_exclusive', [])
kwargs['mutually_exclusive'].extend((
('tower_config_file', 'tower_host'),
('tower_config_file', 'tower_username'),
('tower_config_file', 'tower_password'),

View File

@ -53,9 +53,23 @@ options:
description:
- Type of credential being added.
- The ssh choice refers to a Tower Machine credential.
required: True
required: False
type: str
choices: ["ssh", "vault", "net", "scm", "aws", "vmware", "satellite6", "cloudforms", "gce", "azure_rm", "openstack", "rhv", "insights", "tower"]
credential_type:
description:
- Name of credential type.
required: False
version_added: "2.10"
type: str
inputs:
description:
- >-
Credential inputs where the keys are var names used in templating.
Refer to the Ansible Tower documentation for example syntax.
required: False
version_added: "2.9"
type: dict
host:
description:
- Host for this credential.
@ -116,7 +130,8 @@ options:
become_method:
description:
- Become method to use for privilege escalation.
choices: ["None", "sudo", "su", "pbrun", "pfexec", "pmrun"]
- Some examples are "None", "sudo", "su", "pbrun"
- Due to become plugins, these can be arbitrary
type: str
become_username:
description:
@ -185,6 +200,15 @@ EXAMPLES = '''
tower_host: https://localhost
run_once: true
delegate_to: localhost
- name: Add Credential with Custom Credential Type
tower_credential:
name: Workshop Credential
credential_type: MyCloudCredential
organization: Default
tower_username: admin
tower_password: ansible
tower_host: https://localhost
'''
import os
@ -219,7 +243,17 @@ KIND_CHOICES = {
}
def credential_type_for_v1_kind(params, module):
OLD_INPUT_NAMES = (
'authorize', 'authorize_password', 'client',
'security_token', 'secret', 'tenant', 'subscription',
'domain', 'become_method', 'become_username',
'become_password', 'vault_password', 'project', 'host',
'username', 'password', 'ssh_key_data', 'vault_id',
'ssh_key_unlock'
)
def credential_type_for_kind(params):
credential_type_res = tower_cli.get_resource('credential_type')
kind = params.pop('kind')
arguments = {'managed_by_tower': True}
@ -244,8 +278,9 @@ def main():
name=dict(required=True),
user=dict(),
team=dict(),
kind=dict(required=True,
choices=KIND_CHOICES.keys()),
kind=dict(choices=KIND_CHOICES.keys()),
credential_type=dict(),
inputs=dict(type='dict'),
host=dict(),
username=dict(),
password=dict(no_log=True),
@ -270,7 +305,14 @@ def main():
vault_id=dict(),
)
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
mutually_exclusive = [
('kind', 'credential_type')
]
for input_name in OLD_INPUT_NAMES:
mutually_exclusive.append(('inputs', input_name))
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True,
mutually_exclusive=mutually_exclusive)
name = module.params.get('name')
organization = module.params.get('organization')
@ -298,10 +340,26 @@ def main():
# /api/v1/ backwards compat
# older versions of tower-cli don't *have* a credential_type
# resource
params['kind'] = module.params['kind']
params['kind'] = module.params.get('kind')
else:
credential_type = credential_type_for_v1_kind(module.params, module)
params['credential_type'] = credential_type['id']
if module.params.get('credential_type'):
credential_type_res = tower_cli.get_resource('credential_type')
try:
credential_type = credential_type_res.get(name=module.params['credential_type'])
except (exc.NotFound) as excinfo:
module.fail_json(msg=(
'Failed to update credential, credential_type not found: {0}'
).format(excinfo), changed=False)
params['credential_type'] = credential_type['id']
if module.params.get('inputs'):
params['inputs'] = module.params.get('inputs')
elif module.params.get('kind'):
credential_type = credential_type_for_kind(module.params)
params['credential_type'] = credential_type['id']
else:
module.fail_json(msg='must either specify credential_type or kind', changed=False)
if module.params.get('description'):
params['description'] = module.params.get('description')
@ -333,12 +391,7 @@ def main():
if module.params.get('vault_id', None) and module.params.get('kind') != 'vault':
module.fail_json(msg="Parameter 'vault_id' is only valid if parameter 'kind' is specified as 'vault'")
for key in ('authorize', 'authorize_password', 'client',
'security_token', 'secret', 'tenant', 'subscription',
'domain', 'become_method', 'become_username',
'become_password', 'vault_password', 'project', 'host',
'username', 'password', 'ssh_key_data', 'vault_id',
'ssh_key_unlock'):
for key in OLD_INPUT_NAMES:
if 'kind' in params:
params[key] = module.params.get(key)
elif module.params.get(key):

View File

@ -28,10 +28,12 @@ options:
description:
- The name of the credential type.
required: True
type: str
description:
description:
- The description of the credential type to give more detail about it.
required: False
type: str
kind:
description:
- >-
@ -40,24 +42,28 @@ options:
for more information.
choices: [ 'ssh', 'vault', 'net', 'scm', 'cloud', 'insights' ]
required: False
type: str
inputs:
description:
- >-
Enter inputs using either JSON or YAML syntax. Refer to the Ansible
Tower documentation for example syntax.
required: False
type: dict
injectors:
description:
- >-
Enter injectors using either JSON or YAML syntax. Refer to the
Ansible Tower documentation for example syntax.
required: False
type: dict
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
type: str
validate_certs:
description:
- Tower option to avoid certificates check.

View File

@ -27,38 +27,50 @@ options:
description:
- The name to use for the group.
required: True
type: str
description:
description:
- The description to use for the group.
type: str
inventory:
description:
- Inventory the group should be made a member of.
required: True
type: str
variables:
description:
- Variables to use for the group, use C(@) for a file.
type: str
credential:
description:
- Credential to use for the group.
type: str
source:
description:
- The source to use for this group.
choices: ["manual", "file", "ec2", "rax", "vmware", "gce", "azure", "azure_rm", "openstack", "satellite6" , "cloudforms", "custom"]
default: manual
type: str
source_regions:
description:
- Regions for cloud provider.
type: str
source_vars:
description:
- Override variables from source with variables from this field.
type: str
instance_filters:
description:
- Comma-separated list of filter expressions for matching hosts.
type: str
group_by:
description:
- Limit groups automatically created from inventory source.
type: str
source_script:
description:
- Inventory script to be used when group type is C(custom).
type: str
overwrite:
description:
- Delete child groups and hosts not found in source.
@ -67,6 +79,7 @@ options:
overwrite_vars:
description:
- Override vars in child groups and hosts with those from external source.
type: bool
update_on_launch:
description:
- Refresh inventory data from its source each time a job is run.
@ -77,6 +90,7 @@ options:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''
@ -120,7 +134,7 @@ def main():
group_by=dict(),
source_script=dict(),
overwrite=dict(type='bool', default=False),
overwrite_vars=dict(),
overwrite_vars=dict(type='bool', default=False),
update_on_launch=dict(type='bool', default=False),
state=dict(choices=['present', 'absent'], default='present'),
)

View File

@ -27,13 +27,16 @@ options:
description:
- The name to use for the host.
required: True
type: str
description:
description:
- The description to use for the host.
type: str
inventory:
description:
- Inventory the host should be made a member of.
required: True
type: str
enabled:
description:
- If the host should be enabled.
@ -42,11 +45,13 @@ options:
variables:
description:
- Variables to use for the host. Use C(@) for a file.
type: str
state:
description:
- Desired state of the resource.
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,31 +27,38 @@ options:
description:
- The name to use for the inventory.
required: True
type: str
description:
description:
- The description to use for the inventory.
type: str
organization:
description:
- Organization the inventory belongs to.
required: True
type: str
variables:
description:
- Inventory variables. Use C(@) to get from file.
type: str
kind:
description:
- The kind field. Cannot be modified after created.
default: ""
choices: ["", "smart"]
version_added: "2.7"
type: str
host_filter:
description:
- The host_filter field. Only useful when C(kind=smart).
version_added: "2.7"
type: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,13 +27,16 @@ options:
description:
- The name to use for the inventory source.
required: True
type: str
description:
description:
- The description to use for the inventory source.
type: str
inventory:
description:
- The inventory the source is linked to.
required: True
type: str
source:
description:
- Types of inventory source.
@ -52,9 +55,11 @@ options:
- tower
- custom
required: True
type: str
credential:
description:
- Credential to use to retrieve the inventory from.
type: str
source_vars:
description:
- >-
@ -62,15 +67,19 @@ options:
file. For example with Openstack, specifying *private: false* would
change the output of the openstack.py script. It has to be YAML or
JSON.
type: str
timeout:
description:
- Number in seconds after which the Tower API methods will time out.
type: int
source_project:
description:
- Use a *project* as a source for the *inventory*.
type: str
source_path:
description:
- Path to the file to use as a source in the selected *project*.
type: str
update_on_project_update:
description:
- >-
@ -83,23 +92,27 @@ options:
List of regions for your cloud provider. You can include multiple all
regions. Only Hosts associated with the selected regions will be
updated. Refer to Ansible Tower documentation for more detail.
type: str
instance_filters:
description:
- >-
Provide a comma-separated list of filter expressions. Hosts are
imported when all of the filters match. Refer to Ansible Tower
documentation for more detail.
type: str
group_by:
description:
- >-
Specify which groups to create automatically. Group names will be
created similar to the options selected. If blank, all groups above
are created. Refer to Ansible Tower documentation for more detail.
type: str
source_script:
description:
- >-
The source custom script to use to build the inventory. It needs to
exist.
type: str
overwrite:
description:
- >-
@ -133,16 +146,13 @@ options:
job runs and callbacks the task system will evaluate the timestamp of
the latest sync. If it is older than Cache Timeout, it is not
considered current, and a new inventory sync will be performed.
type: int
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
validate_certs:
description:
- Tower option to avoid certificates check.
type: bool
aliases: [ tower_verify_ssl ]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,6 +27,7 @@ options:
description:
- ID of the job to cancel
required: True
type: int
fail_if_not_running:
description:
- Fail loudly if the I(job_id) does not reference a running job.

View File

@ -27,33 +27,32 @@ options:
description:
- Name of the job template to use.
required: True
job_explanation:
description:
- Job explanation field.
type: str
job_type:
description:
- Job_type to use for the job, only used if prompt for job_type is set.
choices: ["run", "check", "scan"]
type: str
inventory:
description:
- Inventory to use for the job, only used if prompt for inventory is set.
type: str
credential:
description:
- Credential to use for job, only used if prompt for credential is set.
type: str
extra_vars:
description:
- Extra_vars to use for the job_template. Prepend C(@) if a file.
type: list
limit:
description:
- Limit to use for the I(job_template).
type: str
tags:
description:
- Specific tags to use from the playbook.
use_job_endpoint:
description:
- Disable launching jobs from job template.
type: bool
default: 'no'
type: list
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,9 +27,11 @@ options:
description:
- Only list jobs with this status.
choices: ['pending', 'waiting', 'running', 'error', 'failed', 'canceled', 'successful']
type: str
page:
description:
- Page number of the results to fetch.
type: int
all_pages:
description:
- Fetch all the pages and return a single result.
@ -38,6 +40,7 @@ options:
query:
description:
- Query used to further filter the list of jobs. C({"foo":"bar"}) will be passed at C(?foo=bar)
type: dict
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,50 +27,63 @@ options:
description:
- Name to use for the job template.
required: True
type: str
description:
description:
- Description to use for the job template.
type: str
job_type:
description:
- The job type to use for the job template.
required: True
choices: ["run", "check", "scan"]
type: str
inventory:
description:
- Name of the inventory to use for the job template.
type: str
project:
description:
- Name of the project to use for the job template.
required: True
type: str
playbook:
description:
- Path to the playbook to use for the job template within the project provided.
required: True
type: str
credential:
description:
- Name of the credential to use for the job template.
version_added: 2.7
type: str
vault_credential:
description:
- Name of the vault credential to use for the job template.
version_added: 2.7
type: str
forks:
description:
- The number of parallel or simultaneous processes to use while executing the playbook.
type: int
limit:
description:
- A host pattern to further constrain the list of hosts managed or affected by the playbook
type: str
verbosity:
description:
- Control the output level Ansible produces as the playbook runs. 0 - Normal, 1 - Verbose, 2 - More Verbose, 3 - Debug, 4 - Connection Debug.
choices: [0, 1, 2, 3, 4]
default: 0
type: int
extra_vars_path:
description:
- Path to the C(extra_vars) YAML file.
type: path
job_tags:
description:
- Comma separated list of the tags to use for the job template.
type: str
force_handlers_enabled:
description:
- Enable forcing playbook handlers to run even if a task fails.
@ -80,10 +93,12 @@ options:
skip_tags:
description:
- Comma separated list of the tags to skip for the job template.
type: str
start_at_task:
description:
- Start the playbook at the task matching this name.
version_added: 2.7
type: str
diff_mode_enabled:
description:
- Enable diff mode for the job template.
@ -99,6 +114,7 @@ options:
host_config_key:
description:
- Allow provisioning callbacks using this host config key.
type: str
ask_diff_mode:
description:
- Prompt user to enable diff mode (show changes) to files when supported by modules.
@ -171,11 +187,16 @@ options:
version_added: 2.7
type: bool
default: 'no'
timeout:
description:
- Maximum time in seconds to wait for a job to finish (server-side).
type: int
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
notes:
- JSON for survey_spec can be found in Tower API Documentation. See

View File

@ -27,17 +27,21 @@ options:
description:
- ID of the job to monitor.
required: True
type: int
min_interval:
description:
- Minimum interval in seconds, to request an update from Tower.
default: 1
type: float
max_interval:
description:
- Maximum interval in seconds, to request an update from Tower.
default: 30
type: float
timeout:
description:
- Maximum time in seconds to wait for a job to finish.
type: int
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,15 +27,18 @@ options:
description:
- Name to use for the label.
required: True
type: str
organization:
description:
- Organization the label should be applied to.
required: True
type: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,35 +27,43 @@ options:
description:
- The name of the notification.
required: True
type: str
description:
description:
- The description of the notification.
required: False
type: str
organization:
description:
- The organization the notification belongs to.
required: False
type: str
notification_type:
description:
- The type of notification to be sent.
required: True
choices: ["email", "slack", "twilio", "pagerduty", "hipchat", "webhook", "irc"]
type: str
notification_configuration:
description:
- The notification configuration file. Note that providing this field disables all notification-configuration-related fields.
required: False
type: str
username:
description:
- The mail server username. Required if I(notification_type=email).
required: False
type: str
sender:
description:
- The sender email address. Required if I(notification_type=email).
required: False
type: str
recipients:
description:
- The recipients email addresses. Required if I(notification_type=email).
required: False
type: list
use_tls:
description:
- The TLS trigger. Required if I(notification_type=email).
@ -65,6 +73,7 @@ options:
description:
- The mail server host. Required if I(notification_type=email).
required: False
type: str
use_ssl:
description:
- The SSL trigger. Required if I(notification_type=email) or if I(notification_type=irc).
@ -74,10 +83,12 @@ options:
description:
- The mail server password. Required if I(notification_type=email) or if I(notification_type=irc).
required: False
type: str
port:
description:
- The mail server port. Required if I(notification_type=email) or if I(notification_type=irc).
required: False
type: int
channels:
description:
- The destination Slack channels. Required if I(notification_type=slack).
@ -87,47 +98,58 @@ options:
description:
- The access token. Required if I(notification_type=slack), if I(notification_type=pagerduty) or if I(notification_type=hipchat).
required: False
type: str
account_token:
description:
- The Twilio account token. Required if I(notification_type=twilio).
required: False
type: str
from_number:
description:
- The source phone number. Required if I(notification_type=twilio).
required: False
type: str
to_numbers:
description:
- The destination phone numbers. Required if I(notification_type=twilio).
required: False
type: list
account_sid:
description:
- The Twilio account SID. Required if I(notification_type=twilio).
required: False
type: str
subdomain:
description:
- The PagerDuty subdomain. Required if I(notification_type=pagerduty).
required: False
type: str
service_key:
description:
- The PagerDuty service/integration API key. Required if I(notification_type=pagerduty).
required: False
type: str
client_name:
description:
- The PagerDuty client identifier. Required if I(notification_type=pagerduty).
required: False
type: str
message_from:
description:
- The label to be shown with the notification. Required if I(notification_type=hipchat).
required: False
type: str
api_url:
description:
- The HipChat API URL. Required if I(notification_type=hipchat).
required: False
type: str
color:
description:
- The notification color. Required if I(notification_type=hipchat).
required: False
choices: ["yellow", "green", "red", "purple", "gray", "random"]
type: str
rooms:
description:
- HipChat rooms to send the notification to. Required if I(notification_type=hipchat).
@ -142,18 +164,22 @@ options:
description:
- The target URL. Required if I(notification_type=webhook).
required: False
type: str
headers:
description:
- The HTTP headers as JSON string. Required if I(notification_type=webhook).
required: False
type: dict
server:
description:
- The IRC server address. Required if I(notification_type=irc).
required: False
type: str
nickname:
description:
- The IRC nickname. Required if I(notification_type=irc).
required: False
type: str
targets:
description:
- The destination channels or users. Required if I(notification_type=irc).
@ -164,6 +190,7 @@ options:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,14 +27,17 @@ options:
description:
- Name to use for the organization.
required: True
type: str
description:
description:
- The description to use for the organization.
type: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,26 +27,33 @@ options:
description:
- Name to use for the project.
required: True
type: str
description:
description:
- Description to use for the project.
type: str
scm_type:
description:
- Type of SCM resource.
choices: ["manual", "git", "hg", "svn"]
default: "manual"
type: str
scm_url:
description:
- URL of SCM resource.
type: str
local_path:
description:
- The server playbook directory for manual projects.
type: str
scm_branch:
description:
- The branch to use for the SCM resource.
type: str
scm_credential:
description:
- Name of the credential to use with this SCM resource.
type: str
scm_clean:
description:
- Remove local modifications before updating.
@ -68,23 +75,28 @@ options:
- Cache Timeout to cache prior project syncs for a certain number of seconds.
Only valid if scm_update_on_launch is to True, otherwise ignored.
default: 0
type: int
job_timeout:
version_added: "2.8"
description:
- The amount of time (in seconds) to run before the SCM Update is canceled. A value of 0 means no timeout.
default: 0
type: int
custom_virtualenv:
version_added: "2.8"
description:
- Local absolute file path containing a custom Python virtualenv to use
type: str
organization:
description:
- Primary key of organization for project.
type: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -32,46 +32,57 @@ options:
description:
- List of organization names to export
default: []
type: list
user:
description:
- List of user names to export
default: []
type: list
team:
description:
- List of team names to export
default: []
type: list
credential_type:
description:
- List of credential type names to export
default: []
type: list
credential:
description:
- List of credential names to export
default: []
type: list
notification_template:
description:
- List of notification template names to export
default: []
type: list
inventory_script:
description:
- List of inventory script names to export
default: []
type: list
inventory:
description:
- List of inventory names to export
default: []
type: list
project:
description:
- List of project names to export
default: []
type: list
job_template:
description:
- List of job template names to export
default: []
type: list
workflow:
description:
- List of workflow names to export
default: []
type: list
requirements:
- "ansible-tower-cli >= 3.3.0"

View File

@ -26,38 +26,48 @@ options:
user:
description:
- User that receives the permissions specified by the role.
type: str
team:
description:
- Team that receives the permissions specified by the role.
type: str
role:
description:
- The role type to grant/revoke.
required: True
choices: ["admin", "read", "member", "execute", "adhoc", "update", "use", "auditor", "project_admin", "inventory_admin", "credential_admin",
"workflow_admin", "notification_admin", "job_template_admin"]
type: str
target_team:
description:
- Team that the role acts on.
type: str
inventory:
description:
- Inventory the role acts on.
type: str
job_template:
description:
- The job template the role acts on.
type: str
credential:
description:
- Credential the role acts on.
type: str
organization:
description:
- Organization the role acts on.
type: str
project:
description:
- Project the role acts on.
type: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -28,16 +28,19 @@ options:
- The assets to import.
- This can be the output of tower_receive or loaded from a file
required: False
type: str
files:
description:
- List of files to import.
required: False
default: []
type: list
prevent:
description:
- A list of asset types to prevent import for
required: false
default: []
type: list
password_management:
description:
- The password management option to use.
@ -45,6 +48,7 @@ options:
required: false
default: 'default'
choices: ["default", "random"]
type: str
notes:
- One of assets or files needs to be passed in

View File

@ -27,10 +27,12 @@ options:
description:
- Name of setting to modify
required: True
type: str
value:
description:
- Value to be modified for given setting.
required: True
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,15 +27,22 @@ options:
description:
- Name to use for the team.
required: True
type: str
description:
description:
- The description to use for the team.
type: str
organization:
description:
- Organization the team should be made a member of.
required: True
type: str
state:
description:
- Desired state of the resource.
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -27,19 +27,24 @@ options:
description:
- The username of the user.
required: True
type: str
first_name:
description:
- First name of the user.
type: str
last_name:
description:
- Last name of the user.
type: str
email:
description:
- Email address of the user.
required: True
type: str
password:
description:
- Password of the user.
type: str
superuser:
description:
- User is a system wide administrator.
@ -55,6 +60,7 @@ options:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
requirements:
- ansible-tower-cli >= 3.2.0

View File

@ -24,10 +24,12 @@ options:
description:
- The name of the workflow template to run.
required: True
type: str
extra_vars:
description:
- Any extra vars required to launch the job.
required: False
type: str
wait:
description:
- Wait for the workflow to complete.
@ -38,6 +40,7 @@ options:
description:
- If waiting for the workflow to complete this will abort after this
amount of seconds
type: int
requirements:
- "python >= 2.6"

View File

@ -40,26 +40,32 @@ options:
description:
description:
- The description to use for the workflow.
type: str
extra_vars:
description:
- Extra variables used by Ansible in YAML or key=value format.
type: str
inventory:
description:
- Name of the inventory to use for the job template.
version_added: "2.9"
type: str
name:
description:
- The name to use for the workflow.
required: True
type: str
organization:
description:
- The organization the workflow is linked to.
type: str
schema:
description:
- >
The schema is a JSON- or YAML-formatted string defining the
hierarchy structure that connects the nodes. Refer to Tower
documentation for more information.
type: str
survey_enabled:
description:
- Setting that variable will prompt the user for job type on the
@ -68,11 +74,13 @@ options:
survey:
description:
- The definition of the survey associated to the workflow.
type: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''

View File

@ -62,6 +62,9 @@ def run_module():
# We should consider supporting that in the future
resource_module = importlib.import_module('plugins.modules.{}'.format(module_name))
if not isinstance(module_params, dict):
raise RuntimeError('Module params must be dict, got {}'.format(type(module_params)))
# Ansible params can be passed as an invocation argument or over stdin
# this short circuits within the AnsibleModule interface
def mock_load_params(self):

View File

@ -0,0 +1,151 @@
import pytest
from awx.main.models import Credential, CredentialType, Organization
@pytest.mark.django_db
def test_create_machine_credential(run_module, admin_user):
    """Creating an ssh (machine) credential via tower_credential succeeds."""
    Organization.objects.create(name='test-org')
    # The ssh credential type must exist before any credential can use it.
    CredentialType.defaults['ssh']().save()
    # Parameters mirror the example in the module documentation.
    outcome = run_module('tower_credential', {
        'name': 'Team Name',
        'description': 'Team Description',
        'organization': 'test-org',
        'kind': 'ssh',
        'state': 'present',
    }, admin_user)
    stored = Credential.objects.get(name='Team Name')
    outcome.pop('invocation')
    expected = {
        'credential': 'Team Name',
        'state': 'present',
        'id': stored.pk,
        'changed': True,
    }
    assert outcome == expected
@pytest.mark.django_db
def test_create_custom_credential_type(run_module, admin_user):
    """A custom credential type can be created via tower_credential_type."""
    # Parameters mirror the example in the module documentation.
    outcome = run_module('tower_credential_type', {
        'name': 'Nexus',
        'description': 'Credentials type for Nexus',
        'kind': 'cloud',
        'inputs': {"fields": [{"id": "server", "type": "string", "default": "", "label": ""}], "required": []},
        'injectors': {'extra_vars': {'nexus_credential': 'test'}},
        'state': 'present',
        'validate_certs': 'false',
    }, admin_user)
    created = CredentialType.objects.get(name='Nexus')
    outcome.pop('invocation')
    expected = {
        "credential_type": "Nexus",
        "state": "present",
        "id": created.pk,
        "changed": True,
    }
    assert outcome == expected
    # The stored type must round-trip the submitted schema unchanged.
    assert created.inputs == {"fields": [{"id": "server", "type": "string", "default": "", "label": ""}], "required": []}
    assert created.injectors == {'extra_vars': {'nexus_credential': 'test'}}
@pytest.mark.django_db
def test_kind_ct_exclusivity(run_module, admin_user):
    """The kind and credential_type parameters may not be given together."""
    params = {
        'name': 'A credential',
        'organization': 'test-org',
        'kind': 'ssh',
        'credential_type': 'foobar',  # cannot specify if kind is also specified
        'state': 'present',
    }
    outcome = run_module('tower_credential', params, admin_user)
    outcome.pop('invocation')
    assert outcome == {
        'failed': True,
        'msg': 'parameters are mutually exclusive: kind|credential_type',
    }
@pytest.mark.django_db
def test_input_exclusivity(run_module, admin_user):
    """The inputs dict may not be combined with a legacy flat input field."""
    params = {
        'name': 'A credential',
        'organization': 'test-org',
        'kind': 'ssh',
        'inputs': {'token': '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'},
        'security_token': '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1',
        'state': 'present',
    }
    outcome = run_module('tower_credential', params, admin_user)
    outcome.pop('invocation')
    assert outcome == {
        'failed': True,
        'msg': 'parameters are mutually exclusive: inputs|security_token',
    }
@pytest.mark.django_db
def test_missing_credential_type(run_module, admin_user):
    """Referencing a credential_type that does not exist fails cleanly."""
    Organization.objects.create(name='test-org')
    outcome = run_module('tower_credential', {
        'name': 'A credential',
        'organization': 'test-org',
        'credential_type': 'foobar',
        'state': 'present',
    }, admin_user)
    outcome.pop('invocation')
    assert outcome == {
        "changed": False,
        "failed": True,
        'msg': ('Failed to update credential, credential_type not found: '
                'The requested object could not be found.'),
    }
@pytest.mark.django_db
def test_make_use_of_custom_credential_type(run_module, admin_user):
    """A credential can be created against a user-defined credential type."""
    Organization.objects.create(name='test-org')
    # Credential type consumed by the credential below.  Its single input
    # field is marked secret, so the stored value must come back encrypted.
    ct = CredentialType.objects.create(
        name='Ansible Galaxy Token',
        inputs={
            "fields": [
                {
                    "id": "token",
                    "type": "string",
                    "secret": True,
                    "label": "Ansible Galaxy Secret Token Value"
                }
            ],
            "required": ["token"]
        },
        injectors={
            "extra_vars": {
                "galaxy_token": "{{token}}",
            }
        }
    )
    token_value = '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'
    outcome = run_module('tower_credential', {
        'name': 'Galaxy Token for Steve',
        'organization': 'test-org',
        'credential_type': 'Ansible Galaxy Token',
        'inputs': {'token': token_value},
        'state': 'present',
    }, admin_user)
    stored = Credential.objects.get(name='Galaxy Token for Steve')
    assert stored.credential_type_id == ct.id
    assert list(stored.inputs.keys()) == ['token']
    # Secrets are never persisted in the clear.
    assert stored.inputs['token'].startswith('$encrypted$')
    assert len(stored.inputs['token']) >= len('$encrypted$') + len(token_value)
    outcome.pop('invocation')
    assert outcome == {
        "credential": "Galaxy Token for Steve",
        "state": "present",
        "id": stored.pk,
        "changed": True,
    }

View File

@ -32,7 +32,6 @@ def test_create_job_template(run_module, admin_user, project, inventory):
@pytest.mark.django_db
@pytest.mark.xfail(reason='Known limitation and needs to be fixed.')
def test_create_job_template_with_old_machine_cred(run_module, admin_user, project, inventory, machine_credential):
module_args = {

View File

@ -0,0 +1,72 @@
import pytest
import json
from awx.main.models import (
Organization,
Project,
Inventory,
Host,
CredentialType,
Credential,
JobTemplate
)
@pytest.mark.django_db
def test_receive_send_jt(run_module, admin_user, mocker):
    """Round-trip a job template through tower_receive and tower_send.

    Builds an organization/project/inventory/credential/job-template graph,
    exports it all with tower_receive, deletes every object, re-imports the
    exported assets with tower_send, and verifies the relationships survive.
    """
    org = Organization.objects.create(name='SRtest')
    proj = Project.objects.create(
        name='SRtest',
        playbook_files=['debug.yml'],
        scm_type='git',
        scm_url='https://github.com/ansible/test-playbooks.git',
        organization=org,
        allow_override=True  # so we do not require playbooks populated
    )
    inv = Inventory.objects.create(name='SRtest', organization=org)
    Host.objects.create(name='SRtest', inventory=inv)
    ct = CredentialType.defaults['ssh']()
    ct.save()
    cred = Credential.objects.create(
        name='SRtest',
        credential_type=ct,
        organization=org
    )
    jt = JobTemplate.objects.create(
        name='SRtest',
        project=proj,
        inventory=inv,
        playbook='helloworld.yml'
    )
    jt.credentials.add(cred)
    jt.admin_role.members.add(admin_user)  # work around send/receive bug

    # receive everything
    result = run_module('tower_receive', dict(all=True), admin_user)
    assert 'assets' in result, result
    assets = result['assets']
    assert not result.get('changed', True)
    assert set(a['asset_type'] for a in assets) == set((
        'organization', 'inventory', 'job_template', 'credential', 'project',
        'user'
    ))

    # delete everything
    for obj in (jt, inv, proj, cred, org):
        obj.delete()

    # recreate everything
    # NOTE: mocker.patch applies the patch immediately and undoes it at test
    # teardown; it is not a context manager (pytest-mock rejects `with` usage
    # in newer releases), so the patches are installed with plain calls.
    mocker.patch('sys.stdin.isatty', return_value=True)
    mocker.patch('tower_cli.models.base.MonitorableResource.wait')
    result = run_module('tower_send', dict(assets=json.dumps(assets)), admin_user)
    assert not result.get('failed'), result

    new = JobTemplate.objects.get(name='SRtest')
    assert new.project.name == 'SRtest'
    assert new.inventory.name == 'SRtest'
    assert [cred.name for cred in new.credentials.all()] == ['SRtest']

View File

@ -0,0 +1,44 @@
plugins/modules/tower_credential_type.py validate-modules:missing-module-utils-import
plugins/modules/tower_group.py validate-modules:missing-module-utils-import
plugins/modules/tower_host.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory_source.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_cancel.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_list.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_label.py validate-modules:missing-module-utils-import
plugins/modules/tower_notification.py validate-modules:missing-module-utils-import
plugins/modules/tower_organization.py validate-modules:missing-module-utils-import
plugins/modules/tower_project.py validate-modules:missing-module-utils-import
plugins/modules/tower_receive.py validate-modules:missing-module-utils-import
plugins/modules/tower_role.py validate-modules:missing-module-utils-import
plugins/modules/tower_settings.py validate-modules:missing-module-utils-import
plugins/modules/tower_team.py validate-modules:missing-module-utils-import
plugins/modules/tower_user.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_credential_type.py validate-modules:import-error
plugins/modules/tower_credential.py validate-modules:import-error
plugins/modules/tower_group.py validate-modules:import-error
plugins/modules/tower_host.py validate-modules:import-error
plugins/modules/tower_inventory.py validate-modules:import-error
plugins/modules/tower_inventory_source.py validate-modules:import-error
plugins/modules/tower_job_cancel.py validate-modules:import-error
plugins/modules/tower_job_launch.py validate-modules:import-error
plugins/modules/tower_job_list.py validate-modules:import-error
plugins/modules/tower_job_wait.py validate-modules:import-error
plugins/modules/tower_job_template.py validate-modules:import-error
plugins/modules/tower_label.py validate-modules:import-error
plugins/modules/tower_notification.py validate-modules:import-error
plugins/modules/tower_organization.py validate-modules:import-error
plugins/modules/tower_project.py validate-modules:import-error
plugins/modules/tower_receive.py validate-modules:import-error
plugins/modules/tower_role.py validate-modules:import-error
plugins/modules/tower_settings.py validate-modules:import-error
plugins/modules/tower_send.py validate-modules:import-error
plugins/modules/tower_team.py validate-modules:import-error
plugins/modules/tower_user.py validate-modules:import-error
plugins/modules/tower_workflow_launch.py validate-modules:import-error
plugins/modules/tower_workflow_template.py validate-modules:import-error
plugins/modules/tower_workflow_job_template.py validate-modules:import-error

View File

@ -0,0 +1,44 @@
plugins/modules/tower_credential_type.py validate-modules:missing-module-utils-import
plugins/modules/tower_group.py validate-modules:missing-module-utils-import
plugins/modules/tower_host.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory.py validate-modules:missing-module-utils-import
plugins/modules/tower_inventory_source.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_cancel.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_list.py validate-modules:missing-module-utils-import
plugins/modules/tower_job_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_label.py validate-modules:missing-module-utils-import
plugins/modules/tower_notification.py validate-modules:missing-module-utils-import
plugins/modules/tower_organization.py validate-modules:missing-module-utils-import
plugins/modules/tower_project.py validate-modules:missing-module-utils-import
plugins/modules/tower_receive.py validate-modules:missing-module-utils-import
plugins/modules/tower_role.py validate-modules:missing-module-utils-import
plugins/modules/tower_settings.py validate-modules:missing-module-utils-import
plugins/modules/tower_team.py validate-modules:missing-module-utils-import
plugins/modules/tower_user.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_launch.py validate-modules:missing-module-utils-import
plugins/modules/tower_workflow_template.py validate-modules:missing-module-utils-import
plugins/modules/tower_credential_type.py validate-modules:import-error
plugins/modules/tower_credential.py validate-modules:import-error
plugins/modules/tower_group.py validate-modules:import-error
plugins/modules/tower_host.py validate-modules:import-error
plugins/modules/tower_inventory.py validate-modules:import-error
plugins/modules/tower_inventory_source.py validate-modules:import-error
plugins/modules/tower_job_cancel.py validate-modules:import-error
plugins/modules/tower_job_launch.py validate-modules:import-error
plugins/modules/tower_job_list.py validate-modules:import-error
plugins/modules/tower_job_wait.py validate-modules:import-error
plugins/modules/tower_job_template.py validate-modules:import-error
plugins/modules/tower_label.py validate-modules:import-error
plugins/modules/tower_notification.py validate-modules:import-error
plugins/modules/tower_organization.py validate-modules:import-error
plugins/modules/tower_project.py validate-modules:import-error
plugins/modules/tower_receive.py validate-modules:import-error
plugins/modules/tower_role.py validate-modules:import-error
plugins/modules/tower_settings.py validate-modules:import-error
plugins/modules/tower_send.py validate-modules:import-error
plugins/modules/tower_team.py validate-modules:import-error
plugins/modules/tower_user.py validate-modules:import-error
plugins/modules/tower_workflow_launch.py validate-modules:import-error
plugins/modules/tower_workflow_template.py validate-modules:import-error
plugins/modules/tower_workflow_job_template.py validate-modules:import-error

View File

@ -4,19 +4,26 @@ import awxkit.exceptions as exc
notification_endpoints = ("notification_templates", "notification_templates_started", "notification_templates_error",
"notification_templates_success")
wfjt_notification_endpoints = notification_endpoints + ('notification_templates_approvals',)
class HasNotifications(object):
def add_notification_template(self, notification_template, endpoint="notification_templates_success"):
    """Associate *notification_template* with this resource via *endpoint*.

    Workflow job templates additionally support the
    'notification_templates_approvals' endpoint; any endpoint not valid
    for this resource type raises ValueError.
    """
    # Imported here (not at module level) to avoid a circular import.
    from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
    supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) \
        else notification_endpoints
    if endpoint not in supported_endpoints:
        # Report the endpoints actually valid for *this* resource type,
        # not the generic list that omits the WFJT approvals endpoint.
        raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
                         .format(endpoint, supported_endpoints))
    # A successful association returns 204 No Content, which awxkit raises.
    with suppress(exc.NoContent):
        self.related[endpoint].post(dict(id=notification_template.id))
def remove_notification_template(self, notification_template, endpoint="notification_templates_success"):
if endpoint not in notification_endpoints:
from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) \
else notification_endpoints
if endpoint not in supported_endpoints:
raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
.format(endpoint, notification_endpoints))
with suppress(exc.NoContent):