Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 12:04:44 -03:30)

Compare commits (82 commits)
Commits in this comparison:

37ee95314a, 28c3fa517e, 3dd21d720e, 9cfecb5590, 2742612be9, 4f4a4e2394,
edd9972435, 9fdec9b31b, a93ee86581, 020246736c, 8d3ce206cd, 28e27c5196,
c56352daa4, 5eea4e8881, 58c821f3e1, 5cad0d243a, 0aaa2d8c8d, 921feb561d,
5b0bb4939f, 144cffe009, af11055e5c, c0cb546c3c, a800c8cd00, f8a23f20aa,
46edd151e0, ba4b6bdbb7, 1e24d8b5fa, 41586ea3a6, ded5577832, cce5f26e34,
1940c834cb, 08381577f5, 669d67b8fb, 8a0be5b111, 9e30f004d3, 62bf61b2a2,
f62dfe85cc, 97acba8fe9, cec7cb393d, e9b254b9d2, 222fecc5f6, c833676863,
7e9835f6ee, 5940f6de2c, a899a147e1, e0c8f3e541, 68a0bbe125, 8592bf3e39,
4787e69afb, 8f5afc83ce, b1a90d445b, 8954e6e556, 7bfc99a615, f159a6508e,
4d7b5adf12, 6e648cf72f, 24a50ea076, 2d2add009b, fd068695ef, b19360ac9b,
7c3c1f5a29, a902afcf73, 501568340b, 1d32917ceb, 2d455800c4, 37491fa4b9,
f41852c3ee, b565ed2077, 86bafb52f6, 11b1d0e84c, f47325a532, 1a261782c7,
5a1599b440, 72248db76d, 21268b779f, 8926f635df, e19194b883, d30ecb6fb3,
29a582f869, 2524e8af47, f957ef7249, 4551859248
@@ -2,6 +2,15 @@
 This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.
 
+## 11.1.0 (Apr 22, 2020)
+- Changed rsyslogd to persist queued events to disk (to prevent a risk of out-of-memory errors) (https://github.com/ansible/awx/issues/6746)
+- Added the ability to configure the destination and maximum disk size of rsyslogd spool (in the event of a log aggregator outage) (https://github.com/ansible/awx/pull/6763)
+- Added the ability to discover playbooks in project clones from symlinked directories (https://github.com/ansible/awx/pull/6773)
+- Fixed a bug that caused certain log aggregator settings to break logging integration (https://github.com/ansible/awx/issues/6760)
+- Fixed a bug that caused playbook execution in container groups to sometimes unexpectedly deadlock (https://github.com/ansible/awx/issues/6692)
+- Improved stability of the new redis clustering implementation (https://github.com/ansible/awx/pull/6739 https://github.com/ansible/awx/pull/6720)
+- Improved stability of the new rsyslogd-based logging implementation (https://github.com/ansible/awx/pull/6796)
+
 ## 11.0.0 (Apr 16, 2020)
 - As of AWX 11.0.0, Kubernetes-based deployments use a Deployment rather than a StatefulSet.
 - Reimplemented external logging support using rsyslogd to improve reliability and address a number of issues (https://github.com/ansible/awx/issues/5155)
 
@@ -82,7 +82,7 @@ The system that runs the AWX service will need to satisfy the following requirements
 
 - At least 2 cpu cores
 - At least 20GB of space
 - Running Docker, Openshift, or Kubernetes
-- If you choose to use an external PostgreSQL database, please note that the minimum version is 9.6+.
+- If you choose to use an external PostgreSQL database, please note that the minimum version is 10+.
 
 ### AWX Tunables
Makefile (3 changes)
@@ -644,7 +644,6 @@ detect-schema-change: genschema
 	diff -u -b reference-schema.json schema.json
 
 docker-compose-clean: awx/projects
-	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm -w /awx_devel --service-ports awx make clean
 	cd tools && TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose rm -sf
 
 docker-compose-build: awx-devel-build

@@ -666,7 +665,7 @@ docker-clean:
 	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq),docker stop $(container_id); docker rm -f $(container_id);)
 	docker images | grep "awx_devel" | awk '{print $$1 ":" $$2}' | xargs docker rmi
 
-docker-clean-volumes:
+docker-clean-volumes: docker-compose-clean
 	docker volume rm tools_awx_db
 
 docker-refresh: docker-clean docker-compose
@@ -45,7 +45,10 @@ from awx.main.utils import (
     get_search_fields,
     getattrd,
     get_object_or_400,
-    decrypt_field
+    decrypt_field,
+    get_awx_version,
+    get_licenser,
+    StubLicense
 )
 from awx.main.utils.db import get_all_field_names
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer

@@ -197,6 +200,8 @@ class APIView(views.APIView):
             logger.warning(status_msg)
         response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
         time_started = getattr(self, 'time_started', None)
+        response['X-API-Product-Version'] = get_awx_version()
+        response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), StubLicense) else 'Red Hat Ansible Tower'
         response['X-API-Node'] = settings.CLUSTER_HOST_ID
         if time_started:
             time_elapsed = time.time() - self.time_started
@@ -3668,7 +3668,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
                     attrs.get('survey_passwords', {}).pop(key, None)
                 else:
                     errors.setdefault('extra_vars', []).append(
-                        _('"$encrypted$ is a reserved keyword, may not be used for {var_name}."'.format(key))
+                        _('"$encrypted$ is a reserved keyword, may not be used for {}."'.format(key))
                     )
 
         # Launch configs call extra_vars extra_data for historical reasons
@@ -172,9 +172,9 @@ class URLField(CharField):
                     netloc = '{}:{}'.format(netloc, url_parts.port)
                 if url_parts.username:
                     if url_parts.password:
-                        netloc = '{}:{}@{}' % (url_parts.username, url_parts.password, netloc)
+                        netloc = '{}:{}@{}'.format(url_parts.username, url_parts.password, netloc)
                     else:
-                        netloc = '{}@{}' % (url_parts.username, netloc)
+                        netloc = '{}@{}'.format(url_parts.username, netloc)
                 value = urlparse.urlunsplit([url_parts.scheme, netloc, url_parts.path, url_parts.query, url_parts.fragment])
             except Exception:
                 raise  # If something fails here, just fall through and let the validators check it.
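The removed lines above applied the `%` operator to a `str.format`-style template. Because `'{}:{}@{}'` contains no `%` conversion specifiers, Python raises a `TypeError` instead of interpolating. A standalone sketch (the variable values are made up for illustration):

```python
username, password, netloc = 'user', 's3cret', 'example.com:443'

# Buggy pattern from the removed lines: '%' needs '%s'-style specifiers,
# so a '{}' template raises instead of substituting.
try:
    netloc = '{}:{}@{}' % (username, password, netloc)
except TypeError as exc:
    print(exc)  # not all arguments converted during string formatting

# The replacement lines pair '{}' placeholders with .format(), which works:
print('{}:{}@{}'.format(username, password, netloc))  # user:s3cret@example.com:443
```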
@@ -410,7 +410,7 @@ class SettingsWrapper(UserSettingsHolder):
         field = self.registry.get_setting_field(name)
         if field.read_only:
             logger.warning('Attempt to set read only setting "%s".', name)
-            raise ImproperlyConfigured('Setting "%s" is read only.'.format(name))
+            raise ImproperlyConfigured('Setting "{}" is read only.'.format(name))
 
         try:
             data = field.to_representation(value)
@@ -441,7 +441,7 @@ class SettingsWrapper(UserSettingsHolder):
         field = self.registry.get_setting_field(name)
         if field.read_only:
             logger.warning('Attempt to delete read only setting "%s".', name)
-            raise ImproperlyConfigured('Setting "%s" is read only.'.format(name))
+            raise ImproperlyConfigured('Setting "{}" is read only.'.format(name))
         for setting in Setting.objects.filter(key=name, user__isnull=True):
             setting.delete()
             # pre_delete handler will delete from cache.
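These two hunks fix the mirror-image mistake: a `%s` template passed to `.format()`. This one fails silently rather than raising, because `.format()` only substitutes `{}` placeholders and leaves `%s` untouched. A quick illustration (the setting name is arbitrary):

```python
name = 'LOG_AGGREGATOR_HOST'  # any setting name; value is illustrative

# Old: '%s' is inert under .format(), so the message keeps a literal "%s".
print('Setting "%s" is read only.'.format(name))  # Setting "%s" is read only.

# New: '{}' is substituted as intended.
print('Setting "{}" is read only.'.format(name))  # Setting "LOG_AGGREGATOR_HOST" is read only.
```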
@@ -230,7 +230,9 @@ def query_info(since, collection_type):
 @table_version('events_table.csv', '1.1')
 @table_version('unified_jobs_table.csv', '1.0')
 @table_version('unified_job_template_table.csv', '1.0')
-def copy_tables(since, full_path):
+@table_version('workflow_job_node_table.csv', '1.0')
+@table_version('workflow_job_template_node_table.csv', '1.0')
+def copy_tables(since, full_path, subset=None):
     def _copy_table(table, query, path):
         file_path = os.path.join(path, table + '_table.csv')
         file = open(file_path, 'w', encoding='utf-8')

@@ -262,7 +264,8 @@ def copy_tables(since, full_path):
                       FROM main_jobevent
                       WHERE main_jobevent.created > {}
                       ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
-    _copy_table(table='events', query=events_query, path=full_path)
+    if not subset or 'events' in subset:
+        _copy_table(table='events', query=events_query, path=full_path)
 
     unified_job_query = '''COPY (SELECT main_unifiedjob.id,
                                         main_unifiedjob.polymorphic_ctype_id,

@@ -290,7 +293,8 @@ def copy_tables(since, full_path):
                            WHERE (main_unifiedjob.created > {0} OR main_unifiedjob.finished > {0})
                            AND main_unifiedjob.launch_type != 'sync'
                            ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
-    _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
+    if not subset or 'unified_jobs' in subset:
+        _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
 
     unified_job_template_query = '''COPY (SELECT main_unifiedjobtemplate.id,
                                                  main_unifiedjobtemplate.polymorphic_ctype_id,

@@ -309,6 +313,71 @@ def copy_tables(since, full_path):
                                           main_unifiedjobtemplate.status
                                    FROM main_unifiedjobtemplate, django_content_type
                                    WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
-                                   ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
-    _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
+                                   ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
+    if not subset or 'unified_job_template' in subset:
+        _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
+
+    workflow_job_node_query = '''COPY (SELECT main_workflowjobnode.id,
+                                              main_workflowjobnode.created,
+                                              main_workflowjobnode.modified,
+                                              main_workflowjobnode.job_id,
+                                              main_workflowjobnode.unified_job_template_id,
+                                              main_workflowjobnode.workflow_job_id,
+                                              main_workflowjobnode.inventory_id,
+                                              success_nodes.nodes AS success_nodes,
+                                              failure_nodes.nodes AS failure_nodes,
+                                              always_nodes.nodes AS always_nodes,
+                                              main_workflowjobnode.do_not_run,
+                                              main_workflowjobnode.all_parents_must_converge
+                                 FROM main_workflowjobnode
+                                 LEFT JOIN (
+                                     SELECT from_workflowjobnode_id, ARRAY_AGG(to_workflowjobnode_id) AS nodes
+                                     FROM main_workflowjobnode_success_nodes
+                                     GROUP BY from_workflowjobnode_id
+                                 ) success_nodes ON main_workflowjobnode.id = success_nodes.from_workflowjobnode_id
+                                 LEFT JOIN (
+                                     SELECT from_workflowjobnode_id, ARRAY_AGG(to_workflowjobnode_id) AS nodes
+                                     FROM main_workflowjobnode_failure_nodes
+                                     GROUP BY from_workflowjobnode_id
+                                 ) failure_nodes ON main_workflowjobnode.id = failure_nodes.from_workflowjobnode_id
+                                 LEFT JOIN (
+                                     SELECT from_workflowjobnode_id, ARRAY_AGG(to_workflowjobnode_id) AS nodes
+                                     FROM main_workflowjobnode_always_nodes
+                                     GROUP BY from_workflowjobnode_id
+                                 ) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
+                                 WHERE main_workflowjobnode.modified > {}
+                                 ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
+    if not subset or 'workflow_job_node' in subset:
+        _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)
+
+    workflow_job_template_node_query = '''COPY (SELECT main_workflowjobtemplatenode.id,
+                                                       main_workflowjobtemplatenode.created,
+                                                       main_workflowjobtemplatenode.modified,
+                                                       main_workflowjobtemplatenode.unified_job_template_id,
+                                                       main_workflowjobtemplatenode.workflow_job_template_id,
+                                                       main_workflowjobtemplatenode.inventory_id,
+                                                       success_nodes.nodes AS success_nodes,
+                                                       failure_nodes.nodes AS failure_nodes,
+                                                       always_nodes.nodes AS always_nodes,
+                                                       main_workflowjobtemplatenode.all_parents_must_converge
+                                          FROM main_workflowjobtemplatenode
+                                          LEFT JOIN (
+                                              SELECT from_workflowjobtemplatenode_id, ARRAY_AGG(to_workflowjobtemplatenode_id) AS nodes
+                                              FROM main_workflowjobtemplatenode_success_nodes
+                                              GROUP BY from_workflowjobtemplatenode_id
+                                          ) success_nodes ON main_workflowjobtemplatenode.id = success_nodes.from_workflowjobtemplatenode_id
+                                          LEFT JOIN (
+                                              SELECT from_workflowjobtemplatenode_id, ARRAY_AGG(to_workflowjobtemplatenode_id) AS nodes
+                                              FROM main_workflowjobtemplatenode_failure_nodes
+                                              GROUP BY from_workflowjobtemplatenode_id
+                                          ) failure_nodes ON main_workflowjobtemplatenode.id = failure_nodes.from_workflowjobtemplatenode_id
+                                          LEFT JOIN (
+                                              SELECT from_workflowjobtemplatenode_id, ARRAY_AGG(to_workflowjobtemplatenode_id) AS nodes
+                                              FROM main_workflowjobtemplatenode_always_nodes
+                                              GROUP BY from_workflowjobtemplatenode_id
+                                          ) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
+                                          ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
+    if not subset or 'workflow_job_template_node' in subset:
+        _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)
 
     return
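With the new `subset` argument, callers can export a single table instead of all of them; the unified job test further down exercises exactly this path. A minimal usage sketch (the timestamp and output directory are illustrative):

```python
import tempfile

from django.utils.timezone import now

from awx.main.analytics import collectors

# Only table names appearing in `subset` are copied; passing None keeps
# the old export-everything behavior.
with tempfile.TemporaryDirectory() as tmpdir:
    collectors.copy_tables(now(), tmpdir, subset="unified_jobs")
    # Produces <tmpdir>/unified_jobs_table.csv and skips the other tables.
```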
@@ -787,6 +787,29 @@ register(
     category=_('Logging'),
     category_slug='logging',
 )
+register(
+    'LOG_AGGREGATOR_MAX_DISK_USAGE_GB',
+    field_class=fields.IntegerField,
+    default=1,
+    min_value=1,
+    label=_('Maximum disk persistance for external log aggregation (in GB)'),
+    help_text=_('Amount of data to store (in gigabytes) during an outage of '
+                'the external log aggregator (defaults to 1). '
+                'Equivalent to the rsyslogd queue.maxdiskspace setting.'),
+    category=_('Logging'),
+    category_slug='logging',
+)
+register(
+    'LOG_AGGREGATOR_MAX_DISK_USAGE_PATH',
+    field_class=fields.CharField,
+    default='/var/lib/awx',
+    label=_('File system location for rsyslogd disk persistence'),
+    help_text=_('Location to persist logs that should be retried after an outage '
+                'of the external log aggregator (defaults to /var/lib/awx). '
+                'Equivalent to the rsyslogd queue.spoolDirectory setting.'),
+    category=_('Logging'),
+    category_slug='logging',
+)
 
 
 register(
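Because both settings register under the `logging` category slug, they are editable at runtime like the other log aggregator settings. A hedged sketch, assuming the standard AWX settings endpoint for that slug and an admin token (the host and token are placeholders, not taken from this diff):

```python
import requests

# Assumption: AWX exposes writable settings at /api/v2/settings/<slug>/.
resp = requests.patch(
    "https://awx.example.com/api/v2/settings/logging/",
    headers={"Authorization": "Bearer <token>"},
    json={
        "LOG_AGGREGATOR_MAX_DISK_USAGE_GB": 5,                 # rsyslogd queue.maxdiskspace
        "LOG_AGGREGATOR_MAX_DISK_USAGE_PATH": "/var/lib/awx",  # queue.spoolDirectory
    },
)
resp.raise_for_status()
```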
@@ -95,19 +95,17 @@ class BroadcastConsumer(AsyncJsonWebsocketConsumer):
         try:
             WebsocketSecretAuthHelper.is_authorized(self.scope)
         except Exception:
-            # TODO: log ip of connected client
-            logger.warn("Broadcast client failed to authorize for reason.")
+            logger.warn(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.")
             await self.close()
             return
 
-        # TODO: log ip of connected client
-        logger.info(f"Broadcast client connected.")
         await self.accept()
         await self.channel_layer.group_add(settings.BROADCAST_WEBSOCKET_GROUP_NAME, self.channel_name)
+        logger.info(f"client '{self.channel_name}' joined the broadcast group.")
 
     async def disconnect(self, code):
-        # TODO: log ip of disconnected client
-        logger.info("Client disconnected")
+        logger.info(f"client '{self.channel_name}' disconnected from the broadcast group.")
         await self.channel_layer.group_discard(settings.BROADCAST_WEBSOCKET_GROUP_NAME, self.channel_name)
 
     async def internal_message(self, event):
         await self.send(event['text'])

@@ -132,6 +130,14 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
             await self.send_json({"close": True})
             await self.close()
 
+    async def disconnect(self, code):
+        current_groups = set(self.scope['session'].pop('groups') if 'groups' in self.scope['session'] else [])
+        for group_name in current_groups:
+            await self.channel_layer.group_discard(
+                group_name,
+                self.channel_name,
+            )
+
     @database_sync_to_async
     def user_can_see_object_id(self, user_access, oid):
         # At this point user is a channels.auth.UserLazyObject object

@@ -189,7 +195,6 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
             group_name,
             self.channel_name
         )
-        logger.debug(f"Channel {self.channel_name} left groups {old_groups} and joined {new_groups_exclusive}")
         self.scope['session']['groups'] = new_groups
         await self.send_json({
             "groups_current": list(new_groups),
@@ -22,7 +22,7 @@ class Scheduler(Scheduler):
 
 def run():
     ppid = os.getppid()
-    logger.warn(f'periodic beat started')
+    logger.warn('periodic beat started')
     while True:
         if os.getppid() != ppid:
             # if the parent PID changes, this process has been orphaned
@@ -123,9 +123,9 @@ class AWXConsumerRedis(AWXConsumerBase):
                 res = json.loads(res[1])
                 self.process_task(res)
             except redis.exceptions.RedisError:
-                logger.exception(f"encountered an error communicating with redis")
+                logger.exception("encountered an error communicating with redis")
             except (json.JSONDecodeError, KeyError):
-                logger.exception(f"failed to decode JSON message from redis")
+                logger.exception("failed to decode JSON message from redis")
             if self.should_stop:
                 return
@@ -91,7 +91,7 @@ class CallbackBrokerWorker(BaseWorker):
                 for e in events:
                     try:
                         if (
-                            isinstance(exc, IntegrityError),
+                            isinstance(exc, IntegrityError) and
                             getattr(e, 'host_id', '')
                         ):
                             # this is one potential IntegrityError we can
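The removed line is a classic Python pitfall: the trailing comma made the condition a two-element tuple, and any non-empty tuple is truthy, so the branch ran regardless of the exception type. A self-contained illustration (the names are placeholders):

```python
exc = ValueError("not an integrity error")
host_id = ""

# Old form: the comma builds a tuple; bool() of a non-empty tuple is
# always True, even when every element is falsy.
if (isinstance(exc, KeyError), host_id):
    print("taken unconditionally")

# New form: 'and' actually combines the two tests.
if isinstance(exc, KeyError) and host_id:
    print("never reached with these values")
```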
@@ -199,7 +199,7 @@ class ProjectOptions(models.Model):
         results = []
         project_path = self.get_project_path()
         if project_path:
-            for dirpath, dirnames, filenames in os.walk(smart_str(project_path)):
+            for dirpath, dirnames, filenames in os.walk(smart_str(project_path), followlinks=True):
                 if skip_directory(dirpath):
                     continue
                 for filename in filenames:
@@ -123,7 +123,7 @@ class SimpleDAG(object):
         self.root_nodes.discard(to_obj_ord)
 
         if from_obj_ord is None and to_obj_ord is None:
-            raise LookupError("From object {} and to object not found".format(from_obj, to_obj))
+            raise LookupError("From object {} and to object {} not found".format(from_obj, to_obj))
         elif from_obj_ord is None:
             raise LookupError("From object not found {}".format(from_obj))
         elif to_obj_ord is None:
@@ -226,7 +226,7 @@ class TaskManager():
                 # non-Ansible jobs on isolated instances run on controller
                 task.instance_group = rampart_group.controller
                 task.execution_node = random.choice(list(rampart_group.controller.instances.all().values_list('hostname', flat=True)))
-                logger.debug('Submitting isolated {} to queue {}.'.format(
+                logger.debug('Submitting isolated {} to queue {} on node {}.'.format(
                     task.log_format, task.instance_group.name, task.execution_node))
             elif controller_node:
                 task.instance_group = rampart_group
@@ -220,7 +220,7 @@ def create_job_template(name, roles=None, persisted=True, webhook_service='', **kwargs):
     if 'organization' in kwargs:
         org = kwargs['organization']
         if type(org) is not Organization:
-            org = mk_organization(org, '%s-desc'.format(org), persisted=persisted)
+            org = mk_organization(org, org, persisted=persisted)
 
     if 'credential' in kwargs:
         cred = kwargs['credential']
@@ -298,7 +298,7 @@ def create_organization(name, roles=None, persisted=True, **kwargs):
         labels = {}
         notification_templates = {}
 
-    org = mk_organization(name, '%s-desc'.format(name), persisted=persisted)
+    org = mk_organization(name, name, persisted=persisted)
 
     if 'inventories' in kwargs:
         for i in kwargs['inventories']:
@@ -12,6 +12,9 @@ from awx.main.analytics import collectors
 from awx.main.models import (
     ProjectUpdate,
     InventorySource,
+    WorkflowJob,
+    WorkflowJobNode,
+    JobTemplate,
 )
 
 
@@ -19,60 +22,139 @@ from awx.main.models import (
 def sqlite_copy_expert(request):
     # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
     # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix='copied_tables')
+    path = tempfile.mkdtemp(prefix="copied_tables")
 
     def write_stdout(self, sql, fd):
         # Would be cool if we instead properly disected the SQL query and verified
         # it that way. But instead, we just take the nieve approach here.
-        assert sql.startswith('COPY (')
-        assert sql.endswith(') TO STDOUT WITH CSV HEADER')
+        assert sql.startswith("COPY (")
+        assert sql.endswith(") TO STDOUT WITH CSV HEADER")
 
-        sql = sql.replace('COPY (', '')
-        sql = sql.replace(') TO STDOUT WITH CSV HEADER', '')
+        sql = sql.replace("COPY (", "")
+        sql = sql.replace(") TO STDOUT WITH CSV HEADER", "")
+        # sqlite equivalent
+        sql = sql.replace("ARRAY_AGG", "GROUP_CONCAT")
 
         # Remove JSON style queries
         # TODO: could replace JSON style queries with sqlite kind of equivalents
         sql_new = []
-        for line in sql.split('\n'):
-            if line.find('main_jobevent.event_data::') == -1:
+        for line in sql.split("\n"):
+            if line.find("main_jobevent.event_data::") == -1:
                 sql_new.append(line)
-            elif not line.endswith(','):
-                sql_new[-1] = sql_new[-1].rstrip(',')
-        sql = '\n'.join(sql_new)
+            elif not line.endswith(","):
+                sql_new[-1] = sql_new[-1].rstrip(",")
+        sql = "\n".join(sql_new)
 
         self.execute(sql)
         results = self.fetchall()
         headers = [i[0] for i in self.description]
 
-        csv_handle = csv.writer(fd, delimiter=',', quoting=csv.QUOTE_ALL, escapechar='\\', lineterminator='\n')
+        csv_handle = csv.writer(
+            fd,
+            delimiter=",",
+            quoting=csv.QUOTE_ALL,
+            escapechar="\\",
+            lineterminator="\n",
+        )
         csv_handle.writerow(headers)
         csv_handle.writerows(results)
 
-    setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
+    setattr(SQLiteCursorWrapper, "copy_expert", write_stdout)
     request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
+    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, "copy_expert"))
     return path
 
 
 @pytest.mark.django_db
-def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
-    '''
+def test_copy_tables_unified_job_query(
+    sqlite_copy_expert, project, inventory, job_template
+):
+    """
     Ensure that various unified job types are in the output of the query.
-    '''
+    """
 
     time_start = now()
-    inv_src = InventorySource.objects.create(name="inventory_update1", inventory=inventory, source='gce')
+    inv_src = InventorySource.objects.create(
+        name="inventory_update1", inventory=inventory, source="gce"
+    )
 
-    project_update_name = ProjectUpdate.objects.create(project=project, name="project_update1").name
+    project_update_name = ProjectUpdate.objects.create(
+        project=project, name="project_update1"
+    ).name
     inventory_update_name = inv_src.create_unified_job().name
     job_name = job_template.create_unified_job().name
 
     with tempfile.TemporaryDirectory() as tmpdir:
-        collectors.copy_tables(time_start, tmpdir)
-        with open(os.path.join(tmpdir, 'unified_jobs_table.csv')) as f:
-            lines = ''.join([l for l in f])
+        collectors.copy_tables(time_start, tmpdir, subset="unified_jobs")
+        with open(os.path.join(tmpdir, "unified_jobs_table.csv")) as f:
+            lines = "".join([l for l in f])
 
     assert project_update_name in lines
     assert inventory_update_name in lines
     assert job_name in lines
 
 
+@pytest.fixture
+def workflow_job(states=["new", "new", "new", "new", "new"]):
+    """
+    Workflow topology:
+            node[0]
+             /\
+           s/  \f
+           /    \
+       node[1,5] node[3]
+          /         \
+        s/           \f
+        /             \
+     node[2]        node[4]
+    """
+    wfj = WorkflowJob.objects.create()
+    jt = JobTemplate.objects.create(name="test-jt")
+    nodes = [
+        WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=jt)
+        for i in range(0, 6)
+    ]
+    for node, state in zip(nodes, states):
+        if state:
+            node.job = jt.create_job()
+            node.job.status = state
+            node.job.save()
+            node.save()
+    nodes[0].success_nodes.add(nodes[1])
+    nodes[0].success_nodes.add(nodes[5])
+    nodes[1].success_nodes.add(nodes[2])
+    nodes[0].failure_nodes.add(nodes[3])
+    nodes[3].failure_nodes.add(nodes[4])
+    return wfj
+
+
+@pytest.mark.django_db
+def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
+    time_start = now()
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        collectors.copy_tables(time_start, tmpdir, subset="workflow_job_node_query")
+        with open(os.path.join(tmpdir, "workflow_job_node_table.csv")) as f:
+            reader = csv.reader(f)
+            # Pop the headers
+            next(reader)
+            lines = [l for l in reader]
+
+            ids = [int(l[0]) for l in lines]
+
+            assert ids == list(
+                workflow_job.workflow_nodes.all().values_list("id", flat=True)
+            )
+
+            for index, relationship in zip(
+                [7, 8, 9], ["success_nodes", "failure_nodes", "always_nodes"]
+            ):
+                for i, l in enumerate(lines):
+                    related_nodes = (
+                        [int(e) for e in l[index].split(",")] if l[index] else []
+                    )
+                    assert related_nodes == list(
+                        getattr(workflow_job.workflow_nodes.all()[i], relationship)
+                        .all()
+                        .values_list("id", flat=True)
+                    ), f"(right side) workflow_nodes.all()[{i}].{relationship}.all()"
@@ -64,6 +64,7 @@ def could_be_playbook(project_path, dir_path, filename):
                 matched = True
                 break
     except IOError:
+        logger.exception(f'failed to open {playbook_path}')
        return None
     if not matched:
        return None
@@ -56,6 +56,7 @@ __all__ = [
     'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
     'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
     'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
+    'StubLicense'
 ]
 
 
@@ -1,3 +1,5 @@
+import os
+
 import urllib.parse as urlparse
 
 from django.conf import settings

@@ -13,15 +15,26 @@ def construct_rsyslog_conf_template(settings=settings):
     port = getattr(settings, 'LOG_AGGREGATOR_PORT', '')
     protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', '')
     timeout = getattr(settings, 'LOG_AGGREGATOR_TCP_TIMEOUT', 5)
+    max_disk_space = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_GB', 1)
+    spool_directory = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_PATH', '/var/lib/awx').rstrip('/')
+
+    if not os.access(spool_directory, os.W_OK):
+        spool_directory = '/var/lib/awx'
+
     max_bytes = settings.MAX_EVENT_RES_DATA
     parts.extend([
         '$WorkDirectory /var/lib/awx/rsyslog',
         f'$MaxMessageSize {max_bytes}',
         '$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf',
+        f'main_queue(queue.spoolDirectory="{spool_directory}" queue.maxdiskspace="{max_disk_space}g" queue.type="Disk" queue.filename="awx-external-logger-backlog")',  # noqa
         'module(load="imuxsock" SysSock.Use="off")',
         'input(type="imuxsock" Socket="' + settings.LOGGING['handlers']['external_logger']['address'] + '" unlink="on")',
         'template(name="awx" type="string" string="%rawmsg-after-pri%")',
     ])
+
+    def escape_quotes(x):
+        return x.replace('"', '\\"')
+
     if not enabled:
         parts.append('action(type="omfile" file="/dev/null")')  # rsyslog needs *at least* one valid action to start
     tmpl = '\n'.join(parts)

@@ -35,7 +48,7 @@ def construct_rsyslog_conf_template(settings=settings):
     host = '%s://%s' % (scheme, host) if scheme else '//%s' % host
     parsed = urlparse.urlsplit(host)
 
-    host = parsed.hostname
+    host = escape_quotes(parsed.hostname)
     try:
         if parsed.port:
             port = parsed.port

@@ -64,8 +77,8 @@ def construct_rsyslog_conf_template(settings=settings):
     if parsed.query:
         path = f'{path}?{urlparse.quote(parsed.query)}'
     params.append(f'restpath="{path}"')
-    username = getattr(settings, 'LOG_AGGREGATOR_USERNAME', '')
-    password = getattr(settings, 'LOG_AGGREGATOR_PASSWORD', '')
+    username = escape_quotes(getattr(settings, 'LOG_AGGREGATOR_USERNAME', ''))
+    password = escape_quotes(getattr(settings, 'LOG_AGGREGATOR_PASSWORD', ''))
     if getattr(settings, 'LOG_AGGREGATOR_TYPE', None) == 'splunk':
         # splunk has a weird authorization header <shrug>
         if password:
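Plugging the shipped defaults into the f-string added above shows the disk-queue directive rsyslogd will receive. A sketch for illustration; the values are the registered defaults, not output captured from a live install:

```python
max_disk_space = 1                 # LOG_AGGREGATOR_MAX_DISK_USAGE_GB default
spool_directory = '/var/lib/awx'   # LOG_AGGREGATOR_MAX_DISK_USAGE_PATH default

print(f'main_queue(queue.spoolDirectory="{spool_directory}" '
      f'queue.maxdiskspace="{max_disk_space}g" '
      f'queue.type="Disk" queue.filename="awx-external-logger-backlog")')
# -> main_queue(queue.spoolDirectory="/var/lib/awx" queue.maxdiskspace="1g"
#               queue.type="Disk" queue.filename="awx-external-logger-backlog")
```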
@@ -70,7 +70,7 @@ class WebsocketTask():
 
     async def connect(self, attempt):
         from awx.main.consumers import WebsocketSecretAuthHelper  # noqa
-        logger.debug(f"{self.name} connect attempt {attempt} to {self.remote_host}")
+        logger.debug(f"Connection from {self.name} to {self.remote_host} attempt number {attempt}.")
 
         '''
         Can not put get_channel_layer() in the init code because it is in the init

@@ -83,7 +83,7 @@ class WebsocketTask():
             if attempt > 0:
                 await asyncio.sleep(settings.BROADCAST_WEBSOCKET_RECONNECT_RETRY_RATE_SECONDS)
         except asyncio.CancelledError:
-            logger.warn(f"{self.name} connection to {self.remote_host} cancelled")
+            logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled")
             raise
 
         uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/{self.endpoint}/"

@@ -94,22 +94,25 @@ class WebsocketTask():
             async with aiohttp.ClientSession(headers={'secret': secret_val},
                                              timeout=timeout) as session:
                 async with session.ws_connect(uri, ssl=self.verify_ssl, heartbeat=20) as websocket:
+                    logger.info(f"Connection from {self.name} to {self.remote_host} established.")
                     self.stats.record_connection_established()
                     attempt = 0
                     await self.run_loop(websocket)
         except asyncio.CancelledError:
             # TODO: Check if connected and disconnect
             # Possibly use run_until_complete() if disconnect is async
-            logger.warn(f"{self.name} connection to {self.remote_host} cancelled")
+            logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled.")
             self.stats.record_connection_lost()
             raise
         except client_exceptions.ClientConnectorError as e:
-            logger.warn(f"Failed to connect to {self.remote_host}: '{e}'. Reconnecting ...")
+            logger.warn(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.")
         except asyncio.TimeoutError:
-            logger.warn(f"Timeout while trying to connect to {self.remote_host}. Reconnecting ...")
+            logger.warn(f"Connection from {self.name} to {self.remote_host} timed out.")
         except Exception as e:
             # Early on, this is our canary. I'm not sure what exceptions we can really encounter.
-            logger.warn(f"Websocket broadcast client exception {type(e)} {e}")
+            logger.warn(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.")
+        else:
+            logger.warn(f"Connection from {self.name} to {self.remote_host} lost.")
 
         self.stats.record_connection_lost()
         self.start(attempt=attempt + 1)

@@ -160,9 +163,9 @@ class BroadcastWebsocketManager(object):
         new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)
 
         if deleted_remote_hosts:
-            logger.warn(f"{self.local_hostname} going to remove {deleted_remote_hosts} from the websocket broadcast list")
+            logger.warn(f"Removing {deleted_remote_hosts} from websocket broadcast list")
         if new_remote_hosts:
-            logger.warn(f"{self.local_hostname} going to add {new_remote_hosts} to the websocket broadcast list")
+            logger.warn(f"Adding {new_remote_hosts} to websocket broadcast list")
 
         for h in deleted_remote_hosts:
             self.broadcast_tasks[h].cancel()
@@ -38,6 +38,7 @@
     recursive: true
     set_remote_user: false
     rsync_opts:
+      - "--blocking-io"
       - "--rsh=$RSH"
   environment:
     RSH: "oc rsh --config={{ ansible_kubectl_config }}"

@@ -51,6 +52,7 @@
     mode: pull
     set_remote_user: false
     rsync_opts:
+      - "--blocking-io"
      - "--rsh=$RSH"
   environment:
     RSH: "oc rsh --config={{ ansible_kubectl_config }}"
@@ -25,6 +25,7 @@
     dest: "{{ dest }}"
     set_remote_user: false
     rsync_opts:
+      - "--blocking-io"
      - "--rsh=$RSH"
   environment:
     RSH: "oc rsh --config={{ ansible_kubectl_config }}"
@@ -936,6 +936,8 @@ LOG_AGGREGATOR_ENABLED = False
 LOG_AGGREGATOR_TCP_TIMEOUT = 5
 LOG_AGGREGATOR_VERIFY_CERT = True
 LOG_AGGREGATOR_LEVEL = 'INFO'
+LOG_AGGREGATOR_MAX_DISK_USAGE_GB = 1
+LOG_AGGREGATOR_MAX_DISK_USAGE_PATH = '/var/lib/awx'
 
 # The number of retry attempts for websocket session establishment
 # If you're encountering issues establishing websockets in clustered Tower,

@@ -1106,9 +1108,9 @@ LOGGING = {
         'handlers': ['console', 'file', 'tower_warnings'],
         'level': 'WARNING',
     },
-    'celery': {  # for celerybeat connection warnings
+    'daphne': {
         'handlers': ['console', 'file', 'tower_warnings'],
-        'level': 'WARNING',
+        'level': 'INFO',
     },
     'rest_framework.request': {
         'handlers': ['console', 'file', 'tower_warnings'],

@@ -1139,6 +1141,10 @@ LOGGING = {
     'awx.main.dispatch': {
         'handlers': ['dispatcher'],
     },
+    'awx.main.consumers': {
+        'handlers': ['console', 'file', 'tower_warnings'],
+        'level': 'INFO',
+    },
     'awx.main.wsbroadcast': {
         'handlers': ['wsbroadcast'],
     },
@@ -215,8 +215,8 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){
             dataTitle: i18n._("Source Variables"),
             dataPlacement: 'right',
             awPopOver: "<p>" + i18n._("Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables ") +
-                "<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.ini\" target=\"_blank\">" +
-                i18n._("view ec2.ini in the Ansible github repo.") + "</a></p>" +
+                "<a href=\"https://github.com/ansible-collections/community.aws/blob/master/scripts/inventory/ec2.ini\" target=\"_blank\">" +
+                i18n._("view ec2.ini in the community.aws repo.") + "</a></p>" +
                 "<p>" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
                 i18n._("JSON:") + "<br />\n" +
                 "<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +

@@ -239,8 +239,8 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){
             dataTitle: i18n._("Source Variables"),
             dataPlacement: 'right',
             awPopOver: "<p>" + i18n._("Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables ") +
-                "<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/vmware_inventory.ini\" target=\"_blank\">" +
-                i18n._("view vmware_inventory.ini in the Ansible github repo.") + "</a></p>" +
+                "<a href=\"https://github.com/ansible-collections/vmware/blob/master/scripts/inventory/vmware_inventory.ini\" target=\"_blank\">" +
+                i18n._("view vmware_inventory.ini in the vmware community repo.") + "</a></p>" +
                 "<p>" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
                 i18n._("JSON:") + "<br />\n" +
                 "<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +

@@ -314,8 +314,8 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){
             dataTitle: i18n._("Source Variables"),
             dataPlacement: 'right',
             awPopOver: "<p>" + i18n._("Override variables found in azure_rm.ini and used by the inventory update script. For a detailed description of these variables ") +
-                "<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/azure_rm.ini\" target=\"_blank\">" +
-                i18n._("view azure_rm.ini in the Ansible github repo.") + "</a></p>" +
+                "<a href=\"https://github.com/ansible-collections/community.general/blob/master/scripts/inventory/azure_rm.ini\" target=\"_blank\">" +
+                i18n._("view azure_rm.ini in the Ansible community.general github repo.") + "</a></p>" +
                 "<p>" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
                 i18n._("JSON:") + "<br />\n" +
                 "<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +
@@ -16,6 +16,7 @@ class JobTemplates extends SchedulesMixin(
     this.disassociateLabel = this.disassociateLabel.bind(this);
     this.readCredentials = this.readCredentials.bind(this);
     this.readAccessList = this.readAccessList.bind(this);
+    this.readWebhookKey = this.readWebhookKey.bind(this);
   }
 
   launch(id, data) {

@@ -82,6 +83,14 @@ class JobTemplates extends SchedulesMixin(
   destroySurvey(id) {
     return this.http.delete(`${this.baseUrl}${id}/survey_spec/`);
   }
+
+  readWebhookKey(id) {
+    return this.http.get(`${this.baseUrl}${id}/webhook_key/`);
+  }
+
+  updateWebhookKey(id) {
+    return this.http.post(`${this.baseUrl}${id}/webhook_key/`);
+  }
 }
 
 export default JobTemplates;
@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { useState, useEffect } from 'react';
 import { oneOf, bool, number, string, func } from 'prop-types';
 import { Controlled as ReactCodeMirror } from 'react-codemirror2';
 import styled from 'styled-components';

@@ -67,6 +67,20 @@ function CodeMirrorInput({
   fullHeight,
   className,
 }) {
+  // Workaround for CodeMirror bug: If CodeMirror renders in a modal on the
+  // modal's initial render, it appears as an empty box due to mis-calculated
+  // element height. Forcing an initial render before mounting <CodeMirror>
+  // fixes this.
+  const [isInitialized, setIsInitialized] = useState(false);
+  useEffect(() => {
+    if (!isInitialized) {
+      setIsInitialized(true);
+    }
+  }, [isInitialized]);
+  if (!isInitialized) {
+    return <div />;
+  }
+
   return (
     <CodeMirror
       className={`pf-c-form-control ${className}`}
@@ -2,10 +2,15 @@ import React from 'react';
 import { node, string } from 'prop-types';
 import { Trans } from '@lingui/macro';
 import { Link } from 'react-router-dom';
+import styled from 'styled-components';
 import { formatDateString } from '@util/dates';
-import Detail from './Detail';
+import _Detail from './Detail';
 import { SummaryFieldUser } from '../../types';
 
+const Detail = styled(_Detail)`
+  word-break: break-word;
+`;
+
 function UserDateDetail({ label, date, user, dataCy = null }) {
   const dateStr = formatDateString(date);
   const username = user ? user.username : '';
@@ -8,19 +8,26 @@ const QuestionCircleIcon = styled(PFQuestionCircleIcon)`
   margin-left: 10px;
 `;
 
-function FieldTooltip({ content }) {
+function FieldTooltip({ content, ...rest }) {
+  if (!content) {
+    return null;
+  }
   return (
     <Tooltip
       position="right"
       content={content}
       trigger="click mouseenter focus"
+      {...rest}
     >
       <QuestionCircleIcon />
     </Tooltip>
   );
 }
 FieldTooltip.propTypes = {
-  content: node.isRequired,
+  content: node,
 };
+FieldTooltip.defaultProps = {
+  content: null,
+};
 
 export default FieldTooltip;
@@ -1,18 +1,8 @@
 import React from 'react';
 import PropTypes from 'prop-types';
 import { useField } from 'formik';
-import {
-  FormGroup,
-  TextInput,
-  TextArea,
-  Tooltip,
-} from '@patternfly/react-core';
-import { QuestionCircleIcon as PFQuestionCircleIcon } from '@patternfly/react-icons';
-import styled from 'styled-components';
-
-const QuestionCircleIcon = styled(PFQuestionCircleIcon)`
-  margin-left: 10px;
-`;
+import { FormGroup, TextInput, TextArea } from '@patternfly/react-core';
+import FieldTooltip from './FieldTooltip';
 
 function FormField(props) {
   const {

@@ -40,15 +30,7 @@ function FormField(props) {
         isValid={isValid}
         label={label}
       >
-        {tooltip && (
-          <Tooltip
-            content={tooltip}
-            maxWidth={tooltipMaxWidth}
-            position="right"
-          >
-            <QuestionCircleIcon />
-          </Tooltip>
-        )}
+        <FieldTooltip content={tooltip} maxWidth={tooltipMaxWidth} />
         <TextArea
           id={id}
           isRequired={isRequired}

@@ -69,15 +51,7 @@ function FormField(props) {
         isValid={isValid}
         label={label}
       >
-        {tooltip && (
-          <Tooltip
-            content={tooltip}
-            maxWidth={tooltipMaxWidth}
-            position="right"
-          >
-            <QuestionCircleIcon />
-          </Tooltip>
-        )}
+        <FieldTooltip content={tooltip} maxWidth={tooltipMaxWidth} />
         <TextInput
           id={id}
           isRequired={isRequired}
@@ -8,6 +8,7 @@ import CredentialsStep from './CredentialsStep';
 import OtherPromptsStep from './OtherPromptsStep';
 import SurveyStep from './SurveyStep';
 import PreviewStep from './PreviewStep';
+import mergeExtraVars from './mergeExtraVars';
 
 function LaunchPrompt({ config, resource, onLaunch, onCancel, i18n }) {
   const steps = [];

@@ -69,9 +70,10 @@ function LaunchPrompt({ config, resource, onLaunch, onCancel, i18n }) {
     });
   }
   if (config.survey_enabled) {
+    initialValues.survey = {};
     steps.push({
       name: i18n._(t`Survey`),
-      component: <SurveyStep />,
+      component: <SurveyStep template={resource} />,
     });
   }
   steps.push({

@@ -93,7 +95,7 @@ function LaunchPrompt({ config, resource, onLaunch, onCancel, i18n }) {
     setValue('limit', values.limit);
     setValue('job_tags', values.job_tags);
     setValue('skip_tags', values.skip_tags);
-    setValue('extra_vars', values.extra_vars);
+    setValue('extra_vars', mergeExtraVars(values.extra_vars, values.survey));
     onLaunch(postValues);
   };
@@ -1,7 +1,204 @@
-import React from 'react';
+import React, { useCallback, useEffect, useState } from 'react';
+import { withI18n } from '@lingui/react';
+import { Formik, useField } from 'formik';
+import { JobTemplatesAPI, WorkflowJobTemplatesAPI } from '@api';
+import {
+  Form,
+  FormGroup,
+  Select,
+  SelectOption,
+  SelectVariant,
+} from '@patternfly/react-core';
+import FormField, { FieldTooltip } from '@components/FormField';
+import AnsibleSelect from '@components/AnsibleSelect';
+import ContentLoading from '@components/ContentLoading';
+import ContentError from '@components/ContentError';
+import useRequest from '@util/useRequest';
+import {
+  required,
+  minMaxValue,
+  maxLength,
+  minLength,
+  integer,
+  combine,
+} from '@util/validators';
 
-function InventoryStep() {
-  return <div />;
+function SurveyStep({ template, i18n }) {
+  const { result: survey, request: fetchSurvey, isLoading, error } = useRequest(
+    useCallback(async () => {
+      const { data } =
+        template.type === 'workflow_job_template'
+          ? await WorkflowJobTemplatesAPI.readSurvey(template.id)
+          : await JobTemplatesAPI.readSurvey(template.id);
+      return data;
+    }, [template])
+  );
+  useEffect(() => {
+    fetchSurvey();
+  }, [fetchSurvey]);
+
+  if (error) {
+    return <ContentError error={error} />;
+  }
+  if (isLoading || !survey) {
+    return <ContentLoading />;
+  }
+
+  const initialValues = {};
+  survey.spec.forEach(question => {
+    if (question.type === 'multiselect') {
+      initialValues[question.variable] = question.default.split('\n');
+    } else {
+      initialValues[question.variable] = question.default;
+    }
+  });
+
+  return (
+    <SurveySubForm survey={survey} initialValues={initialValues} i18n={i18n} />
+  );
 }
 
-export default InventoryStep;
+// This is a nested Formik form to perform validation on individual
+// survey questions. When changes to the inner form occur (onBlur), the
+// values for all questions are added to the outer form's `survey` field
+// as a single object.
+function SurveySubForm({ survey, initialValues, i18n }) {
+  const [, , surveyFieldHelpers] = useField('survey');
+  useEffect(() => {
+    // set survey initial values to parent form
+    surveyFieldHelpers.setValue(initialValues);
+    /* eslint-disable-next-line react-hooks/exhaustive-deps */
+  }, []);
+
+  const fieldTypes = {
+    text: TextField,
+    textarea: TextField,
+    password: TextField,
+    multiplechoice: MultipleChoiceField,
+    multiselect: MultiSelectField,
+    integer: NumberField,
+    float: NumberField,
+  };
+  return (
+    <Formik initialValues={initialValues}>
+      {({ values }) => (
+        <Form onBlur={() => surveyFieldHelpers.setValue(values)}>
+          {' '}
+          {survey.spec.map(question => {
+            const Field = fieldTypes[question.type];
+            return (
+              <Field key={question.variable} question={question} i18n={i18n} />
+            );
+          })}
+        </Form>
+      )}
+    </Formik>
+  );
+}
+
+function TextField({ question, i18n }) {
+  const validators = [
+    question.required ? required(null, i18n) : null,
+    question.min ? minLength(question.min, i18n) : null,
+    question.max ? maxLength(question.max, i18n) : null,
+  ];
+  return (
+    <FormField
+      id={`survey-question-${question.variable}`}
+      name={question.variable}
+      label={question.question_name}
+      tooltip={question.question_description}
+      isRequired={question.required}
+      validate={combine(validators)}
+      type={question.type}
+      minLength={question.min}
+      maxLength={question.max}
+    />
+  );
+}
+
+function NumberField({ question, i18n }) {
+  const validators = [
+    question.required ? required(null, i18n) : null,
+    minMaxValue(question.min, question.max, i18n),
+    question.type === 'integer' ? integer(i18n) : null,
+  ];
+  return (
+    <FormField
+      id={`survey-question-${question.variable}`}
+      name={question.variable}
+      label={question.question_name}
+      tooltip={question.question_description}
+      isRequired={question.required}
+      validate={combine(validators)}
+      type="number"
+      min={question.min}
+      max={question.max}
+    />
+  );
+}
+
+function MultipleChoiceField({ question }) {
+  const [field, meta] = useField(question.variable);
+  const id = `survey-question-${question.variable}`;
+  const isValid = !(meta.touched && meta.error);
+  return (
+    <FormGroup
+      fieldId={id}
+      helperTextInvalid={meta.error}
+      isRequired={question.required}
+      isValid={isValid}
+      label={question.question_name}
+    >
+      <FieldTooltip content={question.question_description} />
+      <AnsibleSelect
+        id={id}
+        isValid={isValid}
+        {...field}
+        data={question.choices.split('\n').map(opt => ({
+          key: opt,
+          value: opt,
+          label: opt,
+        }))}
+      />
+    </FormGroup>
+  );
+}
+
+function MultiSelectField({ question }) {
+  const [isOpen, setIsOpen] = useState(false);
+  const [field, meta, helpers] = useField(question.variable);
+  const id = `survey-question-${question.variable}`;
+  const isValid = !(meta.touched && meta.error);
+  return (
+    <FormGroup
+      fieldId={id}
+      helperTextInvalid={meta.error}
+      isRequired={question.required}
+      isValid={isValid}
+      label={question.question_name}
+    >
+      <FieldTooltip content={question.question_description} />
+      <Select
+        variant={SelectVariant.typeaheadMulti}
+        id={id}
+        onToggle={setIsOpen}
+        onSelect={(event, option) => {
+          if (field.value.includes(option)) {
+            helpers.setValue(field.value.filter(o => o !== option));
+          } else {
+            helpers.setValue(field.value.concat(option));
+          }
+        }}
+        isExpanded={isOpen}
+        selections={field.value}
+      >
+        {question.choices.split('\n').map(opt => (
+          <SelectOption key={opt} value={opt} />
+        ))}
+      </Select>
+    </FormGroup>
+  );
+}
+
+export default withI18n()(SurveyStep);
awx/ui_next/src/components/LaunchPrompt/mergeExtraVars.js (new file, 11 lines)
@@ -0,0 +1,11 @@
+import yaml from 'js-yaml';
+
+export default function mergeExtraVars(extraVars, survey = {}) {
+  const vars = yaml.safeLoad(extraVars) || {};
+  return {
+    ...vars,
+    ...survey,
+  };
+}
+
+// TODO: "safe" version that obscures passwords for preview step
@@ -0,0 +1,34 @@
+import mergeExtraVars from './mergeExtraVars';
+
+describe('mergeExtraVars', () => {
+  test('should handle yaml string', () => {
+    const yaml = '---\none: 1\ntwo: 2';
+    expect(mergeExtraVars(yaml)).toEqual({
+      one: 1,
+      two: 2,
+    });
+  });
+
+  test('should handle json string', () => {
+    const jsonString = '{"one": 1, "two": 2}';
+    expect(mergeExtraVars(jsonString)).toEqual({
+      one: 1,
+      two: 2,
+    });
+  });
+
+  test('should handle empty string', () => {
+    expect(mergeExtraVars('')).toEqual({});
+  });
+
+  test('should merge survey results into extra vars object', () => {
+    const yaml = '---\none: 1\ntwo: 2';
+    const survey = { foo: 'bar', bar: 'baz' };
+    expect(mergeExtraVars(yaml, survey)).toEqual({
+      one: 1,
+      two: 2,
+      foo: 'bar',
+      bar: 'baz',
+    });
+  });
+});
@@ -65,7 +65,13 @@ class ListHeader extends React.Component {
   }
 
   handleRemoveAll() {
-    this.pushHistoryState(null);
+    // remove everything in oldParams except for page_size and order_by
+    const { location, qsConfig } = this.props;
+    const oldParams = parseQueryString(qsConfig, location.search);
+    const oldParamsClone = { ...oldParams };
+    delete oldParamsClone.page_size;
+    delete oldParamsClone.order_by;
+    this.pushHistoryState(removeParams(qsConfig, oldParams, oldParamsClone));
   }
 
   handleSort(key, order) {
@@ -46,4 +46,69 @@ describe('ListHeader', () => {
     // since order_by = name is the default, that should be strip out of the search
     expect(history.location.search).toEqual('');
   });
+
+  test('should test clear all', () => {
+    const query = '?item.page_size=5&item.name=foo';
+    const history = createMemoryHistory({
+      initialEntries: [`/organizations/1/teams${query}`],
+    });
+    const wrapper = mountWithContexts(
+      <ListHeader
+        itemCount={7}
+        qsConfig={qsConfig}
+        searchColumns={[{ name: 'foo', key: 'foo', isDefault: true }]}
+        sortColumns={[{ name: 'foo', key: 'foo' }]}
+      />,
+      { context: { router: { history } } }
+    );
+
+    expect(history.location.search).toEqual(query);
+    const toolbar = wrapper.find('DataListToolbar');
+    toolbar.prop('clearAllFilters')();
+    expect(history.location.search).toEqual('?item.page_size=5');
+  });
+
+  test('should test handle search', () => {
+    const query = '?item.page_size=10';
+    const history = createMemoryHistory({
+      initialEntries: [`/organizations/1/teams${query}`],
+    });
+    const wrapper = mountWithContexts(
+      <ListHeader
+        itemCount={7}
+        qsConfig={qsConfig}
+        searchColumns={[{ name: 'foo', key: 'foo', isDefault: true }]}
+        sortColumns={[{ name: 'foo', key: 'foo' }]}
+      />,
+      { context: { router: { history } } }
+    );
+
+    expect(history.location.search).toEqual(query);
+    const toolbar = wrapper.find('DataListToolbar');
+    toolbar.prop('onSearch')('name__icontains', 'foo');
+    expect(history.location.search).toEqual(
+      '?item.name__icontains=foo&item.page_size=10'
+    );
+  });
+
+  test('should test handle remove', () => {
+    const query = '?item.name__icontains=foo&item.page_size=10';
+    const history = createMemoryHistory({
+      initialEntries: [`/organizations/1/teams${query}`],
+    });
+    const wrapper = mountWithContexts(
+      <ListHeader
+        itemCount={7}
+        qsConfig={qsConfig}
+        searchColumns={[{ name: 'foo', key: 'foo', isDefault: true }]}
+        sortColumns={[{ name: 'foo', key: 'foo' }]}
+      />,
+      { context: { router: { history } } }
+    );
+
+    expect(history.location.search).toEqual(query);
+    const toolbar = wrapper.find('DataListToolbar');
+    toolbar.prop('onRemove')('name__icontains', 'foo');
+    expect(history.location.search).toEqual('?item.page_size=10');
+  });
 });
@@ -3,3 +3,4 @@ export { default as InstanceGroupsLookup } from './InstanceGroupsLookup';
 export { default as InventoryLookup } from './InventoryLookup';
 export { default as ProjectLookup } from './ProjectLookup';
 export { default as MultiCredentialsLookup } from './MultiCredentialsLookup';
+export { default as CredentialLookup } from './CredentialLookup';
@@ -43,11 +43,13 @@ class PaginatedDataList extends React.Component {
     this.pushHistoryState(replaceParams(oldParams, { page: pageNumber }));
   }
 
-  handleSetPageSize(event, pageSize) {
+  handleSetPageSize(event, pageSize, page) {
     const { history, qsConfig } = this.props;
     const { search } = history.location;
     const oldParams = parseQueryString(qsConfig, search);
-    this.pushHistoryState(replaceParams(oldParams, { page_size: pageSize }));
+    this.pushHistoryState(
+      replaceParams(oldParams, { page_size: pageSize, page })
+    );
   }
 
   pushHistoryState(params) {
@@ -22,7 +22,7 @@ describe('<PaginatedDataList />', () => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
test('initially renders succesfully', () => {
|
||||
test('initially renders successfully', () => {
|
||||
mountWithContexts(
|
||||
<PaginatedDataList
|
||||
items={mockData}
|
||||
@@ -83,11 +83,11 @@ describe('<PaginatedDataList />', () => {
|
||||
);
|
||||
|
||||
const pagination = wrapper.find('Pagination');
|
||||
pagination.prop('onPerPageSelect')(null, 25);
|
||||
expect(history.location.search).toEqual('?item.page_size=25');
|
||||
pagination.prop('onPerPageSelect')(null, 25, 2);
|
||||
expect(history.location.search).toEqual('?item.page=2&item.page_size=25');
|
||||
wrapper.update();
|
||||
// since page_size = 5 is the default, that should be stripped out of the search
pagination.prop('onPerPageSelect')(null, 5);
expect(history.location.search).toEqual('');
pagination.prop('onPerPageSelect')(null, 5, 2);
expect(history.location.search).toEqual('?item.page=2');
});
});

@@ -6,8 +6,9 @@ import { Link } from 'react-router-dom';
import styled from 'styled-components';
import { toTitleCase } from '@util/strings';

import { Chip, ChipGroup } from '@patternfly/react-core';
import { Chip, ChipGroup, Divider } from '@patternfly/react-core';
import { VariablesDetail } from '@components/CodeMirrorInput';
import CredentialChip from '@components/CredentialChip';
import { DetailList, Detail, UserDateDetail } from '@components/DetailList';

import PromptProjectDetail from './PromptProjectDetail';
@@ -172,7 +173,6 @@ function PromptDetail({ i18n, resource, launchConfig = {} }) {
/>
)}

{/* TODO: Add JT, WFJT, Inventory Source Details */}
{details?.type === 'project' && (
<PromptProjectDetail resource={details} />
)}
@@ -200,6 +200,7 @@ function PromptDetail({ i18n, resource, launchConfig = {} }) {

{hasPromptData(launchConfig) && hasOverrides && (
<>
<Divider css="margin-top: var(--pf-global--spacer--lg)" />
<PromptHeader>{i18n._(t`Prompted Values`)}</PromptHeader>
<DetailList aria-label="Prompt Overrides">
{overrides?.job_type && (
@@ -211,14 +212,16 @@ function PromptDetail({ i18n, resource, launchConfig = {} }) {
{overrides?.credentials && (
<Detail
fullWidth
label={i18n._(t`Credential`)}
label={i18n._(t`Credentials`)}
rows={4}
value={
<ChipGroup numChips={5}>
{overrides.credentials.map(cred => (
<Chip key={cred.id} isReadOnly>
{cred.name}
</Chip>
<CredentialChip
key={cred.id}
credential={cred}
isReadOnly
/>
))}
</ChipGroup>
}

@@ -24,12 +24,14 @@ function PromptJobTemplateDetail({ i18n, resource }) {
job_type,
limit,
playbook,
related,
scm_branch,
skip_tags,
summary_fields,
url,
use_fact_cache,
verbosity,
webhook_key,
webhook_service,
} = resource;

const VERBOSITY = {
@@ -114,23 +116,49 @@ function PromptJobTemplateDetail({ i18n, resource }) {
value={diff_mode ? 'On' : 'Off'}
/>
<Detail label={i18n._(t` Job Slicing`)} value={job_slice_count} />
{host_config_key && (
<React.Fragment>
<Detail label={i18n._(t`Host Config Key`)} value={host_config_key} />
<Detail
label={i18n._(t`Provisioning Callback URL`)}
value={`${window.location.origin + url}callback/`}
/>
</React.Fragment>
<Detail label={i18n._(t`Host Config Key`)} value={host_config_key} />
{related?.callback && (
<Detail
label={i18n._(t`Provisioning Callback URL`)}
value={`${window.location.origin}${related.callback}`}
/>
)}
<Detail
label={i18n._(t`Webhook Service`)}
value={toTitleCase(webhook_service)}
/>
{related.webhook_receiver && (
<Detail
label={i18n._(t`Webhook URL`)}
value={`${window.location.origin}${related.webhook_receiver}`}
/>
)}
<Detail label={i18n._(t`Webhook Key`)} value={webhook_key} />
{summary_fields?.webhook_credential && (
<Detail
fullWidth
label={i18n._(t`Webhook Credential`)}
value={
<CredentialChip
key={summary_fields.webhook_credential?.id}
credential={summary_fields.webhook_credential}
isReadOnly
/>
}
/>
)}
{optionsList && <Detail label={i18n._(t`Options`)} value={optionsList} />}
{summary_fields?.credentials?.length > 0 && (
<Detail
fullWidth
label={i18n._(t`Credentials`)}
value={summary_fields.credentials.map(chip => (
<CredentialChip key={chip.id} credential={chip} isReadOnly />
))}
value={
<ChipGroup numChips={5}>
{summary_fields.credentials.map(cred => (
<CredentialChip key={cred.id} credential={cred} isReadOnly />
))}
</ChipGroup>
}
/>
)}
{summary_fields?.labels?.results?.length > 0 && (

@@ -5,6 +5,7 @@ import mockData from './data.job_template.json';

const mockJT = {
...mockData,
webhook_key: 'PiM3n2',
instance_groups: [
{
id: 1,
@@ -49,9 +50,24 @@ describe('PromptJobTemplateDetail', () => {
assertDetail('Show Changes', 'Off');
assertDetail('Job Slicing', '1');
assertDetail('Host Config Key', 'a1b2c3');
assertDetail('Webhook Service', 'Github');
assertDetail('Webhook Key', 'PiM3n2');
expect(wrapper.find('StatusIcon')).toHaveLength(2);
expect(wrapper.find('Detail[label="Webhook URL"] dd').text()).toEqual(
expect.stringContaining('/api/v2/job_templates/7/github/')
);
expect(
wrapper.find('Detail[label="Provisioning Callback URL"] dd').text()
).toEqual(expect.stringContaining('/api/v2/job_templates/7/callback/'));
expect(
wrapper
.find('Detail[label="Webhook Credential"]')
.containsAllMatchingElements([
<span>
<strong>Github Token:</strong>GitHub Cred
</span>,
])
).toEqual(true);
expect(
wrapper.find('Detail[label="Credentials"]').containsAllMatchingElements([
<span>

@@ -1,8 +1,120 @@
import React from 'react';
import { CardBody } from '@components/Card';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { Link } from 'react-router-dom';

function PromptWFJobTemplateDetail() {
return <CardBody>Coming soon :)</CardBody>;
import { Chip, ChipGroup, List, ListItem } from '@patternfly/react-core';
import CredentialChip from '@components/CredentialChip';
import { Detail } from '@components/DetailList';
import { VariablesDetail } from '@components/CodeMirrorInput';
import Sparkline from '@components/Sparkline';
import { toTitleCase } from '@util/strings';

function PromptWFJobTemplateDetail({ i18n, resource }) {
const {
allow_simultaneous,
extra_vars,
limit,
related,
scm_branch,
summary_fields,
webhook_key,
webhook_service,
} = resource;

let optionsList = '';
if (allow_simultaneous || webhook_service) {
optionsList = (
<List>
{allow_simultaneous && (
<ListItem>{i18n._(t`Enable Concurrent Jobs`)}</ListItem>
)}
{webhook_service && <ListItem>{i18n._(t`Enable Webhooks`)}</ListItem>}
</List>
);
}

const inventoryKind =
summary_fields?.inventory?.kind === 'smart'
? 'smart_inventory'
: 'inventory';

const recentJobs = summary_fields.recent_jobs.map(job => ({
...job,
type: 'job',
}));

return (
<>
{summary_fields.recent_jobs?.length > 0 && (
<Detail
value={<Sparkline jobs={recentJobs} />}
label={i18n._(t`Activity`)}
/>
)}
{summary_fields?.inventory && (
<Detail
label={i18n._(t`Inventory`)}
value={
<Link
to={`/${inventoryKind}/${summary_fields.inventory?.id}/details`}
>
{summary_fields.inventory?.name}
</Link>
}
/>
)}
<Detail label={i18n._(t`Source Control Branch`)} value={scm_branch} />
<Detail label={i18n._(t`Limit`)} value={limit} />
<Detail
label={i18n._(t`Webhook Service`)}
value={toTitleCase(webhook_service)}
/>
<Detail label={i18n._(t`Webhook Key`)} value={webhook_key} />
{related.webhook_receiver && (
<Detail
label={i18n._(t`Webhook URL`)}
value={`${window.location.origin}${related.webhook_receiver}`}
/>
)}
{optionsList && <Detail label={i18n._(t`Options`)} value={optionsList} />}
{summary_fields?.webhook_credential && (
<Detail
fullWidth
label={i18n._(t`Webhook Credential`)}
value={
<CredentialChip
key={summary_fields.webhook_credential?.id}
credential={summary_fields.webhook_credential}
isReadOnly
/>
}
/>
)}
{summary_fields?.labels?.results?.length > 0 && (
<Detail
fullWidth
label={i18n._(t`Labels`)}
value={
<ChipGroup numChips={5}>
{summary_fields.labels.results.map(label => (
<Chip key={label.id} isReadOnly>
{label.name}
</Chip>
))}
</ChipGroup>
}
/>
)}
{extra_vars && (
<VariablesDetail
label={i18n._(t`Variables`)}
rows={4}
value={extra_vars}
/>
)}
</>
);
}

export default PromptWFJobTemplateDetail;
export default withI18n()(PromptWFJobTemplateDetail);

@@ -0,0 +1,69 @@
import React from 'react';
import { mountWithContexts } from '@testUtils/enzymeHelpers';
import PromptWFJobTemplateDetail from './PromptWFJobTemplateDetail';
import mockData from './data.workflow_template.json';

const mockWF = {
...mockData,
webhook_key: 'Pim3mRXT0',
};

describe('PromptWFJobTemplateDetail', () => {
let wrapper;

beforeAll(() => {
wrapper = mountWithContexts(
<PromptWFJobTemplateDetail resource={mockWF} />
);
});

afterAll(() => {
wrapper.unmount();
});

test('should render successfully', () => {
expect(wrapper.find('PromptWFJobTemplateDetail')).toHaveLength(1);
});

test('should render expected details', () => {
function assertDetail(label, value) {
expect(wrapper.find(`Detail[label="${label}"] dt`).text()).toBe(label);
expect(wrapper.find(`Detail[label="${label}"] dd`).text()).toBe(value);
}

expect(wrapper.find('StatusIcon')).toHaveLength(1);
assertDetail('Inventory', 'Mock Smart Inv');
assertDetail('Source Control Branch', '/bar/');
assertDetail('Limit', 'hosts1,hosts2');
assertDetail('Webhook Service', 'Github');
assertDetail('Webhook Key', 'Pim3mRXT0');
expect(wrapper.find('Detail[label="Webhook URL"] dd').text()).toEqual(
expect.stringContaining('/api/v2/workflow_job_templates/47/github/')
);
expect(
wrapper
.find('Detail[label="Options"]')
.containsAllMatchingElements([
<li>Enable Concurrent Jobs</li>,
<li>Enable Webhooks</li>,
])
).toEqual(true);
expect(
wrapper
.find('Detail[label="Webhook Credential"]')
.containsAllMatchingElements([
<span>
<strong>Github Token:</strong>github
</span>,
])
).toEqual(true);
expect(
wrapper
.find('Detail[label="Labels"]')
.containsAllMatchingElements([<span>L_10o0</span>, <span>L_20o0</span>])
).toEqual(true);
expect(wrapper.find('VariablesDetail').prop('value')).toEqual(
'---\nmock: data'
);
});
});
@@ -16,6 +16,8 @@
"schedules": "/api/v2/job_templates/7/schedules/",
"activity_stream": "/api/v2/job_templates/7/activity_stream/",
"launch": "/api/v2/job_templates/7/launch/",
"webhook_key": "/api/v2/job_templates/7/webhook_key/",
"webhook_receiver": "/api/v2/job_templates/7/github/",
"notification_templates_started": "/api/v2/job_templates/7/notification_templates_started/",
"notification_templates_success": "/api/v2/job_templates/7/notification_templates_success/",
"notification_templates_error": "/api/v2/job_templates/7/notification_templates_error/",
@@ -24,7 +26,9 @@
"object_roles": "/api/v2/job_templates/7/object_roles/",
"instance_groups": "/api/v2/job_templates/7/instance_groups/",
"slice_workflow_jobs": "/api/v2/job_templates/7/slice_workflow_jobs/",
"copy": "/api/v2/job_templates/7/copy/"
"copy": "/api/v2/job_templates/7/copy/",
"callback": "/api/v2/job_templates/7/callback/",
"webhook_credential": "/api/v2/credentials/8/"
},
"summary_fields": {
"inventory": {
@@ -64,6 +68,14 @@
"status": "successful",
"failed": false
},
"webhook_credential": {
"id": 8,
"name": "GitHub Cred",
"description": "",
"kind": "github_token",
"cloud": false,
"credential_type_id": 12
},
"created_by": {
"id": 1,
"username": "admin",
@@ -123,6 +135,12 @@
"status": "successful",
"finished": "2019-10-01T14:34:35.142483Z",
"type": "job"
},
{
"id": 13,
"status": "successful",
"finished": "2019-10-01T14:34:35.142483Z",
"type": "job"
}
],
"extra_credentials": [],
@@ -174,5 +192,7 @@
"diff_mode": false,
"allow_simultaneous": true,
"custom_virtualenv": null,
"job_slice_count": 1
"job_slice_count": 1,
"webhook_service": "github",
"webhook_credential": 8
}
@@ -0,0 +1,156 @@
{
"id": 47,
"type": "workflow_job_template",
"url": "/api/v2/workflow_job_templates/47/",
"related": {
"created_by": "/api/v2/users/8/",
"modified_by": "/api/v2/users/1/",
"last_job": "/api/v2/workflow_jobs/226/",
"workflow_jobs": "/api/v2/workflow_job_templates/47/workflow_jobs/",
"schedules": "/api/v2/workflow_job_templates/47/schedules/",
"launch": "/api/v2/workflow_job_templates/47/launch/",
"webhook_key": "/api/v2/workflow_job_templates/47/webhook_key/",
"webhook_receiver": "/api/v2/workflow_job_templates/47/github/",
"workflow_nodes": "/api/v2/workflow_job_templates/47/workflow_nodes/",
"labels": "/api/v2/workflow_job_templates/47/labels/",
"activity_stream": "/api/v2/workflow_job_templates/47/activity_stream/",
"notification_templates_started": "/api/v2/workflow_job_templates/47/notification_templates_started/",
"notification_templates_success": "/api/v2/workflow_job_templates/47/notification_templates_success/",
"notification_templates_error": "/api/v2/workflow_job_templates/47/notification_templates_error/",
"notification_templates_approvals": "/api/v2/workflow_job_templates/47/notification_templates_approvals/",
"access_list": "/api/v2/workflow_job_templates/47/access_list/",
"object_roles": "/api/v2/workflow_job_templates/47/object_roles/",
"survey_spec": "/api/v2/workflow_job_templates/47/survey_spec/",
"copy": "/api/v2/workflow_job_templates/47/copy/",
"organization": "/api/v2/organizations/3/",
"webhook_credential": "/api/v2/credentials/8/"
},
"summary_fields": {
"organization": {
"id": 3,
"name": "Mock Org",
"description": ""
},
"inventory": {
"id": 7,
"name": "Mock Smart Inv",
"description": "",
"has_active_failures": false,
"total_hosts": 1,
"hosts_with_active_failures": 0,
"total_groups": 0,
"has_inventory_sources": false,
"total_inventory_sources": 0,
"inventory_sources_with_failures": 0,
"organization_id": 1,
"kind": "smart"
},
"last_job": {
"id": 226,
"name": "abc",
"description": "From Tower bulk-data script",
"finished": "2020-04-08T21:30:44.282245Z",
"status": "failed",
"failed": true
},
"last_update": {
"id": 226,
"name": "abc",
"description": "From Tower bulk-data script",
"status": "failed",
"failed": true
},
"webhook_credential": {
"id": 8,
"name": "github",
"description": "",
"kind": "github_token",
"cloud": false,
"credential_type_id": 12
},
"created_by": {
"id": 8,
"username": "user-2",
"first_name": "",
"last_name": ""
},
"modified_by": {
"id": 1,
"username": "admin",
"first_name": "",
"last_name": ""
},
"object_roles": {
"admin_role": {
"description": "Can manage all aspects of the workflow job template",
"name": "Admin",
"id": 260
},
"execute_role": {
"description": "May run the workflow job template",
"name": "Execute",
"id": 261
},
"read_role": {
"description": "May view settings for the workflow job template",
"name": "Read",
"id": 262
},
"approval_role": {
"description": "Can approve or deny a workflow approval node",
"name": "Approve",
"id": 263
}
},
"user_capabilities": {
"edit": true,
"delete": true,
"start": true,
"schedule": true,
"copy": true
},
"labels": {
"count": 2,
"results": [
{
"id": 104,
"name": "L_10o0"
},
{
"id": 105,
"name": "L_20o0"
}
]
},
"recent_jobs": [
{
"id": 226,
"status": "failed",
"finished": "2020-04-08T21:30:44.282245Z",
"canceled_on": null,
"type": "workflow_job"
}
]
},
"created": "2020-04-07T16:38:02.856877Z",
"modified": "2020-04-13T20:53:53.761355Z",
"name": "Mock Workflow",
"description": "Mock WF Description",
"last_job_run": "2020-04-08T21:30:44.282245Z",
"last_job_failed": true,
"next_job_run": null,
"status": "failed",
"extra_vars": "---\nmock: data",
"organization": 3,
"survey_enabled": false,
"allow_simultaneous": true,
"ask_variables_on_launch": false,
"inventory": 7,
"limit": "hosts1,hosts2",
"scm_branch": "/bar/",
"ask_inventory_on_launch": true,
"ask_scm_branch_on_launch": true,
"ask_limit_on_launch": true,
"webhook_service": "github",
"webhook_credential": 8
}
@@ -27,7 +27,6 @@ describe('StatusIcon', () => {
});
test('renders a successful status when host status is "ok"', () => {
const wrapper = mount(<StatusIcon status="ok" />);
wrapper.debug();
expect(wrapper).toHaveLength(1);
expect(wrapper.find('StatusIcon__SuccessfulTop')).toHaveLength(1);
expect(wrapper.find('StatusIcon__SuccessfulBottom')).toHaveLength(1);

@@ -3,7 +3,7 @@ import { useHistory } from 'react-router-dom';
import { Card, PageSection } from '@patternfly/react-core';
import { CardBody } from '@components/Card';
import JobTemplateForm from '../shared/JobTemplateForm';
import { JobTemplatesAPI } from '@api';
import { JobTemplatesAPI, OrganizationsAPI } from '@api';

function JobTemplateAdd() {
const [formSubmitError, setFormSubmitError] = useState(null);
@@ -15,11 +15,13 @@ function JobTemplateAdd() {
instanceGroups,
initialInstanceGroups,
credentials,
webhook_credential,
...remainingValues
} = values;

setFormSubmitError(null);
remainingValues.project = remainingValues.project.id;
remainingValues.webhook_credential = webhook_credential?.id;
try {
const {
data: { id, type },
@@ -36,6 +38,16 @@ function JobTemplateAdd() {
}

async function submitLabels(templateId, labels = [], orgId) {
if (!orgId) {
try {
const {
data: { results },
} = await OrganizationsAPI.read();
orgId = results[0].id;
} catch (err) {
throw err;
}
}
const associationPromises = labels.map(label =>
JobTemplatesAPI.associateLabel(templateId, label, orgId)
);

@@ -152,6 +152,10 @@ describe('<JobTemplateAdd />', () => {
project: 2,
playbook: 'Baz',
inventory: 2,
webhook_credential: undefined,
webhook_key: '',
webhook_service: '',
webhook_url: '',
});
});


@@ -100,11 +100,13 @@ class JobTemplateEdit extends Component {
instanceGroups,
initialInstanceGroups,
credentials,
webhook_credential,
...remainingValues
} = values;

this.setState({ formSubmitError: null });
remainingValues.project = values.project.id;
remainingValues.webhook_credential = webhook_credential?.id || null;
try {
await JobTemplatesAPI.update(template.id, remainingValues);
await Promise.all([

@@ -62,6 +62,12 @@ const mockJobTemplate = {
type: 'job_template',
use_fact_cache: false,
verbosity: '0',
webhook_credential: null,
webhook_key: 'webhook Key',
webhook_service: 'gitlab',
related: {
webhook_receiver: '/api/v2/workflow_job_templates/57/gitlab/',
},
};

const mockRelatedCredentials = {
@@ -245,6 +251,8 @@ describe('<JobTemplateEdit />', () => {
delete expected.summary_fields;
delete expected.id;
delete expected.type;
delete expected.related;
expected.webhook_url = `${window.location.origin}${mockJobTemplate.related.webhook_receiver}`;
expect(JobTemplatesAPI.update).toHaveBeenCalledWith(1, expected);
expect(JobTemplatesAPI.disassociateLabel).toHaveBeenCalledTimes(2);
expect(JobTemplatesAPI.associateLabel).toHaveBeenCalledTimes(4);
@@ -308,6 +316,12 @@ describe('<JobTemplateEdit />', () => {
{ id: 1, kind: 'cloud', name: 'Foo' },
{ id: 2, kind: 'ssh', name: 'Bar' },
],
webhook_credential: {
id: 7,
name: 'webhook credential',
kind: 'github_token',
credential_type_id: 12,
},
},
};
await act(async () =>

@@ -199,6 +199,7 @@ function SurveyQuestionForm({
t`Each answer choice must be on a separate line.`
)}
isRequired
rows="10"
/>
<FormField
id="question-default"

@@ -45,6 +45,12 @@ function Template({ i18n, me, setBreadcrumb }) {
role_level: 'notification_admin_role',
}),
]);
if (data.webhook_service && data?.related?.webhook_key) {
const {
data: { webhook_key },
} = await JobTemplatesAPI.readWebhookKey(templateId);
data.webhook_key = webhook_key;
}
setBreadcrumb(data);

return {

@@ -72,6 +72,7 @@ function NodeViewModal({ i18n }) {
} = useRequest(
useCallback(async () => {
let { data } = await nodeAPI?.readDetail(unifiedJobTemplate.id);

if (data?.type === 'job_template') {
const {
data: { results = [] },
@@ -79,6 +80,13 @@ function NodeViewModal({ i18n }) {
data = Object.assign(data, { instance_groups: results });
}

if (data?.related?.webhook_receiver) {
const {
data: { webhook_key },
} = await nodeAPI?.readWebhookKey(data.id);
data = Object.assign(data, { webhook_key });
}

return data;
}, [nodeAPI, unifiedJobTemplate.id]),
null

@@ -11,9 +11,23 @@ import NodeViewModal from './NodeViewModal';
jest.mock('@api/models/JobTemplates');
jest.mock('@api/models/WorkflowJobTemplates');
WorkflowJobTemplatesAPI.readLaunch.mockResolvedValue({});
WorkflowJobTemplatesAPI.readDetail.mockResolvedValue({});
WorkflowJobTemplatesAPI.readDetail.mockResolvedValue({
data: {
id: 1,
type: 'workflow_job_template',
related: {
webhook_receiver: '/api/v2/job_templates/7/gitlab/',
},
},
});
WorkflowJobTemplatesAPI.readWebhookKey.mockResolvedValue({
data: {
webhook_key: 'Pim3mRXT0',
},
});
JobTemplatesAPI.readLaunch.mockResolvedValue({});
JobTemplatesAPI.readInstanceGroups.mockResolvedValue({});
JobTemplatesAPI.readWebhookKey.mockResolvedValue({});
JobTemplatesAPI.readDetail.mockResolvedValue({
data: {
id: 1,
@@ -74,6 +88,7 @@ describe('NodeViewModal', () => {
expect(JobTemplatesAPI.readDetail).not.toHaveBeenCalled();
expect(JobTemplatesAPI.readInstanceGroups).not.toHaveBeenCalled();
expect(WorkflowJobTemplatesAPI.readLaunch).toHaveBeenCalledWith(1);
expect(WorkflowJobTemplatesAPI.readWebhookKey).toHaveBeenCalledWith(1);
});

test('Close button dispatches as expected', () => {
@@ -125,6 +140,7 @@ describe('NodeViewModal', () => {
});
waitForLoaded(wrapper);
expect(WorkflowJobTemplatesAPI.readLaunch).not.toHaveBeenCalled();
expect(JobTemplatesAPI.readWebhookKey).not.toHaveBeenCalledWith();
expect(JobTemplatesAPI.readLaunch).toHaveBeenCalledWith(1);
expect(JobTemplatesAPI.readDetail).toHaveBeenCalledWith(1);
expect(JobTemplatesAPI.readInstanceGroups).toHaveBeenCalledTimes(1);

@@ -40,6 +40,9 @@ import {
import { JobTemplatesAPI, ProjectsAPI } from '@api';
import LabelSelect from './LabelSelect';
import PlaybookSelect from './PlaybookSelect';
import WebhookSubForm from './WebhookSubForm';

const { origin } = document.location;

function JobTemplateForm({
template,
@@ -59,6 +62,10 @@ function JobTemplateForm({
Boolean(template?.host_config_key)
);

const [enableWebhooks, setEnableWebhooks] = useState(
Boolean(template.webhook_service)
);

const { values: formikValues } = useFormikContext();
const [jobTypeField, jobTypeMeta, jobTypeHelpers] = useField({
name: 'job_type',
@@ -174,7 +181,6 @@ function JobTemplateForm({
];
let callbackUrl;
if (template?.related) {
const { origin } = document.location;
const path = template.related.callback || `${template.url}callback`;
callbackUrl = `${origin}${path}`;
}
@@ -498,6 +504,25 @@ function JobTemplateForm({
setAllowCallbacks(checked);
}}
/>
<Checkbox
aria-label={i18n._(t`Enable Webhook`)}
label={
<span>
{i18n._(t`Enable Webhook`)}

<FieldTooltip
content={i18n._(
t`Enable webhook for this workflow job template.`
)}
/>
</span>
}
id="wfjt-enabled-webhooks"
isChecked={enableWebhooks}
onChange={checked => {
setEnableWebhooks(checked);
}}
/>
<CheckboxField
id="option-concurrent"
name="allow_simultaneous"
@@ -516,6 +541,7 @@ function JobTemplateForm({
</FormCheckboxLayout>
</FormGroup>
</FormFullWidthLayout>
<WebhookSubForm enableWebhooks={enableWebhooks} />
{allowCallbacks && (
<>
{callbackUrl && (
@@ -572,7 +598,7 @@ JobTemplateForm.defaultProps = {
};

const FormikApp = withFormik({
mapPropsToValues({ template = {} }) {
mapPropsToValues({ template = {}, i18n }) {
const {
summary_fields = {
labels: { results: [] },
@@ -616,6 +642,14 @@ const FormikApp = withFormik({
instanceGroups: [],
credentials: summary_fields.credentials || [],
extra_vars: template.extra_vars || '---\n',
webhook_service: template.webhook_service || '',
webhook_url: template?.related?.webhook_receiver
? `${origin}${template.related.webhook_receiver}`
: i18n._(t`a new webhook url will be generated on save.`).toUpperCase(),
webhook_key:
template.webhook_key ||
i18n._(t`a new webhook key will be generated on save.`).toUpperCase(),
webhook_credential: template?.summary_fields?.webhook_credential || null,
};
},
handleSubmit: async (values, { props, setErrors }) => {

@@ -2,6 +2,8 @@ import React from 'react';
import { act } from 'react-dom/test-utils';
import { mountWithContexts, waitForElement } from '@testUtils/enzymeHelpers';
import { sleep } from '@testUtils/testUtils';
import { Route } from 'react-router-dom';
import { createMemoryHistory } from 'history';
import JobTemplateForm from './JobTemplateForm';
import { LabelsAPI, JobTemplatesAPI, ProjectsAPI, CredentialsAPI } from '@api';

@@ -34,6 +36,10 @@ describe('<JobTemplateForm />', () => {
{ id: 2, kind: 'ssh', name: 'Bar' },
],
},
related: { webhook_receiver: '/api/v2/workflow_job_templates/57/gitlab/' },
webhook_key: 'webhook key',
webhook_service: 'github',
webhook_credential: 7,
};
const mockInstanceGroups = [
{
@@ -86,6 +92,9 @@ describe('<JobTemplateForm />', () => {
JobTemplatesAPI.readInstanceGroups.mockReturnValue({
data: { results: mockInstanceGroups },
});
JobTemplatesAPI.updateWebhookKey.mockReturnValue({
data: { webhook_key: 'webhook key' },
});
ProjectsAPI.readPlaybooks.mockReturnValue({
data: ['debug.yml'],
});
@@ -209,6 +218,123 @@ describe('<JobTemplateForm />', () => {
]);
});

test('webhooks and enable concurrent jobs functions properly', async () => {
let wrapper;
const history = createMemoryHistory({
initialEntries: ['/templates/job_template/1/edit'],
});
await act(async () => {
wrapper = mountWithContexts(
<Route
path="/templates/job_template/:id/edit"
component={() => (
<JobTemplateForm
template={mockData}
handleSubmit={jest.fn()}
handleCancel={jest.fn()}
/>
)}
/>,
{
context: {
router: {
history,
route: {
location: history.location,
match: { params: { id: 1 } },
},
},
},
}
);
});
act(() => {
wrapper.find('Checkbox[aria-label="Enable Webhook"]').invoke('onChange')(
true,
{
currentTarget: { value: true, type: 'change', checked: true },
}
);
});
wrapper.update();
expect(
wrapper.find('Checkbox[aria-label="Enable Webhook"]').prop('isChecked')
).toBe(true);

expect(
wrapper.find('input[aria-label="wfjt-webhook-key"]').prop('readOnly')
).toBe(true);
expect(
wrapper.find('input[aria-label="wfjt-webhook-key"]').prop('value')
).toBe('webhook key');
await act(() =>
wrapper.find('Button[aria-label="Update webhook key"]').prop('onClick')()
);
expect(JobTemplatesAPI.updateWebhookKey).toBeCalledWith('1');
expect(
wrapper.find('TextInputBase[aria-label="Webhook URL"]').prop('value')
).toContain('/api/v2/workflow_job_templates/57/gitlab/');

wrapper.update();

expect(wrapper.find('FormGroup[name="webhook_service"]').length).toBe(1);

await act(async () =>
wrapper.find('AnsibleSelect#webhook_service').prop('onChange')(
{},
'gitlab'
)
);
wrapper.update();

expect(wrapper.find('AnsibleSelect#webhook_service').prop('value')).toBe(
'gitlab'
);
});

test('webhooks should render properly, without data', async () => {
let wrapper;
const history = createMemoryHistory({
initialEntries: ['/templates/job_template/1/edit'],
});
await act(async () => {
wrapper = mountWithContexts(
<Route
path="/templates/job_template/:id/edit"
component={() => (
<JobTemplateForm
template={{
...mockData,
webhook_credential: null,
webhook_key: '',
webhook_service: 'github',
related: { webhook_receiver: '' },
}}
handleSubmit={jest.fn()}
handleCancel={jest.fn()}
/>
)}
/>,
{
context: {
router: {
history,
route: {
location: history.location,
match: { params: { id: 1 } },
},
},
},
}
);
});
expect(
wrapper.find('TextInputBase#template-webhook_key').prop('value')
).toBe('A NEW WEBHOOK KEY WILL BE GENERATED ON SAVE.');
expect(
wrapper.find('Button[aria-label="Update webhook key"]').prop('isDisabled')
).toBe(true);
});
test('should call handleSubmit when Submit button is clicked', async () => {
const handleSubmit = jest.fn();
let wrapper;

232 awx/ui_next/src/screens/Template/shared/WebhookSubForm.jsx Normal file
@@ -0,0 +1,232 @@
import React, { useEffect, useCallback } from 'react';
import { SyncAltIcon } from '@patternfly/react-icons';
import { useParams, useLocation } from 'react-router-dom';
import { t } from '@lingui/macro';
import { withI18n } from '@lingui/react';
import {
FormGroup,
TextInput,
InputGroup,
Button,
} from '@patternfly/react-core';
import ContentError from '@components/ContentError';
import ContentLoading from '@components/ContentLoading';
import useRequest from '@util/useRequest';
import { useField } from 'formik';
import { FormColumnLayout } from '@components/FormLayout';
import { CredentialLookup } from '@components/Lookup';
import AnsibleSelect from '@components/AnsibleSelect';
import { FieldTooltip } from '@components/FormField';
import { JobTemplatesAPI, CredentialTypesAPI } from '@api';

function WebhookSubForm({ i18n, enableWebhooks }) {
const { id, templateType } = useParams();
const { pathname } = useLocation();

const { origin } = document.location;

const [
webhookServiceField,
webhookServiceMeta,
webhookServiceHelpers,
] = useField('webhook_service');

const [webhookUrlField, webhookUrlMeta, webhookUrlHelpers] = useField(
'webhook_url'
);
const [webhookKeyField, webhookKeyMeta, webhookKeyHelpers] = useField(
'webhook_key'
);
const [
webhookCredentialField,
webhookCredentialMeta,
webhookCredentialHelpers,
] = useField('webhook_credential');

const {
request: loadCredentialType,
error,
isLoading,
result: credTypeId,
} = useRequest(
useCallback(async () => {
let results;
if (webhookServiceField.value) {
results = await CredentialTypesAPI.read({
namespace: `${webhookServiceField.value}_token`,
});
// TODO: Consider how to handle the situation where the results return
// an empty array, or any of the other values is undefined or null (data, results, id)
}
return results?.data?.results[0]?.id;
}, [webhookServiceField.value])
);

useEffect(() => {
loadCredentialType();
}, [loadCredentialType]);

useEffect(() => {
if (enableWebhooks) {
webhookServiceHelpers.setValue(webhookServiceMeta.initialValue);
webhookUrlHelpers.setValue(webhookUrlMeta.initialValue);
webhookKeyHelpers.setValue(webhookKeyMeta.initialValue);
webhookCredentialHelpers.setValue(webhookCredentialMeta.initialValue);
} else {
webhookServiceHelpers.setValue('');
webhookUrlHelpers.setValue('');
webhookKeyHelpers.setValue('');
webhookCredentialHelpers.setValue(null);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [enableWebhooks]);

const { request: fetchWebhookKey, error: webhookKeyError } = useRequest(
useCallback(async () => {
const {
data: { webhook_key: key },
} = await JobTemplatesAPI.updateWebhookKey(id);
webhookKeyHelpers.setValue(key);
}, [webhookKeyHelpers, id])
);

const changeWebhookKey = async () => {
await fetchWebhookKey();
};
const isUpdateKeyDisabled =
pathname.endsWith('/add') ||
webhookKeyMeta.initialValue ===
'A NEW WEBHOOK KEY WILL BE GENERATED ON SAVE.';
const webhookServiceOptions = [
{
value: '',
key: '',
label: i18n._(t`Choose a Webhook Service`),
isDisabled: true,
},
{
value: 'github',
key: 'github',
label: i18n._(t`GitHub`),
isDisabled: false,
},
{
value: 'gitlab',
key: 'gitlab',
label: i18n._(t`GitLab`),
isDisabled: false,
},
];

if (error || webhookKeyError) {
return <ContentError error={error} />;
}
if (isLoading) {
return <ContentLoading />;
}
return (
enableWebhooks && (
<FormColumnLayout>
<FormGroup
name="webhook_service"
fieldId="webhook_service"
helperTextInvalid={webhookServiceMeta.error}
label={i18n._(t`Webhook Service`)}
>
<FieldTooltip content={i18n._(t`Select a webhook service.`)} />
<AnsibleSelect
{...webhookServiceField}
id="webhook_service"
data={webhookServiceOptions}
onChange={(event, val) => {
webhookServiceHelpers.setValue(val);
webhookUrlHelpers.setValue(
pathname.endsWith('/add')
? i18n
._(t`a new webhook url will be generated on save.`)
.toUpperCase()
: `${origin}/api/v2/${templateType}s/${id}/${val}/`
);
if (val === webhookServiceMeta.initialValue || val === '') {
webhookKeyHelpers.setValue(webhookKeyMeta.initialValue);
webhookCredentialHelpers.setValue(
webhookCredentialMeta.initialValue
);
} else {
webhookKeyHelpers.setValue(
i18n
._(t`a new webhook key will be generated on save.`)
.toUpperCase()
);
webhookCredentialHelpers.setValue(null);
}
}}
/>
</FormGroup>
<>
<FormGroup
type="text"
fieldId="jt-webhookURL"
label={i18n._(t`Webhook URL`)}
name="webhook_url"
>
<FieldTooltip
content={i18n._(
t`Webhook services can launch jobs with this workflow job template by making a POST request to this URL.`
)}
/>
<TextInput
id="t-webhookURL"
aria-label={i18n._(t`Webhook URL`)}
value={webhookUrlField.value}
isReadOnly
/>
</FormGroup>
<FormGroup
label={i18n._(t`Webhook Key`)}
fieldId="template-webhook_key"
>
<FieldTooltip
content={i18n._(
t`Webhook services can use this as a shared secret.`
)}
/>
<InputGroup>
<TextInput
id="template-webhook_key"
isReadOnly
aria-label="wfjt-webhook-key"
value={webhookKeyField.value}
/>
<Button
isDisabled={isUpdateKeyDisabled}
variant="tertiary"
aria-label={i18n._(t`Update webhook key`)}
onClick={changeWebhookKey}
>
<SyncAltIcon />
</Button>
</InputGroup>
</FormGroup>
</>

{credTypeId && (
<CredentialLookup
label={i18n._(t`Webhook Credential`)}
tooltip={i18n._(
t`Optionally select the credential to use to send status updates back to the webhook service.`
)}
credentialTypeId={credTypeId}
onChange={value => {
webhookCredentialHelpers.setValue(value || null);
}}
isValid={!webhookCredentialMeta.error}
helperTextInvalid={webhookCredentialMeta.error}
value={webhookCredentialField.value}
/>
)}
</FormColumnLayout>
)
);
}
export default withI18n()(WebhookSubForm);
124 awx/ui_next/src/screens/Template/shared/WebhookSubForm.test.jsx Normal file
@@ -0,0 +1,124 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Route } from 'react-router-dom';
import { createMemoryHistory } from 'history';

import { mountWithContexts, waitForElement } from '@testUtils/enzymeHelpers';
import { CredentialsAPI } from '@api';
import { Formik } from 'formik';

import WebhookSubForm from './WebhookSubForm';

jest.mock('@api');

describe('<WebhooksSubForm />', () => {
let wrapper;
let history;
const initialValues = {
webhook_url: '/api/v2/job_templates/51/github/',
webhook_credential: { id: 1, name: 'Github credential' },
webhook_service: 'github',
webhook_key: 'webhook key',
};
beforeEach(async () => {
history = createMemoryHistory({
initialEntries: ['templates/job_template/51/edit'],
});
CredentialsAPI.read.mockResolvedValue({
data: { results: [{ id: 12, name: 'Github credential' }] },
});
await act(async () => {
wrapper = mountWithContexts(
<Route path="templates/:templateType/:id/edit">
<Formik initialValues={initialValues}>
<WebhookSubForm enableWebhooks />
</Formik>
</Route>,
{
context: {
router: {
history,
route: {
location: { pathname: 'templates/job_template/51/edit' },
match: { params: { id: 51, templateType: 'job_template' } },
},
},
},
}
);
});
});
afterEach(() => {
jest.clearAllMocks();
});
test('mounts properly', () => {
expect(wrapper.length).toBe(1);
});
test('should render initial values properly', () => {
waitForElement(wrapper, 'Lookup__ChipHolder', el => el.length > 0);
expect(wrapper.find('AnsibleSelect').prop('value')).toBe('github');
expect(
wrapper.find('TextInputBase[aria-label="Webhook URL"]').prop('value')
).toContain('/api/v2/job_templates/51/github/');
expect(
wrapper.find('TextInputBase[aria-label="wfjt-webhook-key"]').prop('value')
).toBe('webhook key');
expect(
wrapper
.find('Chip')
.find('span')
.text()
).toBe('Github credential');
});
test('should make other credential type available', async () => {
CredentialsAPI.read.mockResolvedValue({
data: { results: [{ id: 13, name: 'GitLab credential' }] },
});
await act(async () =>
wrapper.find('AnsibleSelect').prop('onChange')({}, 'gitlab')
);
expect(CredentialsAPI.read).toHaveBeenCalledWith({
namespace: 'gitlab_token',
});
wrapper.update();
expect(
wrapper.find('TextInputBase[aria-label="Webhook URL"]').prop('value')
).toContain('/api/v2/job_templates/51/gitlab/');
expect(
wrapper.find('TextInputBase[aria-label="wfjt-webhook-key"]').prop('value')
).toBe('A NEW WEBHOOK KEY WILL BE GENERATED ON SAVE.');
});
test('should have disabled button to update webhook key', async () => {
let newWrapper;
await act(async () => {
newWrapper = mountWithContexts(
<Route path="templates/:templateType/:id/edit">
<Formik
initialValues={{
...initialValues,
webhook_key: 'A NEW WEBHOOK KEY WILL BE GENERATED ON SAVE.',
}}
>
<WebhookSubForm enableWebhooks />
</Formik>
</Route>,
{
context: {
router: {
history,
route: {
location: { pathname: 'templates/job_template/51/edit' },
match: { params: { id: 51, templateType: 'job_template' } },
},
},
},
}
);
});
expect(
newWrapper
.find("Button[aria-label='Update webhook key']")
.prop('isDisabled')
).toBe(true);
});
});
@@ -1,121 +1,96 @@
import React, { Component } from 'react';
import React, { useEffect, useCallback } from 'react';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { Switch, Route, withRouter, Redirect, Link } from 'react-router-dom';
import {
Switch,
Route,
Redirect,
Link,
useRouteMatch,
useLocation,
} from 'react-router-dom';
import useRequest from '@util/useRequest';
import { UsersAPI } from '@api';
import { Card, CardActions, PageSection } from '@patternfly/react-core';
import { TabbedCardHeader } from '@components/Card';
import CardCloseButton from '@components/CardCloseButton';
import RoutedTabs from '@components/RoutedTabs';
import ContentError from '@components/ContentError';
import ContentLoading from '@components/ContentLoading';
import RoutedTabs from '@components/RoutedTabs';
import UserDetail from './UserDetail';
import UserEdit from './UserEdit';
import UserOrganizations from './UserOrganizations';
import UserTeams from './UserTeams';
import UserTokens from './UserTokens';
import { UsersAPI } from '@api';

class User extends Component {
constructor(props) {
super(props);
function User({ i18n, setBreadcrumb }) {
const location = useLocation();
const match = useRouteMatch('/users/:id');
const userListUrl = `/users`;
const {
result: user,
error: contentError,
isLoading,
request: fetchUser,
} = useRequest(
useCallback(async () => {
const { data } = await UsersAPI.readDetail(match.params.id);
return data;
}, [match.params.id]),
null
);

this.state = {
user: null,
hasContentLoading: true,
contentError: null,
isInitialized: false,
};
this.loadUser = this.loadUser.bind(this);
}
useEffect(() => {
fetchUser();
}, [fetchUser, location.pathname]);

async componentDidMount() {
await this.loadUser();
this.setState({ isInitialized: true });
}

async componentDidUpdate(prevProps) {
const { location, match } = this.props;
const url = `/users/${match.params.id}/`;

if (
prevProps.location.pathname.startsWith(url) &&
prevProps.location !== location &&
location.pathname === `${url}details`
) {
await this.loadUser();
useEffect(() => {
if (user) {
setBreadcrumb(user);
}
}
}, [user, setBreadcrumb]);

async loadUser() {
const { match, setBreadcrumb } = this.props;
const id = parseInt(match.params.id, 10);

this.setState({ contentError: null, hasContentLoading: true });
try {
const { data } = await UsersAPI.readDetail(id);
setBreadcrumb(data);
this.setState({ user: data });
} catch (err) {
this.setState({ contentError: err });
} finally {
this.setState({ hasContentLoading: false });
}
}

render() {
const { location, match, i18n } = this.props;

const { user, contentError, hasContentLoading, isInitialized } = this.state;

const tabsArray = [
{ name: i18n._(t`Details`), link: `${match.url}/details`, id: 0 },
{
name: i18n._(t`Organizations`),
link: `${match.url}/organizations`,
id: 1,
},
{ name: i18n._(t`Teams`), link: `${match.url}/teams`, id: 2 },
{ name: i18n._(t`Access`), link: `${match.url}/access`, id: 3 },
{ name: i18n._(t`Tokens`), link: `${match.url}/tokens`, id: 4 },
];

let cardHeader = (
<TabbedCardHeader>
<RoutedTabs tabsArray={tabsArray} />
<CardActions>
<CardCloseButton linkTo="/users" />
</CardActions>
</TabbedCardHeader>
);

if (!isInitialized) {
cardHeader = null;
}

if (location.pathname.endsWith('edit')) {
cardHeader = null;
}

if (!hasContentLoading && contentError) {
return (
<PageSection>
<Card>
<ContentError error={contentError}>
{contentError.response.status === 404 && (
<span>
{i18n._(`User not found.`)}{' '}
<Link to="/users">{i18n._(`View all Users.`)}</Link>
</span>
)}
</ContentError>
</Card>
</PageSection>
);
}
const tabsArray = [
{ name: i18n._(t`Details`), link: `${match.url}/details`, id: 0 },
{
name: i18n._(t`Organizations`),
link: `${match.url}/organizations`,
id: 1,
},
{ name: i18n._(t`Teams`), link: `${match.url}/teams`, id: 2 },
{ name: i18n._(t`Access`), link: `${match.url}/access`, id: 3 },
{ name: i18n._(t`Tokens`), link: `${match.url}/tokens`, id: 4 },
];

if (contentError) {
return (
<PageSection>
<Card>
{cardHeader}
<ContentError error={contentError}>
{contentError.response && contentError.response.status === 404 && (
<span>
{i18n._(`User not found.`)}{' '}
<Link to={userListUrl}>{i18n._(`View all Users.`)}</Link>
</span>
)}
</ContentError>
</Card>
</PageSection>
);
}
return (
<PageSection>
<Card>
{['edit'].some(name => location.pathname.includes(name)) ? null : (
<TabbedCardHeader>
<RoutedTabs tabsArray={tabsArray} />
<CardActions>
<CardCloseButton linkTo={userListUrl} />
</CardActions>
</TabbedCardHeader>
)}
{isLoading && <ContentLoading />}
{!isLoading && user && (
<Switch>
<Redirect from="/users/:id" to="/users/:id/details" exact />
{user && (
@@ -146,22 +121,19 @@ class User extends Component {
<UserTokens id={Number(match.params.id)} />
</Route>
<Route key="not-found" path="*">
{!hasContentLoading && (
<ContentError isNotFound>
{match.params.id && (
<Link to={`/users/${match.params.id}/details`}>
{i18n._(`View User Details`)}
</Link>
)}
</ContentError>
)}
<ContentError isNotFound>
{match.params.id && (
<Link to={`/users/${match.params.id}/details`}>
{i18n._(`View User Details`)}
</Link>
)}
</ContentError>
</Route>
</Switch>
</Card>
</PageSection>
);
}
)}
</Card>
</PageSection>
);
}

export default withI18n()(withRouter(User));
export { User as _User };
export default withI18n()(User);

@@ -1,4 +1,5 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { createMemoryHistory } from 'history';
import { UsersAPI } from '@api';
import { mountWithContexts, waitForElement } from '@testUtils/enzymeHelpers';
@@ -7,11 +8,6 @@ import User from './User';

jest.mock('@api');

const mockMe = {
is_super_user: true,
is_system_auditor: false,
};

async function getUsers() {
return {
count: 1,
@@ -24,29 +20,78 @@ async function getUsers() {
}

describe('<User />', () => {
test('initially renders succesfully', () => {
test('initially renders successfully', async () => {
UsersAPI.readDetail.mockResolvedValue({ data: mockDetails });
UsersAPI.read.mockImplementation(getUsers);
mountWithContexts(<User setBreadcrumb={() => {}} me={mockMe} />);
const history = createMemoryHistory({
initialEntries: ['/users/1'],
});
await act(async () => {
mountWithContexts(<User setBreadcrumb={() => {}} />, {
context: {
router: {
history,
route: {
location: history.location,
match: {
params: { id: 1 },
url: '/users/1',
path: '/users/1',
},
},
},
},
});
});
});

test('notifications tab shown for admins', async () => {
test('tabs shown for users', async () => {
UsersAPI.readDetail.mockResolvedValue({ data: mockDetails });
UsersAPI.read.mockImplementation(getUsers);

const wrapper = mountWithContexts(
<User setBreadcrumb={() => {}} me={mockMe} />
);
const history = createMemoryHistory({
initialEntries: ['/users/1'],
});
let wrapper;
await act(async () => {
wrapper = mountWithContexts(<User setBreadcrumb={() => {}} />, {
context: {
router: {
history,
route: {
location: history.location,
match: {
params: { id: 1 },
url: '/users/1',
path: '/users/1',
},
},
},
},
});
});
await waitForElement(wrapper, '.pf-c-tabs__item', el => el.length === 5);

/* eslint-disable react/button-has-type */
expect(
wrapper
.find('Tabs')
.containsAllMatchingElements([
<button aria-label="Details">Details</button>,
<button aria-label="Organizations">Organizations</button>,
<button aria-label="Teams">Teams</button>,
<button aria-label="Access">Access</button>,
<button aria-label="Tokens">Tokens</button>,
])
).toEqual(true);
});

test('should show content error when user attempts to navigate to erroneous route', async () => {
const history = createMemoryHistory({
initialEntries: ['/users/1/foobar'],
});
const wrapper = mountWithContexts(
<User setBreadcrumb={() => {}} me={mockMe} />,
{
let wrapper;
await act(async () => {
wrapper = mountWithContexts(<User setBreadcrumb={() => {}} />, {
context: {
router: {
history,
@@ -60,8 +105,8 @@ describe('<User />', () => {
},
},
},
}
);
});
});
await waitForElement(wrapper, 'ContentError', el => el.length === 1);
});
});

@@ -212,7 +212,7 @@ class UsersList extends Component {
|
||||
<UserListItem
|
||||
key={o.id}
|
||||
user={o}
|
||||
detailUrl={`${match.url}/${o.id}`}
|
||||
detailUrl={`${match.url}/${o.id}/details`}
|
||||
isSelected={selected.some(row => row.id === o.id)}
|
||||
onSelect={() => this.handleSelect(o)}
|
||||
/>
|
||||
|
||||
@@ -1,9 +1,8 @@
import React, { Fragment, useState } from 'react';
import React, { Fragment, useState, useCallback } from 'react';
import { Route, useRouteMatch, Switch } from 'react-router-dom';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';

import { Config } from '@contexts/Config';
import Breadcrumbs from '@components/Breadcrumbs/Breadcrumbs';

import UsersList from './UserList/UserList';

@@ -17,24 +16,26 @@ function Users({ i18n }) {
  });
  const match = useRouteMatch();

  const addUserBreadcrumb = user => {
    if (!user) {
      return;
    }

    setBreadcrumbConfig({
      '/users': i18n._(t`Users`),
      '/users/add': i18n._(t`Create New User`),
      [`/users/${user.id}`]: `${user.username}`,
      [`/users/${user.id}/edit`]: i18n._(t`Edit Details`),
      [`/users/${user.id}/details`]: i18n._(t`Details`),
      [`/users/${user.id}/access`]: i18n._(t`Access`),
      [`/users/${user.id}/teams`]: i18n._(t`Teams`),
      [`/users/${user.id}/organizations`]: i18n._(t`Organizations`),
      [`/users/${user.id}/tokens`]: i18n._(t`Tokens`),
    });
  };
  const addUserBreadcrumb = useCallback(
    user => {
      if (!user) {
        return;
      }

      setBreadcrumbConfig({
        '/users': i18n._(t`Users`),
        '/users/add': i18n._(t`Create New User`),
        [`/users/${user.id}`]: `${user.username}`,
        [`/users/${user.id}/edit`]: i18n._(t`Edit Details`),
        [`/users/${user.id}/details`]: i18n._(t`Details`),
        [`/users/${user.id}/access`]: i18n._(t`Access`),
        [`/users/${user.id}/teams`]: i18n._(t`Teams`),
        [`/users/${user.id}/organizations`]: i18n._(t`Organizations`),
        [`/users/${user.id}/tokens`]: i18n._(t`Tokens`),
      });
    },
    [i18n]
  );
  return (
    <Fragment>
      <Breadcrumbs breadcrumbConfig={breadcrumbConfig} />
@@ -43,11 +44,7 @@ function Users({ i18n }) {
          <UserAdd />
        </Route>
        <Route path={`${match.path}/:id`}>
          <Config>
            {({ me }) => (
              <User setBreadcrumb={addUserBreadcrumb} me={me || {}} />
            )}
          </Config>
          <User setBreadcrumb={addUserBreadcrumb} />
        </Route>
        <Route path={`${match.path}`}>
          <UsersList />
@@ -6,7 +6,7 @@ import { mountWithContexts } from '@testUtils/enzymeHelpers';
import Users from './Users';

describe('<Users />', () => {
  test('initially renders succesfully', () => {
  test('initially renders successfully', () => {
    mountWithContexts(<Users />);
  });
@@ -25,6 +25,15 @@ export function maxLength(max, i18n) {
  };
}

export function minLength(min, i18n) {
  return value => {
    if (value.trim().length < min) {
      return i18n._(t`This field must be at least ${min} characters`);
    }
    return undefined;
  };
}

export function minMaxValue(min, max, i18n) {
  return value => {
    if (value < min || value > max) {
@@ -57,10 +66,21 @@ export function noWhiteSpace(i18n) {
  };
}

export function integer(i18n) {
  return value => {
    const str = String(value);
    if (/[^0-9]/.test(str)) {
      return i18n._(t`This field must be an integer`);
    }
    return undefined;
  };
}

export function combine(validators) {
  return value => {
    for (let i = 0; i < validators.length; i++) {
      const error = validators[i](value);
      const validate = validators[i];
      const error = validate ? validate(value) : null;
      if (error) {
        return error;
      }
@@ -1,4 +1,11 @@
import { required, maxLength, noWhiteSpace, combine } from './validators';
import {
  required,
  minLength,
  maxLength,
  noWhiteSpace,
  integer,
  combine,
} from './validators';

const i18n = { _: val => val };

@@ -52,6 +59,21 @@ describe('validators', () => {
    });
  });

  test('minLength accepts value above min', () => {
    expect(minLength(3, i18n)('snazzy')).toBeUndefined();
  });

  test('minLength accepts value equal to min', () => {
    expect(minLength(10, i18n)('abracadbra')).toBeUndefined();
  });

  test('minLength rejects value below min', () => {
    expect(minLength(12, i18n)('abracadbra')).toEqual({
      id: 'This field must be at least {min} characters',
      values: { min: 12 },
    });
  });

  test('noWhiteSpace returns error', () => {
    expect(noWhiteSpace(i18n)('this has spaces')).toEqual({
      id: 'This field must not contain spaces',
@@ -68,6 +90,26 @@ describe('validators', () => {
    expect(noWhiteSpace(i18n)('this_has_no_whitespace')).toBeUndefined();
  });

  test('integer should accept integer (number)', () => {
    expect(integer(i18n)(13)).toBeUndefined();
  });

  test('integer should accept integer (string)', () => {
    expect(integer(i18n)('13')).toBeUndefined();
  });

  test('integer should reject decimal/float', () => {
    expect(integer(i18n)(13.1)).toEqual({
      id: 'This field must be an integer',
    });
  });

  test('integer should reject string containing alphanum', () => {
    expect(integer(i18n)('15a')).toEqual({
      id: 'This field must be an integer',
    });
  });

  test('combine should run all validators', () => {
    const validators = [required(null, i18n), noWhiteSpace(i18n)];
    expect(combine(validators)('')).toEqual({
@@ -78,4 +120,12 @@ describe('validators', () => {
    });
    expect(combine(validators)('ok')).toBeUndefined();
  });

  test('combine should skip null validators', () => {
    const validators = [required(null, i18n), null];
    expect(combine(validators)('')).toEqual({
      id: 'This field must not be blank',
    });
    expect(combine(validators)('ok')).toBeUndefined();
  });
});
@@ -20,11 +20,12 @@ Installing the `tar.gz` involves no special instructions.

## Running

Non-deprecated modules in this collection have no python requirements, but
Non-deprecated modules in this collection have no Python requirements, but
may require the official [AWX CLI](https://docs.ansible.com/ansible-tower/latest/html/towercli/index.html)
in the future. The `DOCUMENTATION` for each module will report this.

You can specify authentication by a combination of either:

- host, username, password
- host, OAuth2 token

@@ -33,6 +34,7 @@ AWX CLI [login](https://docs.ansible.com/ansible-tower/latest/html/towercli/refe
command.

These can be specified via:

- environment variables (most useful when running against localhost)
- direct module parameters (see the sketch below)
- a config file path specified by the `tower_config_file` parameter
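As a quick illustration of the direct-parameter option, here is a minimal sketch; the host name and credential values are placeholders, not real values:

- name: Create an organization, passing credentials as module parameters
  tower_organization:
    name: Example Org          # placeholder organization name
    state: present
    tower_host: https://tower.example.com   # placeholder host
    tower_username: admin                   # placeholder user
    tower_password: s3cr3t                  # placeholder password
    validate_certs: false

The same parameters are honored by every module in the collection via the shared `awx.awx.auth` documentation fragment.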
@@ -51,22 +53,28 @@ oauth_token = LEdCpKVKc4znzffcpQL5vLG8oyeku6

## Release and Upgrade Notes

Notable releases of the `awx.awx` collection:
- 7.0.0 is intended to be identical to the content prior to the migration, aside from changes necessary to function as a collection
- 11.0.0 has no non-deprecated modules that depend on the deprecated `tower-cli` [PyPI](https://pypi.org/project/ansible-tower-cli/)

- 7.0.0 is intended to be identical to the content prior to the migration, aside from changes necessary to function as a collection.
- 11.0.0 has no non-deprecated modules that depend on the deprecated `tower-cli` [PyPI](https://pypi.org/project/ansible-tower-cli/).

The following notes are changes that may require changes to playbooks:

- When a project is created, it will wait for the update/sync to finish by default; this can be turned off with the `wait` parameter, if desired.
- Creating a "scan" type job template is no longer supported.
- Specifying a custom certificate via the `TOWER_CERTIFICATE` environment variable no longer works.
- Type changes of variable fields
  - `extra_vars` in the `tower_job_launch` module worked with a `list` previously, but now only works with a `dict` type.
  - `extra_vars` in the `tower_workflow_job_template` module worked with a `string` previously but now expects a `dict`.
  - When the `extra_vars` parameter is used with the `tower_job_launch` module, the launch will fail unless `ask_extra_vars` or `survey_enabled` is explicitly set to `True` on the Job Template.
  - The `variables` parameter in the `tower_group`, `tower_host` and `tower_inventory` modules now expects a `dict` type and no longer supports the use of `@` syntax for a file.
- Type changes of other types of fields
  - `inputs` or `injectors` in the `tower_credential_type` module worked with a string previously but now expects a `dict`.
  - `schema` in the `tower_workflow_job_template` module worked with a `string` previously but now expects a `list` of `dict`s.
- Type changes of variable fields:

  - `extra_vars` in the `tower_job_launch` module worked with a `list` previously, but now only works with a `dict` type (see the sketch after these notes)
  - `extra_vars` in the `tower_workflow_job_template` module worked with a `string` previously but now expects a `dict`
  - When the `extra_vars` parameter is used with the `tower_job_launch` module, the launch will fail unless `ask_extra_vars` or `survey_enabled` is explicitly set to `True` on the Job Template
  - The `variables` parameter in the `tower_group`, `tower_host` and `tower_inventory` modules now expects a `dict` type and no longer supports the use of `@` syntax for a file

- Type changes of other types of fields:

  - `inputs` or `injectors` in the `tower_credential_type` module worked with a string previously but now expects a `dict`
  - `schema` in the `tower_workflow_job_template` module worked with a `string` previously but now expects a `list` of `dict`s

- `tower_group` used to also service inventory sources, but this functionality has been removed from this module; use `tower_inventory_source` instead.
- Specified `tower_config` file used to handle `k=v` pairs on a single line; this is no longer supported. Please use a file formatted as `yaml`, `json` or `ini` only.
- Some return values (e.g., `credential_type`) have been removed. Use of `id` is recommended.
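A hedged sketch of the `extra_vars` type change above; the job template name and variables are placeholders, and the template must have `ask_extra_vars` or `survey_enabled` set as noted:

- name: Launch a job template, passing extra_vars as a dict (no longer a list)
  tower_job_launch:
    job_template: "Example Job Template"   # placeholder; must prompt for extra vars
    extra_vars:
      region: us-east-1
      debug: true
  register: launch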
awx_collection/meta/routing.yml (new file, 15 lines)
@@ -0,0 +1,15 @@
---
plugin_routing:
  modules:
    tower_receive:
      deprecation:
        removal_date: TBD
        warning_text: see plugin documentation for details
    tower_send:
      deprecation:
        removal_date: TBD
        warning_text: see plugin documentation for details
    tower_workflow_template:
      deprecation:
        removal_date: TBD
        warning_text: see plugin documentation for details
@@ -275,6 +275,21 @@ options:
      - If value not set, will try environment variable C(TOWER_OAUTH_TOKEN) and then config files
    type: str
    version_added: "3.7"
  notification_templates_started:
    description:
      - list of notifications to send on start
    type: list
    elements: str
  notification_templates_success:
    description:
      - list of notifications to send on success
    type: list
    elements: str
  notification_templates_error:
    description:
      - list of notifications to send on error
    type: list
    elements: str

extends_documentation_fragment: awx.awx.auth

@@ -365,6 +380,9 @@ def main():
        webhook_service=dict(choices=['github', 'gitlab']),
        webhook_credential=dict(),
        labels=dict(type="list", elements='str'),
        notification_templates_started=dict(type="list", elements='str'),
        notification_templates_success=dict(type="list", elements='str'),
        notification_templates_error=dict(type="list", elements='str'),
        state=dict(choices=['present', 'absent'], default='present'),
    )

@@ -441,6 +459,24 @@ def main():
    for item in labels:
        labels_ids.append(module.resolve_name_to_id('labels', item))

    notifications_start = module.params.get('notification_templates_started')
    notification_start_ids = []
    if notifications_start is not None:
        for item in notifications_start:
            notification_start_ids.append(module.resolve_name_to_id('notification_templates', item))

    notifications_success = module.params.get('notification_templates_success')
    notification_success_ids = []
    if notifications_success is not None:
        for item in notifications_success:
            notification_success_ids.append(module.resolve_name_to_id('notification_templates', item))

    notifications_error = module.params.get('notification_templates_error')
    notification_error_ids = []
    if notifications_error is not None:
        for item in notifications_error:
            notification_error_ids.append(module.resolve_name_to_id('notification_templates', item))

    on_change = None
    new_spec = module.params.get('survey_spec')
    if new_spec is not None:
@@ -465,6 +501,9 @@ def main():
        associations={
            'credentials': credentials_ids,
            'labels': labels_ids,
            'notification_templates_success': notification_success_ids,
            'notification_templates_started': notification_start_ids,
            'notification_templates_error': notification_error_ids
        },
        on_create=on_change, on_update=on_change,
    )
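A usage sketch for the new options, mirroring the integration test later in this changeset; the template and notification names are placeholders, and the notification templates must already exist:

- name: Attach notification templates to an existing job template
  tower_job_template:
    name: "Example Job Template"                     # placeholder
    notification_templates_started:
      - "Example Started Notification"               # placeholder
    notification_templates_success:
      - "Example Success Notification"               # placeholder
    notification_templates_error:
      - "Example Error Notification"                 # placeholder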
@@ -83,11 +83,13 @@ options:
      Only valid if scm_update_on_launch is set to True, otherwise ignored.
    type: int
    default: 0
  scm_allow_override:
  allow_override:
    description:
      - Allow changing the SCM branch or revision in a job template that uses this project.
    type: bool
    version_added: "3.7"
    aliases:
      - scm_allow_override
  job_timeout:
    version_added: "2.8"
    description:
@@ -188,7 +190,7 @@ def main():
        scm_delete_on_update=dict(type='bool', default=False),
        scm_update_on_launch=dict(type='bool', default=False),
        scm_update_cache_timeout=dict(type='int', default=0),
        scm_allow_override=dict(type='bool'),
        allow_override=dict(type='bool', aliases=['scm_allow_override']),
        job_timeout=dict(type='int', default=0),
        custom_virtualenv=dict(),
        organization=dict(required=True),
@@ -214,7 +216,7 @@ def main():
    scm_delete_on_update = module.params.get('scm_delete_on_update')
    scm_update_on_launch = module.params.get('scm_update_on_launch')
    scm_update_cache_timeout = module.params.get('scm_update_cache_timeout')
    scm_allow_override = module.params.get('scm_allow_override')
    allow_override = module.params.get('allow_override')
    job_timeout = module.params.get('job_timeout')
    custom_virtualenv = module.params.get('custom_virtualenv')
    organization = module.params.get('organization')
@@ -253,8 +255,8 @@ def main():
        project_fields['description'] = description
    if scm_credential is not None:
        project_fields['credential'] = scm_credential_id
    if scm_allow_override is not None:
        project_fields['scm_allow_override'] = scm_allow_override
    if allow_override is not None:
        project_fields['allow_override'] = allow_override
    if scm_type == '':
        project_fields['local_path'] = local_path
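A short sketch of the renamed option in use, adapted from the integration test further below; the project name is a placeholder, and `scm_allow_override` continues to work as an alias:

- name: Create a project whose branch may be overridden by job templates
  tower_project:
    name: "Example Project"     # placeholder
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    allow_override: true        # was scm_allow_override; alias still accepted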
@@ -9,13 +9,17 @@ __metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'status': ['deprecated'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: tower_receive
deprecated:
  removed_in: "3.7"
  why: Deprecated in favor of upcoming C(_export) module.
  alternative: Once published, use M(tower_export) instead.
author: "John Westcott IV (@john-westcott-iv)"
version_added: "2.8"
short_description: Receive assets from Ansible Tower.
@@ -9,13 +9,17 @@ __metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'status': ['deprecated'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: tower_send
deprecated:
  removed_in: "3.7"
  why: Deprecated in favor of upcoming C(_import) module.
  alternative: Once published, use M(tower_import) instead.
author: "John Westcott IV (@john-westcott-iv)"
version_added: "2.8"
short_description: Send assets to Ansible Tower.
@@ -8,7 +8,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'status': ['preview'],
ANSIBLE_METADATA = {'status': ['deprecated'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}

@@ -16,6 +16,10 @@ ANSIBLE_METADATA = {'status': ['preview'],
DOCUMENTATION = '''
---
module: tower_workflow_template
deprecated:
  removed_in: "3.7"
  why: Deprecated in favor of C(_workflow_job_template) and C(_workflow_job_template_node) modules.
  alternative: Use M(tower_workflow_job_template) and M(_workflow_job_template_node) instead.
author: "Adrien Fleury (@fleu42)"
version_added: "2.7"
short_description: create, update, or destroy Ansible Tower workflow template.
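A hedged migration sketch for this deprecation; the workflow name is a placeholder, and node-level details (previously the `schema` parameter) would be managed separately with `tower_workflow_job_template_node`:

- name: Create a workflow with the replacement module
  tower_workflow_job_template:
    name: "Example Workflow"    # placeholder
    organization: Default
    state: present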
@@ -12,6 +12,7 @@
    jt1: "AWX-Collection-tests-tower_job_template-jt1-{{ test_id }}"
    jt2: "AWX-Collection-tests-tower_job_template-jt2-{{ test_id }}"
    lab1: "AWX-Collection-tests-tower_job_template-lab1-{{ test_id }}"
    email_not: "AWX-Collection-tests-tower_job_template-email-not-{{ test_id }}"

- name: Create a Demo Project
  tower_project:
@@ -46,6 +47,22 @@
    name: "{{ lab1 }}"
    organization: Default

- name: Add email notification
  tower_notification:
    name: "{{ email_not }}"
    organization: Default
    notification_type: email
    username: user
    password: s3cr3t
    sender: tower@example.com
    recipients:
      - user1@example.com
    host: smtp.example.com
    port: 25
    use_tls: false
    use_ssl: false
    state: present

- name: Create Job Template 1
  tower_job_template:
    name: "{{ jt1 }}"
@@ -240,6 +257,37 @@
      that:
        - "result is changed"

- name: Add started notification to Job Template 2
  tower_job_template:
    name: "{{ jt2 }}"
    notification_templates_started:
      - "{{ email_not }}"
  register: result

- assert:
    that:
      - "result is changed"

- name: Re Add started notification to Job Template 2
  tower_job_template:
    name: "{{ jt2 }}"
    notification_templates_started:
      - "{{ email_not }}"
  register: result

- assert:
    that:
      - "result is not changed"

- name: Remove started notification to Job Template 2
  tower_job_template:
    name: "{{ jt2 }}"
  register: result

- assert:
    that:
      - "result is changed"

- name: Delete Job Template 2
  tower_job_template:
    name: "{{ jt2 }}"
@@ -286,3 +334,9 @@
    state: absent

# You can't delete a label directly so no cleanup needed

- name: Delete email notification
  tower_notification:
    name: "{{ email_not }}"
    organization: Default
    state: absent
@@ -3,6 +3,8 @@
  set_fact:
    project_name1: "AWX-Collection-tests-tower_project-project1-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    project_name2: "AWX-Collection-tests-tower_project-project2-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    project_name3: "AWX-Collection-tests-tower_project-project3-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    jt1: "AWX-Collection-tests-tower_project-jt1-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    scm_cred_name: "AWX-Collection-tests-tower_project-scm-cred-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    org_name: "AWX-Collection-tests-tower_project-org-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    cred_name: "AWX-Collection-tests-tower_project-cred-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
@@ -118,7 +120,60 @@
      - "result.msg =='The credentials Non Existing Credential was not found on the Tower server' or
         result.msg =='Failed to update project, credential not found: Non Existing Credential'"

- name: Delete the test project
- name: Create a git project without credentials without waiting
  tower_project:
    name: "{{ project_name3 }}"
    organization: Default
    scm_type: git
    scm_branch: empty_branch
    scm_url: https://github.com/ansible/test-playbooks
    allow_override: true
  register: result

- assert:
    that:
      - result is changed

- name: Create a job template that overrides the project scm_branch
  tower_job_template:
    name: "{{ jt1 }}"
    project: "{{ project_name3 }}"
    inventory: "Demo Inventory"
    scm_branch: master
    playbook: debug.yml

- name: Launch "{{ jt1 }}"
  tower_job_launch:
    job_template: "{{ jt1 }}"
  register: result

- assert:
    that:
      - result is changed

- name: "wait for job {{ result.id }}"
  tower_job_wait:
    job_id: "{{ result.id }}"
  register: job

- assert:
    that:
      - job is successful

- name: Delete the test job_template
  tower_job_template:
    name: "{{ jt1 }}"
    project: "{{ project_name3 }}"
    inventory: "Demo Inventory"
    state: absent

- name: Delete the test project 3
  tower_project:
    name: "{{ project_name3 }}"
    organization: Default
    state: absent

- name: Delete the test project 2
  tower_project:
    name: "{{ project_name2 }}"
    organization: "{{ org_name }}"
@@ -136,7 +191,7 @@
    that:
      - result is changed

- name: Delete the other test project
- name: Delete the test project 1
  tower_project:
    name: "{{ project_name1 }}"
    organization: Default
@@ -1,18 +0,0 @@
|
||||
---
|
||||
- name: Export all Tower assets
|
||||
tower_receive:
|
||||
all: true
|
||||
register: result
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is successful"
|
||||
|
||||
- name: Extract names from output
|
||||
set_fact:
|
||||
object_names: "{{ result.assets | map(attribute='name') | list }}"
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is successful"
|
||||
- "'Default' in object_names"
|
||||
@@ -1,81 +0,0 @@
|
||||
---
|
||||
- name: Test no parameters
|
||||
tower_send:
|
||||
register: result
|
||||
ignore_errors: true
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is failed"
|
||||
|
||||
- name: Create user json
|
||||
set_fact:
|
||||
user:
|
||||
- username: "jowestco"
|
||||
first_name: "John"
|
||||
last_name: "Westcott"
|
||||
asset_type: "user"
|
||||
email: "john.westcott.iv@redhat.com"
|
||||
|
||||
- name: Test a new import of asset
|
||||
tower_send:
|
||||
assets: "{{ user | to_json() }}"
|
||||
register: result
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is changed"
|
||||
|
||||
- name: Test an existing import of asset
|
||||
tower_send:
|
||||
assets: "{{ user | to_json() }}"
|
||||
register: result
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is successful"
|
||||
- "result is not changed"
|
||||
|
||||
- name: Change an existing asset
|
||||
tower_send:
|
||||
assets: "{{ user | combine({'last_name': 'Westcott IV'}) | to_json() }}"
|
||||
register: result
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is changed"
|
||||
|
||||
- name: Ensure the organization is not created
|
||||
tower_organization:
|
||||
name: "Red Hat"
|
||||
state: absent
|
||||
|
||||
- name: Create organization json
|
||||
set_fact:
|
||||
organization:
|
||||
- asset_type: organization
|
||||
name: "Red Hat"
|
||||
|
||||
- name: Create temp file
|
||||
tempfile:
|
||||
state: file
|
||||
register: my_temp_file
|
||||
|
||||
- name: Drop down a file to import
|
||||
copy:
|
||||
dest: "{{ my_temp_file.path }}"
|
||||
content: "{{ organization | to_nice_json() }}"
|
||||
|
||||
- name: Create org via files
|
||||
tower_send:
|
||||
files: "{{ my_temp_file.path }}"
|
||||
register: result
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is changed"
|
||||
|
||||
- name: Remove Temp File
|
||||
file:
|
||||
path: "{{ my_temp_file.path }}"
|
||||
state: absent
|
||||
awx_collection/tests/sanity/ignore-2.10.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
plugins/modules/tower_receive.py validate-modules:deprecation-mismatch
plugins/modules/tower_receive.py validate-modules:invalid-documentation
plugins/modules/tower_send.py validate-modules:deprecation-mismatch
plugins/modules/tower_send.py validate-modules:invalid-documentation
plugins/modules/tower_workflow_template.py validate-modules:deprecation-mismatch
plugins/modules/tower_workflow_template.py validate-modules:invalid-documentation
awx_collection/tests/sanity/ignore-2.9.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
plugins/modules/tower_receive.py validate-modules:deprecation-mismatch
plugins/modules/tower_receive.py validate-modules:invalid-documentation
plugins/modules/tower_send.py validate-modules:deprecation-mismatch
plugins/modules/tower_send.py validate-modules:invalid-documentation
plugins/modules/tower_workflow_template.py validate-modules:deprecation-mismatch
plugins/modules/tower_workflow_template.py validate-modules:invalid-documentation
@@ -25,8 +25,9 @@ class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, Unifi
        # return job
        jobs_pg = self.related.workflow_jobs.get(id=result.workflow_job)
        if jobs_pg.count != 1:
            msg = "workflow_job_template launched (id:{}) but job not found in response at {}/workflow_jobs/" % \
                (result.json['workflow_job'], self.url)
            msg = "workflow_job_template launched (id:{}) but job not found in response at {}/workflow_jobs/".format(
                result.json['workflow_job'], self.url
            )
            raise exc.UnexpectedAWXState(msg)
        return jobs_pg.results[0]
@@ -79,7 +79,7 @@ class URLRegistry(object):
        for url_pattern, method_pattern in url_iterable:
            if url_pattern in self.store and method_pattern in self.store[url_pattern]:
                if method_pattern.pattern == not_provided:
                    exc_msg = '"{0.pattern}" already has methodless registration.'.format(url_pattern, method_pattern)
                    exc_msg = '"{0.pattern}" already has methodless registration.'.format(url_pattern)
                else:
                    exc_msg = ('"{0.pattern}" already has registered method "{1.pattern}"'
                               .format(url_pattern, method_pattern))
@@ -14,7 +14,7 @@ hijack cookies will only get the `session_id` itself, which does not imply any c
a limited time, and can be revoked at any time.

> Note: The CSRF token will by default allow HTTP. To increase security, the `CSRF_COOKIE_SECURE` setting should
be set to False.
be set to True.


## Usage
@@ -26,20 +26,8 @@ stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

[program:awx-rsyslogd]
command = rsyslogd -n -i /awx_devel/rsyslog.pid
autostart = true
autorestart = true
stopwaitsecs = 1
stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/stderr
stdout_logfile_maxbytes=0

[group:tower-processes]
programs=dispatcher,callback-receiver,awx-rsyslogd
programs=dispatcher,callback-receiver
priority=5

# TODO: Exit Handler
@@ -7,6 +7,7 @@ ENV LC_ALL en_US.UTF-8
USER root

ADD google-cloud-sdk.repo /etc/yum.repos.d/
ADD rsyslog.repo /etc/yum.repos.d/rsyslog.repo

# sync with installer/roles/image_build/templates/Dockerfile.j2
RUN dnf -y update && \
@@ -49,6 +50,7 @@ RUN dnf -y update && \
    python3-setuptools \
    python3-pycurl \
    rsync \
    rsyslog-omhttp \
    subversion \
    sudo \
    swig \
@@ -97,9 +99,6 @@ RUN cd /usr/local/bin && \
    curl -L https://github.com/openshift/origin/releases/download/v3.11.0/openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz | \
    tar -xz --strip-components=1 --wildcards --no-anchored 'oc'

ADD rsyslog.repo /etc/yum.repos.d/rsyslog.repo
RUN yum install -y rsyslog-omhttp

# Pre-create things that we need to write to
RUN for dir in /home/awx /var/run/supervisor /var/lib/awx /var/lib/awx/rsyslog /var/lib/awx/rsyslog/conf.d /var/run/awx-rsyslog /var/log/tower /var/log/nginx /var/lib/nginx; \
    do mkdir -p $dir; chmod -R g+rwx $dir; chgrp -R root $dir; done && \
@@ -264,10 +264,6 @@ spec:
        image: {{ kubernetes_redis_image }}:{{ kubernetes_redis_image_tag }}
        imagePullPolicy: Always
        args: ["redis-server", "{{ kubernetes_redis_config_mount_path }}"]
        ports:
        - name: redis
          protocol: TCP
          containerPort: 6379
        volumeMounts:
        - name: {{ kubernetes_deployment_name }}-redis-config
          mountPath: "{{ kubernetes_redis_config_mount_path }}"
@@ -22,8 +22,9 @@ exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory,awx/ui,awx/api/url

[testenv:linters]
deps =
    make
    flake8
    yamllint
commands =
    flake8
    make flake8
    yamllint -s .
@@ -58,7 +58,6 @@ services:
    privileged: true
    image: ${DEV_DOCKER_TAG_BASE}/awx_devel:${TAG}
    hostname: awx-3
    entrypoint: ["bash"]
    command: launch_awx.sh
    working_dir: "/awx_devel"
    environment:
@@ -8,6 +8,7 @@ ENV LC_ALL en_US.UTF-8

ADD tools/docker-compose/ansible_nightly.repo /etc/yum.repos.d/ansible_nightly.repo
ADD tools/docker-compose/google-cloud-sdk.repo /etc/yum.repos.d/
ADD tools/docker-compose/rsyslog.repo /etc/yum.repos.d/

# sync with installer/roles/image_build/templates/Dockerfile.j2
RUN dnf -y update && \
@@ -50,6 +51,7 @@ RUN dnf -y update && \
    python3-setuptools \
    python3-pycurl \
    rsync \
    rsyslog-omhttp \
    subversion \
    sudo \
    swig \
@@ -120,6 +122,7 @@ ADD tools/docker-compose/launch_awx.sh /usr/bin/launch_awx.sh
ADD tools/docker-compose/start_tests.sh /start_tests.sh
ADD tools/docker-compose/bootstrap_development.sh /usr/bin/bootstrap_development.sh
ADD tools/docker-compose/entrypoint.sh /
ADD tools/docker-compose/rsyslog.conf /var/lib/awx/rsyslog/rsyslog.conf
ADD tools/scripts/awx-python /usr/bin/awx-python

# Pre-create things that we need to write to
@@ -129,13 +132,6 @@ RUN for dir in /var/lib/awx /var/lib/awx/rsyslog /var/lib/awx/rsyslog/conf.d /va
    for file in /etc/passwd /etc/supervisord.conf /venv/awx/lib/python3.6/site-packages/awx.egg-link /var/run/nginx.pid; \
    do touch $file; chmod -R g+rwx $file; chgrp -R root $file; done


RUN chmod -R 0775 /var/lib/awx /var/lib/awx/rsyslog
ADD tools/docker-compose/rsyslog.repo /etc/yum.repos.d/
RUN yum install -y rsyslog-omhttp
ADD tools/docker-compose/rsyslog.conf /var/lib/awx/rsyslog/rsyslog.conf
RUN chmod 0775 /var/lib/awx/rsyslog/rsyslog.conf

ENV HOME /var/lib/awx
ENV PATH="/usr/local/n/versions/node/10.15.0/bin:${PATH}"
ENV PATH="/usr/pgsql-10/bin:${PATH}"
@@ -28,6 +28,8 @@ SOSREPORT_TOWER_DIRS = [
    "/var/log/tower",
    "/var/log/nginx",
    "/var/log/supervisor",
    "/etc/opt/rh/rh-redis5/redis.conf",
    "/etc/redis.conf",
    "/var/log/dist-upgrade",
    "/var/log/installer",
    "/var/log/unattended-upgrades",