Merge pull request #10 from ansible/devel

Rebase from devel
This commit is contained in:
Sean Sullivan 2020-09-09 07:49:37 -05:00 committed by GitHub
commit 470b7aaeea
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
105 changed files with 2859 additions and 1233 deletions

View File

@ -80,7 +80,7 @@ For Linux platforms, refer to the following from Docker:
If you're not using Docker for Mac, or Docker for Windows, you may need, or choose to, install the Docker compose Python module separately, in which case you'll need to run the following:
```bash
(host)$ pip install docker-compose
(host)$ pip3 install docker-compose
```
#### Frontend Development

View File

@ -273,7 +273,7 @@ def copy_tables(since, full_path, subset=None):
main_unifiedjob.organization_id,
main_organization.name as organization_name,
main_job.inventory_id,
main_inventory.name,
main_inventory.name as inventory_name,
main_unifiedjob.created,
main_unifiedjob.name,
main_unifiedjob.unified_job_template_id,

View File

@ -1,5 +1,3 @@
import collections
import functools
import json
import logging
import time
@ -14,40 +12,12 @@ from django.contrib.auth.models import User
from channels.generic.websocket import AsyncJsonWebsocketConsumer
from channels.layers import get_channel_layer
from channels.db import database_sync_to_async
from channels_redis.core import RedisChannelLayer
logger = logging.getLogger('awx.main.consumers')
XRF_KEY = '_auth_user_xrf'
class BoundedQueue(asyncio.Queue):
    """An asyncio.Queue that sheds its oldest entry instead of raising
    QueueFull when a put would exceed capacity.

    channels_redis will happily keep queueing messages for a channel whose
    consumer has stopped reading (https://github.com/django/channels_redis/issues/212).
    That is tolerable for disconnected browser clients but dangerous for the
    global broadcast topic used between cluster peers: the buffer would grow
    until memory is exhausted. Dropping the oldest message is the lesser evil.
    """

    def put_nowait(self, item):
        # At capacity: evict the oldest queued item to make room, then enqueue.
        if self.full():
            self.get_nowait()
        return super(BoundedQueue, self).put_nowait(item)
class ExpiringRedisChannelLayer(RedisChannelLayer):
def __init__(self, *args, **kw):
super(ExpiringRedisChannelLayer, self).__init__(*args, **kw)
self.receive_buffer = collections.defaultdict(
functools.partial(BoundedQueue, self.capacity)
)
class WebsocketSecretAuthHelper:
"""
Middlewareish for websockets to verify node websocket broadcast interconnect.

View File

@ -12,6 +12,7 @@ from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.utils.timezone import now, timedelta
import redis
from solo.models import SingletonModel
from awx import __version__ as awx_application_version
@ -152,6 +153,14 @@ class Instance(HasPolicyEditsMixin, BaseModel):
self.capacity = get_system_task_capacity(self.capacity_adjustment)
else:
self.capacity = 0
try:
# if redis is down for some reason, that means we can't persist
# playbook event data; we should consider this a zero capacity event
redis.Redis.from_url(settings.BROKER_URL).ping()
except redis.ConnectionError:
self.capacity = 0
self.cpu = cpu[0]
self.memory = mem[0]
self.cpu_capacity = cpu[1]

View File

@ -12,6 +12,7 @@ import random
from django.db import transaction, connection
from django.utils.translation import ugettext_lazy as _, gettext_noop
from django.utils.timezone import now as tz_now
from django.conf import settings
# AWX
from awx.main.dispatch.reaper import reap_job
@ -45,6 +46,12 @@ class TaskManager():
def __init__(self):
self.graph = dict()
# start task limit indicates how many pending jobs can be started on this
# .schedule() run. Starting jobs is expensive, and there is code in place to reap
# the task manager after 5 minutes. At scale, the task manager can easily take more than
# 5 minutes to start pending jobs. If this limit is reached, pending jobs
# will no longer be started and will be started on the next task manager cycle.
self.start_task_limit = settings.START_TASK_LIMIT
for rampart_group in InstanceGroup.objects.prefetch_related('instances'):
self.graph[rampart_group.name] = dict(graph=DependencyGraph(rampart_group.name),
capacity_total=rampart_group.capacity,
@ -189,6 +196,10 @@ class TaskManager():
return result
def start_task(self, task, rampart_group, dependent_tasks=None, instance=None):
self.start_task_limit -= 1
if self.start_task_limit == 0:
# schedule another run immediately after this task manager
schedule_task_manager()
from awx.main.tasks import handle_work_error, handle_work_success
dependent_tasks = dependent_tasks or []
@ -448,6 +459,8 @@ class TaskManager():
def process_pending_tasks(self, pending_tasks):
running_workflow_templates = set([wf.unified_job_template_id for wf in self.get_running_workflow_jobs()])
for task in pending_tasks:
if self.start_task_limit <= 0:
break
if self.is_job_blocked(task):
logger.debug("{} is blocked from running".format(task.log_format))
continue

View File

@ -1632,11 +1632,6 @@ class RunJob(BaseTask):
# callbacks to work.
env['JOB_ID'] = str(job.pk)
env['INVENTORY_ID'] = str(job.inventory.pk)
if job.use_fact_cache:
library_source = self.get_path_to('..', 'plugins', 'library')
library_dest = os.path.join(private_data_dir, 'library')
copy_tree(library_source, library_dest)
env['ANSIBLE_LIBRARY'] = library_dest
if job.project:
env['PROJECT_REVISION'] = job.project.scm_revision
env['ANSIBLE_RETRY_FILES_ENABLED'] = "False"

View File

@ -52,11 +52,11 @@ patterns
--------
`mk` functions are single object fixtures. They should create only a single object with the minimum deps.
They should also accept a `persited` flag, if they must be persisted to work, they raise an error if persisted=False
They should also accept a `persisted` flag, if they must be persisted to work, they raise an error if persisted=False
`generate` and `apply` functions are helpers that build up the various parts of a `create` functions objects. These
should be useful for more than one create function to use and should explicitly accept all of the values needed
to execute. These functions should also be robust and have very speciifc error reporting about constraints and/or
to execute. These functions should also be robust and have very specific error reporting about constraints and/or
bad values.
`create` functions compose many of the `mk` and `generate` functions to make different object

View File

@ -1,3 +1,4 @@
import redis
import pytest
from unittest import mock
import json
@ -25,7 +26,8 @@ def test_orphan_unified_job_creation(instance, inventory):
@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000,62))
def test_job_capacity_and_with_inactive_node():
i = Instance.objects.create(hostname='test-1')
i.refresh_capacity()
with mock.patch.object(redis.client.Redis, 'ping', lambda self: True):
i.refresh_capacity()
assert i.capacity == 62
i.enabled = False
i.save()
@ -35,6 +37,19 @@ def test_job_capacity_and_with_inactive_node():
assert i.capacity == 0
@pytest.mark.django_db
@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2,8))
@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000,62))
def test_job_capacity_with_redis_disabled():
    """Capacity must drop to zero when redis is unreachable, even though
    CPU/memory capacity is available (patched to nonzero above)."""
    i = Instance.objects.create(hostname='test-1')

    # Stand-in for Redis.ping that simulates a down redis server.
    def _raise(self):
        raise redis.ConnectionError()

    with mock.patch.object(redis.client.Redis, 'ping', _raise):
        i.refresh_capacity()
    assert i.capacity == 0
@pytest.mark.django_db
def test_job_type_name():
job = Job.objects.create()

View File

@ -1,36 +0,0 @@
---
# Fact-scan playbook: gathers package, service, file, and Insights facts.
# Unix/Linux hosts use the scan_* modules; Windows hosts use win_scan_*.
- hosts: all
  vars:
    # Checksumming and recursive file scans are opt-in (can be expensive).
    scan_use_checksum: false
    scan_use_recursive: false
  tasks:
    - name: "Scan packages (Unix/Linux)"
      scan_packages:
        os_family: '{{ ansible_os_family }}'
      when: ansible_os_family != "Windows"
    - name: "Scan services (Unix/Linux)"
      scan_services:
      when: ansible_os_family != "Windows"
    - name: "Scan files (Unix/Linux)"
      scan_files:
        paths: '{{ scan_file_paths }}'
        get_checksum: '{{ scan_use_checksum }}'
        recursive: '{{ scan_use_recursive }}'
      # File scanning only runs when the user supplies scan_file_paths.
      when: scan_file_paths is defined and ansible_os_family != "Windows"
    - name: "Scan Insights for Machine ID (Unix/Linux)"
      scan_insights:
      when: ansible_os_family != "Windows"
    - name: "Scan packages (Windows)"
      win_scan_packages:
      when: ansible_os_family == "Windows"
    - name: "Scan services (Windows)"
      win_scan_services:
      when: ansible_os_family == "Windows"
    - name: "Scan files (Windows)"
      win_scan_files:
        paths: '{{ scan_file_paths }}'
        get_checksum: '{{ scan_use_checksum }}'
        recursive: '{{ scan_use_recursive }}'
      when: scan_file_paths is defined and ansible_os_family == "Windows"

View File

@ -1,166 +0,0 @@
#!/usr/bin/env python
import os
import stat
from ansible.module_utils.basic import * # noqa
DOCUMENTATION = '''
---
module: scan_files
short_description: Return file state information as fact data for a directory tree
description:
- Return file state information recursively for a directory tree on the filesystem
version_added: "1.9"
options:
path:
description: The path containing files to be analyzed
required: true
default: null
recursive:
description: scan this directory and all subdirectories
required: false
default: no
get_checksum:
description: Checksum files that you can access
required: false
default: false
requirements: [ ]
author: Matthew Jones
'''
EXAMPLES = '''
# Example fact output:
# host | success >> {
# "ansible_facts": {
# "files": [
# {
# "atime": 1427313854.0755742,
# "checksum": "cf7566e6149ad9af91e7589e0ea096a08de9c1e5",
# "ctime": 1427129299.22948,
# "dev": 51713,
# "gid": 0,
# "inode": 149601,
# "isblk": false,
# "ischr": false,
# "isdir": false,
# "isfifo": false,
# "isgid": false,
# "islnk": false,
# "isreg": true,
# "issock": false,
# "isuid": false,
# "mode": "0644",
# "mtime": 1427112663.0321455,
# "nlink": 1,
# "path": "/var/log/dmesg.1.gz",
# "rgrp": true,
# "roth": true,
# "rusr": true,
# "size": 28,
# "uid": 0,
# "wgrp": false,
# "woth": false,
# "wusr": true,
# "xgrp": false,
# "xoth": false,
# "xusr": false
# },
# {
# "atime": 1427314385.1155744,
# "checksum": "16fac7be61a6e4591a33ef4b729c5c3302307523",
# "ctime": 1427384148.5755742,
# "dev": 51713,
# "gid": 43,
# "inode": 149564,
# "isblk": false,
# "ischr": false,
# "isdir": false,
# "isfifo": false,
# "isgid": false,
# "islnk": false,
# "isreg": true,
# "issock": false,
# "isuid": false,
# "mode": "0664",
# "mtime": 1427384148.5755742,
# "nlink": 1,
# "path": "/var/log/wtmp",
# "rgrp": true,
# "roth": true,
# "rusr": true,
# "size": 48768,
# "uid": 0,
# "wgrp": true,
# "woth": false,
# "wusr": true,
# "xgrp": false,
# "xoth": false,
# "xusr": false
# },
'''
def main():
    """Gather per-file stat() facts for each directory in ``paths``.

    Module arguments:
        paths (list, required): directories to scan.
        recursive (bool, default no): also walk subdirectories.
        get_checksum (bool, default no): add a SHA-1 checksum for readable
            regular files.

    Exits the module with ``ansible_facts['files']`` — one dict of stat
    fields per file found. Fails if any path is missing or not a directory.
    """
    module = AnsibleModule(  # noqa
        argument_spec=dict(paths=dict(required=True, type='list'),
                           recursive=dict(required=False, default='no', type='bool'),
                           get_checksum=dict(required=False, default='no', type='bool')))
    files = []
    paths = module.params.get('paths')
    for path in paths:
        path = os.path.expanduser(path)
        if not os.path.exists(path) or not os.path.isdir(path):
            module.fail_json(msg="Given path must exist and be a directory")
        get_checksum = module.params.get('get_checksum')
        should_recurse = module.params.get('recursive')
        if not should_recurse:
            path_list = [os.path.join(path, subpath) for subpath in os.listdir(path)]
        else:
            path_list = [os.path.join(w_path, f) for w_path, w_names, w_file in os.walk(path) for f in w_file]
        for filepath in path_list:
            try:
                st = os.stat(filepath)
            except OSError:
                # File vanished (or is unstat-able) between listing and stat;
                # skip it rather than failing the whole scan.
                continue
            mode = st.st_mode
            d = {
                'path': filepath,
                'mode': "%04o" % stat.S_IMODE(mode),
                'isdir': stat.S_ISDIR(mode),
                'ischr': stat.S_ISCHR(mode),
                'isblk': stat.S_ISBLK(mode),
                'isreg': stat.S_ISREG(mode),
                'isfifo': stat.S_ISFIFO(mode),
                'islnk': stat.S_ISLNK(mode),
                'issock': stat.S_ISSOCK(mode),
                'uid': st.st_uid,
                'gid': st.st_gid,
                'size': st.st_size,
                'inode': st.st_ino,
                'dev': st.st_dev,
                'nlink': st.st_nlink,
                'atime': st.st_atime,
                'mtime': st.st_mtime,
                'ctime': st.st_ctime,
                'wusr': bool(mode & stat.S_IWUSR),
                'rusr': bool(mode & stat.S_IRUSR),
                'xusr': bool(mode & stat.S_IXUSR),
                'wgrp': bool(mode & stat.S_IWGRP),
                'rgrp': bool(mode & stat.S_IRGRP),
                'xgrp': bool(mode & stat.S_IXGRP),
                'woth': bool(mode & stat.S_IWOTH),
                'roth': bool(mode & stat.S_IROTH),
                'xoth': bool(mode & stat.S_IXOTH),
                'isuid': bool(mode & stat.S_ISUID),
                'isgid': bool(mode & stat.S_ISGID),
            }
            # Checksum only readable regular files (avoids blocking on FIFOs etc).
            if get_checksum and stat.S_ISREG(mode) and os.access(filepath, os.R_OK):
                d['checksum'] = module.sha1(filepath)
            files.append(d)
    results = dict(ansible_facts=dict(files=files))
    module.exit_json(**results)


# Guard the entry point (standard ansible-module convention); the module is
# still executed as a script by the ansible runner, so behavior is unchanged.
if __name__ == '__main__':
    main()

View File

@ -1,66 +0,0 @@
#!/usr/bin/env python
from ansible.module_utils.basic import * # noqa
DOCUMENTATION = '''
---
module: scan_insights
short_description: Return insights id as fact data
description:
- Inspects the /etc/redhat-access-insights/machine-id file for insights id and returns the found id as fact data
version_added: "2.3"
options:
requirements: [ ]
author: Chris Meyers
'''
EXAMPLES = '''
# Example fact output:
# host | success >> {
# "ansible_facts": {
# "insights": {
# "system_id": "4da7d1f8-14f3-4cdc-acd5-a3465a41f25d"
# }, ... }
'''
INSIGHTS_SYSTEM_ID_FILE='/etc/redhat-access-insights/machine-id'
def get_system_id(filname):
    """Read the Insights machine id from *filname*.

    Args:
        filname: path to the machine-id file (normally
            ``INSIGHTS_SYSTEM_ID_FILE``).

    Returns:
        The first line of the file, stripped of whitespace, or ``None``
        if the file cannot be opened or read.
    """
    # Bug fix: the original ignored its argument and always opened the
    # module-level constant. Honor the path the caller passed in, and use a
    # context manager so the handle is closed on every path.
    system_id = None
    try:
        with open(filname, "r") as f:
            try:
                system_id = str(f.readline())
            except (IOError, ValueError):
                # Unreadable/undecodable content: fall through with None.
                pass
    except IOError:
        return None
    if system_id:
        system_id = system_id.strip()
    return system_id
def main():
    """Expose the Insights system id as ``ansible_facts.insights.system_id``."""
    module = AnsibleModule(  # noqa
        argument_spec = dict()
    )
    results = dict(
        ansible_facts=dict(
            insights=dict(
                system_id=get_system_id(INSIGHTS_SYSTEM_ID_FILE)
            )
        )
    )
    module.exit_json(**results)


main()

View File

@ -1,111 +0,0 @@
#!/usr/bin/env python
from ansible.module_utils.basic import * # noqa
DOCUMENTATION = '''
---
module: scan_packages
short_description: Return installed packages information as fact data
description:
- Return information about installed packages as fact data
version_added: "1.9"
options:
requirements: [ ]
author: Matthew Jones
'''
EXAMPLES = '''
# Example fact output:
# host | success >> {
# "ansible_facts": {
# "packages": {
# "libbz2-1.0": [
# {
# "version": "1.0.6-5",
# "source": "apt",
# "arch": "amd64",
# "name": "libbz2-1.0"
# }
# ],
# "patch": [
# {
# "version": "2.7.1-4ubuntu1",
# "source": "apt",
# "arch": "amd64",
# "name": "patch"
# }
# ],
# "gcc-4.8-base": [
# {
# "version": "4.8.2-19ubuntu1",
# "source": "apt",
# "arch": "amd64",
# "name": "gcc-4.8-base"
# },
# {
# "version": "4.9.2-19ubuntu1",
# "source": "apt",
# "arch": "amd64",
# "name": "gcc-4.8-base"
# }
# ]
# }
'''
def rpm_package_list():
    """Return installed RPM packages keyed by name.

    Each value is a list of dicts — one per installed version — carrying
    name/version/release/epoch/arch plus ``source='rpm'``.
    """
    import rpm
    trans_set = rpm.TransactionSet()
    installed_packages = {}
    for header in trans_set.dbMatch():
        details = dict(
            name=header[rpm.RPMTAG_NAME],
            version=header[rpm.RPMTAG_VERSION],
            release=header[rpm.RPMTAG_RELEASE],
            epoch=header[rpm.RPMTAG_EPOCH],
            arch=header[rpm.RPMTAG_ARCH],
            source='rpm',
        )
        # Multiple installed versions of a package accumulate under one key.
        installed_packages.setdefault(details['name'], []).append(details)
    return installed_packages
def deb_package_list():
    """Return installed deb packages keyed by name (``source='apt'``)."""
    import apt
    apt_cache = apt.Cache()
    installed_packages = {}
    for pkg_name in apt_cache.keys():
        if not apt_cache[pkg_name].is_installed:
            continue
        installed = apt_cache[pkg_name].installed
        details = dict(
            name=pkg_name,
            version=installed.version,
            arch=installed.architecture,
            source='apt',
        )
        # Multiple installed versions of a package accumulate under one key.
        installed_packages.setdefault(details['name'], []).append(details)
    return installed_packages
def main():
    """Dispatch to the right package lister based on the ``os_family`` arg
    and exit with ``ansible_facts.packages`` (or skipped for unknown OSes)."""
    module = AnsibleModule(  # noqa
        argument_spec = dict(os_family=dict(required=True))
    )
    ans_os = module.params['os_family']
    if ans_os in ('RedHat', 'Suse', 'openSUSE Leap'):
        packages = rpm_package_list()
    elif ans_os == 'Debian':
        packages = deb_package_list()
    else:
        packages = None
    if packages is None:
        results = dict(skipped=True, msg="Unsupported Distribution")
    else:
        results = dict(ansible_facts=dict(packages=packages))
    module.exit_json(**results)


main()

View File

@ -1,190 +0,0 @@
#!/usr/bin/env python
import re
from ansible.module_utils.basic import * # noqa
DOCUMENTATION = '''
---
module: scan_services
short_description: Return service state information as fact data
description:
- Return service state information as fact data for various service management utilities
version_added: "1.9"
options:
requirements: [ ]
author: Matthew Jones
'''
EXAMPLES = '''
- monit: scan_services
# Example fact output:
# host | success >> {
# "ansible_facts": {
# "services": {
# "network": {
# "source": "sysv",
# "state": "running",
# "name": "network"
# },
# "arp-ethers.service": {
# "source": "systemd",
# "state": "stopped",
# "name": "arp-ethers.service"
# }
# }
# }
'''
class BaseService(object):
    """Shared state for service scanners: the AnsibleModule handle and a
    flag recording whether the scan results may be incomplete."""

    def __init__(self, module):
        self.module = module
        # Flipped on by scanners that could not determine every service's
        # state (e.g. for lack of privileges).
        self.incomplete_warning = False
class ServiceScanService(BaseService):
    """Discover services via the sysvinit/upstart/chkconfig command-line tools.

    ``gather_services`` returns a dict mapping service name to a dict with at
    least ``name``, ``state`` and ``source`` keys, or ``None`` when the
    ``service`` binary is not available at all.
    """

    def gather_services(self):
        services = {}
        service_path = self.module.get_bin_path("service")
        if service_path is None:
            # No `service` tool at all — nothing this scanner can do.
            return None
        initctl_path = self.module.get_bin_path("initctl")
        chkconfig_path = self.module.get_bin_path("chkconfig")

        # sysvinit: `service --status-all` prints "[ + ]" / "[ - ]" markers.
        if service_path is not None and chkconfig_path is None:
            rc, stdout, stderr = self.module.run_command("%s --status-all 2>&1 | grep -E \"\\[ (\\+|\\-) \\]\"" % service_path, use_unsafe_shell=True)
            for line in stdout.split("\n"):
                line_data = line.split()
                if len(line_data) < 4:
                    continue  # Skipping because we expected more data
                # Columns 0-2 are the "[ + ]" marker; the rest is the name.
                service_name = " ".join(line_data[3:])
                if line_data[1] == "+":
                    service_state = "running"
                else:
                    service_state = "stopped"
                services[service_name] = {"name": service_name, "state": service_state, "source": "sysv"}

        # Upstart: parse `initctl list` lines shaped like
        # "<name> <goal>/<state>[, process <pid>]".
        if initctl_path is not None and chkconfig_path is None:
            p = re.compile(r'^\s?(?P<name>.*)\s(?P<goal>\w+)\/(?P<state>\w+)(\,\sprocess\s(?P<pid>[0-9]+))?\s*$')
            rc, stdout, stderr = self.module.run_command("%s list" % initctl_path)
            real_stdout = stdout.replace("\r","")
            for line in real_stdout.split("\n"):
                m = p.match(line)
                if not m:
                    continue
                service_name = m.group('name')
                service_goal = m.group('goal')
                service_state = m.group('state')
                if m.group('pid'):
                    pid = m.group('pid')
                else:
                    pid = None  # NOQA
                payload = {"name": service_name, "state": service_state, "goal": service_goal, "source": "upstart"}
                services[service_name] = payload

        # RH sysvinit: chkconfig prints per-runlevel on/off flags per service.
        elif chkconfig_path is not None:
            #print '%s --status-all | grep -E "is (running|stopped)"' % service_path
            p = re.compile(
                r'(?P<service>.*?)\s+[0-9]:(?P<rl0>on|off)\s+[0-9]:(?P<rl1>on|off)\s+[0-9]:(?P<rl2>on|off)\s+'
                r'[0-9]:(?P<rl3>on|off)\s+[0-9]:(?P<rl4>on|off)\s+[0-9]:(?P<rl5>on|off)\s+[0-9]:(?P<rl6>on|off)')
            rc, stdout, stderr = self.module.run_command('%s' % chkconfig_path, use_unsafe_shell=True)
            # Check for special cases where stdout does not fit pattern
            match_any = False
            for line in stdout.split('\n'):
                if p.match(line):
                    match_any = True
            if not match_any:
                # Loose probe: does the output at least look like chkconfig data?
                p_simple = re.compile(r'(?P<service>.*?)\s+(?P<rl0>on|off)')
                match_any = False
                for line in stdout.split('\n'):
                    if p_simple.match(line):
                        match_any = True
                if match_any:
                    # Try extra flags " -l --allservices" needed for SLES11
                    rc, stdout, stderr = self.module.run_command('%s -l --allservices' % chkconfig_path, use_unsafe_shell=True)
                elif '--list' in stderr:
                    # Extra flag needed for RHEL5
                    rc, stdout, stderr = self.module.run_command('%s --list' % chkconfig_path, use_unsafe_shell=True)
            for line in stdout.split('\n'):
                m = p.match(line)
                if m:
                    service_name = m.group('service')
                    service_state = 'stopped'
                    if m.group('rl3') == 'on':
                        # Enabled at runlevel 3: ask the service itself for status;
                        # exit code 0 means running.
                        rc, stdout, stderr = self.module.run_command('%s %s status' % (service_path, service_name), use_unsafe_shell=True)
                        service_state = rc
                        if rc in (0,):
                            service_state = 'running'
                        #elif rc in (1,3):
                        else:
                            if 'root' in stderr or 'permission' in stderr.lower() or 'not in sudoers' in stderr.lower():
                                # Could not determine state for lack of privileges;
                                # flag results as incomplete and move on.
                                self.incomplete_warning = True
                                continue
                            else:
                                service_state = 'stopped'
                    service_data = {"name": service_name, "state": service_state, "source": "sysv"}
                    services[service_name] = service_data
        return services
class SystemctlScanService(BaseService):
    """Discover service facts from systemd via ``systemctl``."""

    def systemd_enabled(self):
        """Return True when PID 1 is systemd.

        Reads ``/proc/1/comm`` (comm rather than cmdline, since cmdline
        could be a symlink).
        """
        # Bug fix: the original opened the file without ever closing it
        # (and could return from inside the loop); use a context manager so
        # the handle is released on every path.
        try:
            with open('/proc/1/comm', 'r') as f:
                for line in f:
                    if 'systemd' in line:
                        return True
        except IOError:
            # If comm doesn't exist, old kernel, no systemd
            return False
        return False

    def gather_services(self):
        """Return {name: {name, state, source}} for every systemd unit file,
        or None when systemd/systemctl is unavailable."""
        services = {}
        if not self.systemd_enabled():
            return None
        systemctl_path = self.module.get_bin_path("systemctl", opt_dirs=["/usr/bin", "/usr/local/bin"])
        if systemctl_path is None:
            return None
        # tail/head strip the header and trailing summary lines.
        rc, stdout, stderr = self.module.run_command("%s list-unit-files --type=service | tail -n +2 | head -n -2" % systemctl_path, use_unsafe_shell=True)
        for line in stdout.split("\n"):
            line_data = line.split()
            if len(line_data) != 2:
                continue
            # "enabled" units are reported as running, everything else stopped.
            if line_data[1] == "enabled":
                state_val = "running"
            else:
                state_val = "stopped"
            services[line_data[0]] = {"name": line_data[0], "state": state_val, "source": "systemd"}
        return services
def main():
    """Run every scanner and merge their results into one services fact."""
    module = AnsibleModule(argument_spec = dict())  # noqa
    all_services = {}
    incomplete_warning = False
    for scanner_cls in (ServiceScanService, SystemctlScanService):
        scanner = scanner_cls(module)
        found = scanner.gather_services()
        if found is not None:
            all_services.update(found)
        if scanner.incomplete_warning:
            incomplete_warning = True
    if len(all_services) == 0:
        results = dict(skipped=True, msg="Failed to find any services. Sometimes this is due to insufficient privileges.")
    else:
        results = dict(ansible_facts=dict(services=all_services))
    if incomplete_warning:
        results['msg'] = "WARNING: Could not find status for all services. Sometimes this is due to insufficient privileges."
    module.exit_json(**results)


main()

View File

@ -1,102 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
# Parse module arguments ($true = fail on unsupported arguments).
$params = Parse-Args $args $true;

# "paths" is required; fail the module if it was not supplied.
$paths = Get-Attr $params "paths" $FALSE;
If ($paths -eq $FALSE)
{
    Fail-Json (New-Object psobject) "missing required argument: paths";
}

$get_checksum = Get-Attr $params "get_checksum" $false | ConvertTo-Bool;
$recursive = Get-Attr $params "recursive" $false | ConvertTo-Bool;

# Seconds elapsed between the two dates; used to emit epoch timestamps.
function Date_To_Timestamp($start_date, $end_date)
{
    If($start_date -and $end_date)
    {
        Write-Output (New-TimeSpan -Start $start_date -End $end_date).TotalSeconds
    }
}

$files = @()
ForEach ($path In $paths)
{
    "Path: " + $path
    ForEach ($file in Get-ChildItem $path -Recurse: $recursive)
    {
        "File: " + $file.FullName
        # One psobject of attributes per file found.
        $fileinfo = New-Object psobject
        Set-Attr $fileinfo "path" $file.FullName
        $info = Get-Item $file.FullName;
        $iscontainer = Get-Attr $info "PSIsContainer" $null;
        $length = Get-Attr $info "Length" $null;
        $extension = Get-Attr $info "Extension" $null;
        $attributes = Get-Attr $info "Attributes" "";
        If ($info)
        {
            $accesscontrol = $info.GetAccessControl();
        }
        Else
        {
            $accesscontrol = $null;
        }
        $owner = Get-Attr $accesscontrol "Owner" $null;
        $creationtime = Get-Attr $info "CreationTime" $null;
        $lastaccesstime = Get-Attr $info "LastAccessTime" $null;
        $lastwritetime = Get-Attr $info "LastWriteTime" $null;
        # Epoch base for converting the datetimes to Unix timestamps.
        $epoch_date = Get-Date -Date "01/01/1970"
        If ($iscontainer)
        {
            Set-Attr $fileinfo "isdir" $TRUE;
        }
        Else
        {
            # Only regular files report a size.
            Set-Attr $fileinfo "isdir" $FALSE;
            Set-Attr $fileinfo "size" $length;
        }
        Set-Attr $fileinfo "extension" $extension;
        Set-Attr $fileinfo "attributes" $attributes.ToString();
        # Set-Attr $fileinfo "owner" $getaccesscontrol.Owner;
        # Set-Attr $fileinfo "owner" $info.GetAccessControl().Owner;
        Set-Attr $fileinfo "owner" $owner;
        Set-Attr $fileinfo "creationtime" (Date_To_Timestamp $epoch_date $creationtime);
        Set-Attr $fileinfo "lastaccesstime" (Date_To_Timestamp $epoch_date $lastaccesstime);
        Set-Attr $fileinfo "lastwritetime" (Date_To_Timestamp $epoch_date $lastwritetime);
        # Checksums are opt-in and only computed for non-directories.
        If (($get_checksum) -and -not $fileinfo.isdir)
        {
            $hash = Get-FileChecksum($file.FullName);
            Set-Attr $fileinfo "checksum" $hash;
        }
        $files += $fileinfo
    }
}

$result = New-Object psobject @{
    ansible_facts = New-Object psobject @{
        files = $files
    }
}

Exit-Json $result;

View File

@ -1,66 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
# Registry locations where installed programs record their uninstall info.
# The Wow6432Node path holds 32-bit programs on a 64-bit Windows.
$uninstall_native_path = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall"
$uninstall_wow6432_path = "HKLM:\SOFTWARE\Wow6432Node\Microsoft\Windows\CurrentVersion\Uninstall"

# IntPtr size distinguishes a 32-bit from a 64-bit Windows install.
if ([System.IntPtr]::Size -eq 4) {
    # This is a 32-bit Windows system, so we only check for 32-bit programs, which will be
    # at the native registry location.
    [PSObject []]$packages = Get-ChildItem -Path $uninstall_native_path |
        Get-ItemProperty |
        Select-Object -Property @{Name="name"; Expression={$_."DisplayName"}},
        @{Name="version"; Expression={$_."DisplayVersion"}},
        @{Name="publisher"; Expression={$_."Publisher"}},
        @{Name="arch"; Expression={ "Win32" }} |
        Where-Object { $_.name }
} else {
    # This is a 64-bit Windows system, so we check for 64-bit programs in the native
    # registry location, and also for 32-bit programs under Wow6432Node.
    [PSObject []]$packages = Get-ChildItem -Path $uninstall_native_path |
        Get-ItemProperty |
        Select-Object -Property @{Name="name"; Expression={$_."DisplayName"}},
        @{Name="version"; Expression={$_."DisplayVersion"}},
        @{Name="publisher"; Expression={$_."Publisher"}},
        @{Name="arch"; Expression={ "Win64" }} |
        Where-Object { $_.name }
    $packages += Get-ChildItem -Path $uninstall_wow6432_path |
        Get-ItemProperty |
        Select-Object -Property @{Name="name"; Expression={$_."DisplayName"}},
        @{Name="version"; Expression={$_."DisplayVersion"}},
        @{Name="publisher"; Expression={$_."Publisher"}},
        @{Name="arch"; Expression={ "Win32" }} |
        Where-Object { $_.name }
}

$result = New-Object psobject @{
    ansible_facts = New-Object psobject @{
        packages = $packages
    }
    changed = $false
}

Exit-Json $result;

View File

@ -1,30 +0,0 @@
#!powershell
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# WANT_JSON
# POWERSHELL_COMMON
# Report every Windows service as a fact: display name, the underlying
# service name, and its status lower-cased (e.g. "running"/"stopped").
$result = New-Object psobject @{
    ansible_facts = New-Object psobject @{
        services = Get-Service |
            Select-Object -Property @{Name="name"; Expression={$_."DisplayName"}},
            @{Name="win_svc_name"; Expression={$_."Name"}},
            @{Name="state"; Expression={$_."Status".ToString().ToLower()}}
    }
    changed = $false
}

Exit-Json $result;

View File

@ -203,6 +203,9 @@ JOB_EVENT_MAX_QUEUE_SIZE = 10000
# The number of job events to migrate per-transaction when moving from int -> bigint
JOB_EVENT_MIGRATION_CHUNK_SIZE = 1000000
# The maximum allowed jobs to start on a given task manager cycle
START_TASK_LIMIT = 100
# Disallow sending session cookies over insecure connections
SESSION_COOKIE_SECURE = True
@ -477,6 +480,7 @@ SOCIAL_AUTH_SAML_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + (
'awx.sso.pipeline.update_user_orgs',
'awx.sso.pipeline.update_user_teams',
)
SAML_AUTO_CREATE_OBJECTS = True
SOCIAL_AUTH_LOGIN_URL = '/'
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/sso/complete/'
@ -789,7 +793,7 @@ ASGI_APPLICATION = "awx.main.routing.application"
CHANNEL_LAYERS = {
"default": {
"BACKEND": "awx.main.consumers.ExpiringRedisChannelLayer",
"BACKEND": "channels_redis.core.RedisChannelLayer",
"CONFIG": {
"hosts": [BROKER_URL],
"capacity": 10000,

View File

@ -919,6 +919,17 @@ def get_saml_entity_id():
return settings.TOWER_URL_BASE
register(
'SAML_AUTO_CREATE_OBJECTS',
field_class=fields.BooleanField,
default=True,
label=_('Automatically Create Organizations and Teams on SAML Login'),
help_text=_('When enabled (the default), mapped Organizations and Teams '
'will be created automatically on successful SAML login.'),
category=_('SAML'),
category_slug='saml',
)
register(
'SOCIAL_AUTH_SAML_CALLBACK_URL',
field_class=fields.CharField,

View File

@ -10,6 +10,7 @@ import logging
from social_core.exceptions import AuthException
# Django
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from django.db.models import Q
@ -80,11 +81,18 @@ def _update_m2m_from_expression(user, related, expr, remove=True):
def _update_org_from_attr(user, related, attr, remove, remove_admins, remove_auditors):
from awx.main.models import Organization
from django.conf import settings
org_ids = []
for org_name in attr:
org = Organization.objects.get_or_create(name=org_name)[0]
try:
if settings.SAML_AUTO_CREATE_OBJECTS:
org = Organization.objects.get_or_create(name=org_name)[0]
else:
org = Organization.objects.get(name=org_name)
except ObjectDoesNotExist:
continue
org_ids.append(org.id)
getattr(org, related).members.add(user)
@ -199,11 +207,24 @@ def update_user_teams_by_saml_attr(backend, details, user=None, *args, **kwargs)
if organization_alias:
organization_name = organization_alias
org = Organization.objects.get_or_create(name=organization_name)[0]
try:
if settings.SAML_AUTO_CREATE_OBJECTS:
org = Organization.objects.get_or_create(name=organization_name)[0]
else:
org = Organization.objects.get(name=organization_name)
except ObjectDoesNotExist:
continue
if team_alias:
team_name = team_alias
team = Team.objects.get_or_create(name=team_name, organization=org)[0]
try:
if settings.SAML_AUTO_CREATE_OBJECTS:
team = Team.objects.get_or_create(name=team_name, organization=org)[0]
else:
team = Team.objects.get(name=team_name, organization=org)
except ObjectDoesNotExist:
continue
team_ids.append(team.id)
team.member_role.members.add(user)

View File

@ -174,8 +174,15 @@ class TestSAMLAttr():
return (o1, o2, o3)
@pytest.fixture
def mock_settings(self):
def mock_settings(self, request):
fixture_args = request.node.get_closest_marker('fixture_args')
if fixture_args and 'autocreate' in fixture_args.kwargs:
autocreate = fixture_args.kwargs['autocreate']
else:
autocreate = True
class MockSettings():
SAML_AUTO_CREATE_OBJECTS = autocreate
SOCIAL_AUTH_SAML_ORGANIZATION_ATTR = {
'saml_attr': 'memberOf',
'saml_admin_attr': 'admins',
@ -304,3 +311,41 @@ class TestSAMLAttr():
assert Team.objects.get(
name='Yellow_Alias', organization__name='Default4_Alias').member_role.members.count() == 1
@pytest.mark.fixture_args(autocreate=False)
def test_autocreate_disabled(self, users, kwargs, mock_settings):
    """With SAML_AUTO_CREATE_OBJECTS disabled (via the fixture_args marker),
    SAML logins must never create orgs/teams, but must still add members
    to pre-existing ones."""
    kwargs['response']['attributes']['memberOf'] = ['Default1', 'Default2', 'Default3']
    kwargs['response']['attributes']['groups'] = ['Blue', 'Red', 'Green']

    # Nothing exists yet, so with autocreate off nothing may be created.
    with mock.patch('django.conf.settings', mock_settings):
        for u in users:
            update_user_orgs_by_saml_attr(None, None, u, **kwargs)
            update_user_teams_by_saml_attr(None, None, u, **kwargs)
        assert Organization.objects.count() == 0
        assert Team.objects.count() == 0

        # precreate everything
        o1 = Organization.objects.create(name='Default1')
        o2 = Organization.objects.create(name='Default2')
        o3 = Organization.objects.create(name='Default3')
        Team.objects.create(name='Blue', organization_id=o1.id)
        Team.objects.create(name='Blue', organization_id=o2.id)
        Team.objects.create(name='Blue', organization_id=o3.id)
        Team.objects.create(name='Red', organization_id=o1.id)
        Team.objects.create(name='Green', organization_id=o1.id)
        Team.objects.create(name='Green', organization_id=o3.id)

        # Now the same logins should attach all three users everywhere.
        for u in users:
            update_user_orgs_by_saml_attr(None, None, u, **kwargs)
            update_user_teams_by_saml_attr(None, None, u, **kwargs)
        assert o1.member_role.members.count() == 3
        assert o2.member_role.members.count() == 3
        assert o3.member_role.members.count() == 3
        assert Team.objects.get(name='Blue', organization__name='Default1').member_role.members.count() == 3
        assert Team.objects.get(name='Blue', organization__name='Default2').member_role.members.count() == 3
        assert Team.objects.get(name='Blue', organization__name='Default3').member_role.members.count() == 3
        assert Team.objects.get(name='Red', organization__name='Default1').member_role.members.count() == 3
        assert Team.objects.get(name='Green', organization__name='Default1').member_role.members.count() == 3
        assert Team.objects.get(name='Green', organization__name='Default3').member_role.members.count() == 3

View File

@ -24,6 +24,10 @@ class JobTemplates extends SchedulesMixin(
return this.http.post(`${this.baseUrl}${id}/launch/`, data);
}
readTemplateOptions(id) {
return this.http.options(`${this.baseUrl}/${id}/`);
}
readLaunch(id) {
return this.http.get(`${this.baseUrl}${id}/launch/`);
}

View File

@ -12,6 +12,10 @@ class WorkflowJobTemplates extends SchedulesMixin(NotificationsMixin(Base)) {
return this.http.get(`${this.baseUrl}${id}/webhook_key/`);
}
readWorkflowJobTemplateOptions(id) {
return this.http.options(`${this.baseUrl}/${id}/`);
}
updateWebhookKey(id) {
return this.http.post(`${this.baseUrl}${id}/webhook_key/`);
}

View File

@ -0,0 +1,74 @@
import React from 'react';
import {
string,
oneOfType,
object,
func,
bool,
node,
oneOf,
number,
} from 'prop-types';
import { useField } from 'formik';
import { FormGroup } from '@patternfly/react-core';
import CodeMirrorInput from './CodeMirrorInput';
import { FieldTooltip } from '../FormField';
function CodeMirrorField({
id,
name,
label,
tooltip,
helperText,
validate,
isRequired,
mode,
...rest
}) {
const [field, meta, helpers] = useField({ name, validate });
const isValid = !(meta.touched && meta.error);
return (
<FormGroup
id={`${id}-field`}
fieldId={id}
helperText={helperText}
helperTextInvalid={meta.error}
isRequired={isRequired}
validated={isValid ? 'default' : 'error'}
label={label}
labelIcon={<FieldTooltip content={tooltip} />}
>
<CodeMirrorInput
id={id}
{...rest}
{...field}
onChange={value => {
helpers.setValue(value);
}}
mode={mode}
/>
</FormGroup>
);
}
CodeMirrorField.propTypes = {
helperText: string,
id: string.isRequired,
name: string.isRequired,
label: oneOfType([object, string]).isRequired,
validate: func,
isRequired: bool,
tooltip: node,
mode: oneOf(['javascript', 'yaml', 'jinja2']).isRequired,
rows: number,
};
CodeMirrorField.defaultProps = {
helperText: '',
validate: () => {},
isRequired: false,
tooltip: null,
rows: 5,
};
export default CodeMirrorField;

View File

@ -107,6 +107,7 @@ CodeMirrorInput.propTypes = {
hasErrors: bool,
fullHeight: bool,
rows: number,
className: string,
};
CodeMirrorInput.defaultProps = {
readOnly: false,
@ -114,6 +115,7 @@ CodeMirrorInput.defaultProps = {
rows: 6,
fullHeight: false,
hasErrors: false,
className: '',
};
export default CodeMirrorInput;

View File

@ -1,6 +1,7 @@
import CodeMirrorInput from './CodeMirrorInput';
export default CodeMirrorInput;
export { default as CodeMirrorField } from './CodeMirrorField';
export { default as VariablesDetail } from './VariablesDetail';
export { default as VariablesInput } from './VariablesInput';
export { default as VariablesField } from './VariablesField';

View File

@ -23,6 +23,7 @@ function FieldWithPrompt({
promptId,
promptName,
tooltip,
isDisabled,
}) {
return (
<div className="pf-c-form__group">
@ -39,6 +40,7 @@ function FieldWithPrompt({
{tooltip && <FieldTooltip content={tooltip} />}
</div>
<StyledCheckboxField
isDisabled={isDisabled}
id={promptId}
label={i18n._(t`Prompt on launch`)}
name={promptName}

View File

@ -0,0 +1,70 @@
import React from 'react';
import PropTypes from 'prop-types';
import { useField } from 'formik';
import { FormGroup, TextArea } from '@patternfly/react-core';
import FieldTooltip from './FieldTooltip';
function ArrayTextField(props) {
const {
id,
helperText,
name,
label,
tooltip,
tooltipMaxWidth,
validate,
isRequired,
type,
...rest
} = props;
const [field, meta, helpers] = useField({ name, validate });
const isValid = !(meta.touched && meta.error);
const value = field.value || [];
return (
<FormGroup
fieldId={id}
helperText={helperText}
helperTextInvalid={meta.error}
isRequired={isRequired}
validated={isValid ? 'default' : 'error'}
label={label}
labelIcon={<FieldTooltip content={tooltip} maxWidth={tooltipMaxWidth} />}
>
<TextArea
id={id}
isRequired={isRequired}
validated={isValid ? 'default' : 'error'}
resizeOrientation="vertical"
{...rest}
{...field}
value={value.join('\n')}
onChange={val => {
helpers.setValue(val.split('\n').map(v => v.trim()));
}}
/>
</FormGroup>
);
}
ArrayTextField.propTypes = {
helperText: PropTypes.string,
id: PropTypes.string.isRequired,
name: PropTypes.string.isRequired,
label: PropTypes.oneOfType([PropTypes.object, PropTypes.string]).isRequired,
validate: PropTypes.func,
isRequired: PropTypes.bool,
tooltip: PropTypes.node,
tooltipMaxWidth: PropTypes.string,
};
ArrayTextField.defaultProps = {
helperText: '',
validate: () => {},
isRequired: false,
tooltip: null,
tooltipMaxWidth: '',
};
export default ArrayTextField;

View File

@ -9,10 +9,19 @@ const QuestionCircleIcon = styled(PFQuestionCircleIcon)`
margin-left: 10px;
`;
function CheckboxField({ id, name, label, tooltip, validate, ...rest }) {
function CheckboxField({
id,
name,
label,
tooltip,
validate,
isDisabled,
...rest
}) {
const [field] = useField({ name, validate });
return (
<Checkbox
isDisabled={isDisabled}
aria-label={label}
label={
<span>

View File

@ -1,6 +1,7 @@
import React, { useState, useEffect } from 'react';
import { useFormikContext } from 'formik';
import { Alert } from '@patternfly/react-core';
import { FormFullWidthLayout } from '../FormLayout';
const findErrorStrings = (obj, messages = []) => {
if (typeof obj === 'string') {
@ -35,7 +36,18 @@ function FormSubmitError({ error }) {
typeof error.response.data === 'object' &&
Object.keys(error.response.data).length > 0
) {
const errorMessages = error.response.data;
const errorMessages = {};
Object.keys(error.response.data).forEach(fieldName => {
const errors = error.response.data[fieldName];
if (!errors) {
return;
}
if (Array.isArray(errors.length)) {
errorMessages[fieldName] = errors.join(' ');
} else {
errorMessages[fieldName] = errors;
}
});
setErrors(errorMessages);
const messages = findErrorStrings(error.response.data);
@ -52,15 +64,17 @@ function FormSubmitError({ error }) {
}
return (
<Alert
variant="danger"
isInline
title={
Array.isArray(errorMessage)
? errorMessage.map(msg => <div key={msg}>{msg}</div>)
: errorMessage
}
/>
<FormFullWidthLayout>
<Alert
variant="danger"
isInline
title={
Array.isArray(errorMessage)
? errorMessage.map(msg => <div key={msg}>{msg}</div>)
: errorMessage
}
/>
</FormFullWidthLayout>
);
}

View File

@ -4,3 +4,4 @@ export { default as FieldTooltip } from './FieldTooltip';
export { default as PasswordField } from './PasswordField';
export { default as PasswordInput } from './PasswordInput';
export { default as FormSubmitError } from './FormSubmitError';
export { default as ArrayTextField } from './ArrayTextField';

View File

@ -33,6 +33,7 @@ function CredentialLookup({
history,
i18n,
tooltip,
isDisabled,
}) {
const {
result: { count, credentials, relatedSearchableKeys, searchableKeys },
@ -108,6 +109,7 @@ function CredentialLookup({
onChange={onChange}
required={required}
qsConfig={QS_CONFIG}
isDisabled={isDisabled}
renderOptionsList={({ state, dispatch, canDelete }) => (
<OptionsList
value={state.selectedItems}

View File

@ -10,6 +10,7 @@ import OptionsList from '../OptionsList';
import useRequest from '../../util/useRequest';
import { getQSConfig, parseQueryString } from '../../util/qs';
import LookupErrorMessage from './shared/LookupErrorMessage';
import FieldWithPrompt from '../FieldWithPrompt';
const QS_CONFIG = getQSConfig('inventory', {
page: 1,
@ -17,9 +18,26 @@ const QS_CONFIG = getQSConfig('inventory', {
order_by: 'name',
});
function InventoryLookup({ value, onChange, onBlur, required, i18n, history }) {
function InventoryLookup({
value,
onChange,
onBlur,
i18n,
history,
required,
isPromptableField,
fieldId,
promptId,
promptName,
}) {
const {
result: { inventories, count, relatedSearchableKeys, searchableKeys },
result: {
inventories,
count,
relatedSearchableKeys,
searchableKeys,
canEdit,
},
request: fetchInventories,
error,
isLoading,
@ -39,16 +57,86 @@ function InventoryLookup({ value, onChange, onBlur, required, i18n, history }) {
searchableKeys: Object.keys(
actionsResponse.data.actions?.GET || {}
).filter(key => actionsResponse.data.actions?.GET[key].filterable),
canEdit: Boolean(actionsResponse.data.actions.POST),
};
}, [history.location]),
{ inventories: [], count: 0, relatedSearchableKeys: [], searchableKeys: [] }
{
inventories: [],
count: 0,
relatedSearchableKeys: [],
searchableKeys: [],
canEdit: false,
}
);
useEffect(() => {
fetchInventories();
}, [fetchInventories]);
return (
return isPromptableField ? (
<>
<FieldWithPrompt
fieldId={fieldId}
isRequired={required}
label={i18n._(t`Inventory`)}
promptId={promptId}
promptName={promptName}
isDisabled={!canEdit}
tooltip={i18n._(t`Select the inventory containing the hosts
you want this job to manage.`)}
>
<Lookup
id="inventory-lookup"
header={i18n._(t`Inventory`)}
value={value}
onChange={onChange}
onBlur={onBlur}
required={required}
isLoading={isLoading}
isDisabled={!canEdit}
qsConfig={QS_CONFIG}
renderOptionsList={({ state, dispatch, canDelete }) => (
<OptionsList
value={state.selectedItems}
options={inventories}
optionCount={count}
searchColumns={[
{
name: i18n._(t`Name`),
key: 'name__icontains',
isDefault: true,
},
{
name: i18n._(t`Created By (Username)`),
key: 'created_by__username__icontains',
},
{
name: i18n._(t`Modified By (Username)`),
key: 'modified_by__username__icontains',
},
]}
sortColumns={[
{
name: i18n._(t`Name`),
key: 'name',
},
]}
searchableKeys={searchableKeys}
relatedSearchableKeys={relatedSearchableKeys}
multiple={state.multiple}
header={i18n._(t`Inventory`)}
name="inventory"
qsConfig={QS_CONFIG}
readOnly={!canDelete}
selectItem={item => dispatch({ type: 'SELECT_ITEM', item })}
deselectItem={item => dispatch({ type: 'DESELECT_ITEM', item })}
/>
)}
/>
<LookupErrorMessage error={error} />
</FieldWithPrompt>
</>
) : (
<>
<Lookup
id="inventory-lookup"
@ -58,6 +146,7 @@ function InventoryLookup({ value, onChange, onBlur, required, i18n, history }) {
onBlur={onBlur}
required={required}
isLoading={isLoading}
isDisabled={!canEdit}
qsConfig={QS_CONFIG}
renderOptionsList={({ state, dispatch, canDelete }) => (
<OptionsList

View File

@ -27,6 +27,8 @@ import { QSConfig } from '../../types';
const ChipHolder = styled.div`
--pf-c-form-control--Height: auto;
background-color: ${props =>
props.isDisabled ? 'var(--pf-global--disabled-color--300)' : null};
`;
function Lookup(props) {
const {
@ -43,6 +45,7 @@ function Lookup(props) {
renderOptionsList,
history,
i18n,
isDisabled,
} = props;
const [state, dispatch] = useReducer(
@ -88,7 +91,8 @@ function Lookup(props) {
};
const { isModalOpen, selectedItems } = state;
const canDelete = !required || (multiple && value.length > 1);
const canDelete =
(!required || (multiple && value.length > 1)) && !isDisabled;
let items = [];
if (multiple) {
items = value;
@ -103,11 +107,11 @@ function Lookup(props) {
id={id}
onClick={() => dispatch({ type: 'TOGGLE_MODAL' })}
variant={ButtonVariant.control}
isDisabled={isLoading}
isDisabled={isLoading || isDisabled}
>
<SearchIcon />
</Button>
<ChipHolder className="pf-c-form-control">
<ChipHolder isDisabled={isDisabled} className="pf-c-form-control">
<ChipGroup numChips={5} totalChips={items.length}>
{items.map(item =>
renderItemChip({

View File

@ -32,7 +32,7 @@ function ProjectLookup({
history,
}) {
const {
result: { projects, count, relatedSearchableKeys, searchableKeys },
result: { projects, count, relatedSearchableKeys, searchableKeys, canEdit },
request: fetchProjects,
error,
isLoading,
@ -55,6 +55,7 @@ function ProjectLookup({
searchableKeys: Object.keys(
actionsResponse.data.actions?.GET || {}
).filter(key => actionsResponse.data.actions?.GET[key].filterable),
canEdit: Boolean(actionsResponse.data.actions.POST),
};
}, [history.location.search, autocomplete]),
{
@ -62,6 +63,7 @@ function ProjectLookup({
projects: [],
relatedSearchableKeys: [],
searchableKeys: [],
canEdit: false,
}
);
@ -87,6 +89,7 @@ function ProjectLookup({
onChange={onChange}
required={required}
isLoading={isLoading}
isDisabled={!canEdit}
qsConfig={QS_CONFIG}
renderOptionsList={({ state, dispatch, canDelete }) => (
<OptionsList

View File

@ -6,3 +6,4 @@ export { default as MultiCredentialsLookup } from './MultiCredentialsLookup';
export { default as CredentialLookup } from './CredentialLookup';
export { default as ApplicationLookup } from './ApplicationLookup';
export { default as HostFilterLookup } from './HostFilterLookup';
export { default as OrganizationLookup } from './OrganizationLookup';

View File

@ -11,12 +11,22 @@ export default function useSyncedSelectValue(value, onChange) {
const [selections, setSelections] = useState([]);
useEffect(() => {
const newOptions = [];
if (value !== selections && options.length) {
const syncedValue = value.map(item =>
options.find(i => i.id === item.id)
);
const syncedValue = value.map(item => {
const match = options.find(i => {
return i.id === item.id;
});
if (!match) {
newOptions.push(item);
}
return match || item;
});
setSelections(syncedValue);
}
if (newOptions.length > 0) {
setOptions(options.concat(newOptions));
}
/* eslint-disable-next-line react-hooks/exhaustive-deps */
}, [value, options]);
@ -27,7 +37,6 @@ export default function useSyncedSelectValue(value, onChange) {
onChange(selections.concat(item));
}
};
return {
selections: options.length ? addToStringToObjects(selections) : [],
onSelect,

View File

@ -104,7 +104,7 @@ function CredentialFormFields({
error={orgMeta.error}
/>
<FormGroup
fieldId="credential-credentialType"
fieldId="credential-Type"
helperTextInvalid={credTypeMeta.error}
isRequired
validated={
@ -114,7 +114,7 @@ function CredentialFormFields({
>
<AnsibleSelect
{...credTypeField}
id="credential_type"
id="credential-type"
data={[
{
value: '',
@ -224,6 +224,7 @@ function CredentialForm({
<FormFullWidthLayout>
<ActionGroup>
<Button
id="credential-form-save-button"
aria-label={i18n._(t`Save`)}
variant="primary"
type="button"
@ -235,6 +236,7 @@ function CredentialForm({
credentialTypes[formik.values.credential_type]?.kind ===
'external' && (
<Button
id="credential-form-test-button"
aria-label={i18n._(t`Test`)}
variant="secondary"
type="button"
@ -245,6 +247,7 @@ function CredentialForm({
</Button>
)}
<Button
id="credential-form-cancel-button"
aria-label={i18n._(t`Cancel`)}
variant="secondary"
type="button"

View File

@ -136,23 +136,24 @@ describe('<CredentialForm />', () => {
test('should display cred type subform when scm type select has a value', async () => {
await act(async () => {
await wrapper
.find('AnsibleSelect[id="credential_type"]')
.find('AnsibleSelect[id="credential-type"]')
.invoke('onChange')(null, 1);
});
wrapper.update();
machineFieldExpects();
await act(async () => {
await wrapper
.find('AnsibleSelect[id="credential_type"]')
.find('AnsibleSelect[id="credential-type"]')
.invoke('onChange')(null, 2);
});
wrapper.update();
sourceFieldExpects();
});
test('should update expected fields when gce service account json file uploaded', async () => {
await act(async () => {
await wrapper
.find('AnsibleSelect[id="credential_type"]')
.find('AnsibleSelect[id="credential-type"]')
.invoke('onChange')(null, 10);
});
wrapper.update();
@ -163,7 +164,7 @@ describe('<CredentialForm />', () => {
wrapper.find('textarea#credential-ssh_key_data').prop('value')
).toBe('');
await act(async () => {
wrapper.find('FileUpload').invoke('onChange')({
wrapper.find('FileUpload#credential-gce-file').invoke('onChange')({
name: 'foo.json',
text: () =>
'{"client_email":"testemail@ansible.com","project_id":"test123","private_key":"-----BEGIN PRIVATE KEY-----\\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\\n-----END PRIVATE KEY-----\\n"}',
@ -182,9 +183,12 @@ describe('<CredentialForm />', () => {
'-----BEGIN PRIVATE KEY-----\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n-----END PRIVATE KEY-----\n'
);
});
test('should clear expected fields when file clear button clicked', async () => {
await act(async () => {
wrapper.find('FileUploadField').invoke('onClearButtonClick')();
wrapper
.find('FileUploadField#credential-gce-file')
.invoke('onClearButtonClick')();
});
wrapper.update();
expect(wrapper.find('input#credential-username').prop('value')).toBe('');
@ -193,10 +197,24 @@ describe('<CredentialForm />', () => {
wrapper.find('textarea#credential-ssh_key_data').prop('value')
).toBe('');
});
test('should update field when RSA Private Key file uploaded', async () => {
await act(async () => {
wrapper.find('FileUpload#credential-ssh_key_data').invoke('onChange')(
'-----BEGIN PRIVATE KEY-----\\nBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\\n-----END PRIVATE KEY-----\\n',
'foo.key'
);
});
wrapper.update();
expect(
wrapper.find('textarea#credential-ssh_key_data').prop('value')
).toBe(
'-----BEGIN PRIVATE KEY-----\\nBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\\n-----END PRIVATE KEY-----\\n'
);
});
test('should show error when error thrown parsing JSON', async () => {
await act(async () => {
await wrapper
.find('AnsibleSelect[id="credential_type"]')
.find('AnsibleSelect[id="credential-type"]')
.invoke('onChange')(null, 10);
});
wrapper.update();
@ -204,7 +222,7 @@ describe('<CredentialForm />', () => {
'Select a JSON formatted service account key to autopopulate the following fields.'
);
await act(async () => {
wrapper.find('FileUpload').invoke('onChange')({
wrapper.find('FileUpload#credential-gce-file').invoke('onChange')({
name: 'foo.json',
text: () => '{not good json}',
});
@ -227,7 +245,7 @@ describe('<CredentialForm />', () => {
test('should show Test button when external credential type is selected', async () => {
await act(async () => {
await wrapper
.find('AnsibleSelect[id="credential_type"]')
.find('AnsibleSelect[id="credential-type"]')
.invoke('onChange')(null, 21);
});
wrapper.update();

View File

@ -1,12 +1,13 @@
import React from 'react';
import React, { useState } from 'react';
import { useField, useFormikContext } from 'formik';
import { shape, string } from 'prop-types';
import styled from 'styled-components';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import {
FileUpload as PFFileUpload,
FormGroup,
InputGroup,
TextArea,
TextInput,
} from '@patternfly/react-core';
import { FieldTooltip, PasswordInput } from '../../../../components/FormField';
@ -16,19 +17,32 @@ import { required } from '../../../../util/validators';
import { CredentialPluginField } from './CredentialPlugins';
import BecomeMethodField from './BecomeMethodField';
const FileUpload = styled(PFFileUpload)`
flex-grow: 1;
`;
function CredentialInput({ fieldOptions, credentialKind, ...rest }) {
const [subFormField, meta] = useField(`inputs.${fieldOptions.id}`);
const [fileName, setFileName] = useState('');
const [fileIsUploading, setFileIsUploading] = useState(false);
const [subFormField, meta, helpers] = useField(`inputs.${fieldOptions.id}`);
const isValid = !(meta.touched && meta.error);
if (fieldOptions.multiline) {
const handleFileChange = (value, filename) => {
helpers.setValue(value);
setFileName(filename);
};
return (
<TextArea
<FileUpload
{...subFormField}
id={`credential-${fieldOptions.id}`}
rows={6}
resizeOrientation="vertical"
onChange={(value, event) => {
subFormField.onChange(event);
}}
type="text"
filename={fileName}
onChange={handleFileChange}
onReadStarted={() => setFileIsUploading(true)}
onReadFinished={() => setFileIsUploading(false)}
isLoading={fileIsUploading}
allowEditingUploadedText
validated={isValid ? 'default' : 'error'}
/>
);

View File

@ -17,10 +17,10 @@ import { InstanceGroupsAPI } from '../../api';
import RoutedTabs from '../../components/RoutedTabs';
import ContentError from '../../components/ContentError';
import ContentLoading from '../../components/ContentLoading';
import JobList from '../../components/JobList';
import ContainerGroupDetails from './ContainerGroupDetails';
import ContainerGroupEdit from './ContainerGroupEdit';
import Jobs from './Jobs';
function ContainerGroup({ i18n, setBreadcrumb }) {
const { id } = useParams();
@ -117,7 +117,10 @@ function ContainerGroup({ i18n, setBreadcrumb }) {
<ContainerGroupDetails />
</Route>
<Route path="/instance_groups/container_group/:id/jobs">
<Jobs />
<JobList
showTypeColumn
defaultParams={{ instance_group: instanceGroup.id }}
/>
</Route>
</>
)}

View File

@ -1,14 +0,0 @@
import React from 'react';
import { Card, PageSection } from '@patternfly/react-core';
function Jobs() {
return (
<PageSection>
<Card>
<div>Jobs</div>
</Card>
</PageSection>
);
}
export default Jobs;

View File

@ -1 +0,0 @@
export { default } from './Jobs';

View File

@ -24,24 +24,30 @@ function NotificationTemplate({ setBreadcrumb, i18n }) {
const match = useRouteMatch();
const location = useLocation();
const {
result: template,
result: { template, defaultMessages },
isLoading,
error,
request: fetchTemplate,
} = useRequest(
useCallback(async () => {
const { data } = await NotificationTemplatesAPI.readDetail(templateId);
setBreadcrumb(data);
return data;
const [detail, options] = await Promise.all([
NotificationTemplatesAPI.readDetail(templateId),
NotificationTemplatesAPI.readOptions(),
]);
setBreadcrumb(detail.data);
return {
template: detail.data,
defaultMessages: options.data.actions.POST.messages,
};
}, [templateId, setBreadcrumb]),
null
{ template: null, defaultMessages: null }
);
useEffect(() => {
fetchTemplate();
}, [fetchTemplate]);
}, [fetchTemplate, location.pathname]);
if (error) {
if (!isLoading && error) {
return (
<PageSection>
<Card>
@ -60,7 +66,7 @@ function NotificationTemplate({ setBreadcrumb, i18n }) {
);
}
const showCardHeader = !isLoading && !location.pathname.endsWith('edit');
const showCardHeader = !location.pathname.endsWith('edit');
const tabs = [
{
name: (
@ -93,6 +99,7 @@ function NotificationTemplate({ setBreadcrumb, i18n }) {
<Route path="/notification_templates/:id/edit">
<NotificationTemplateEdit
template={template}
defaultMessages={defaultMessages}
isLoading={isLoading}
/>
</Route>

View File

@ -1,5 +1,86 @@
import React from 'react';
import React, { useState, useEffect, useCallback } from 'react';
import PropTypes from 'prop-types';
import { useHistory, Link } from 'react-router-dom';
import { t } from '@lingui/macro';
import { withI18n } from '@lingui/react';
import { Card, PageSection } from '@patternfly/react-core';
import { CardBody } from '../../components/Card';
import { NotificationTemplatesAPI } from '../../api';
import useRequest from '../../util/useRequest';
import ContentError from '../../components/ContentError';
import NotificationTemplateForm from './shared/NotificationTemplateForm';
export default function NotificationTemplateAdd() {
return <div />;
function NotificationTemplateAdd({ i18n }) {
const history = useHistory();
const [formError, setFormError] = useState(null);
const {
result: defaultMessages,
error,
request: fetchDefaultMessages,
} = useRequest(
useCallback(async () => {
const { data } = await NotificationTemplatesAPI.readOptions();
return data.actions.POST.messages;
}, [])
);
useEffect(() => {
fetchDefaultMessages();
}, [fetchDefaultMessages]);
const handleSubmit = async values => {
try {
const { data } = await NotificationTemplatesAPI.create(values);
history.push(`/notification_templates/${data.id}`);
} catch (err) {
setFormError(err);
}
};
const handleCancel = () => {
history.push('/notification_templates');
};
if (error) {
return (
<PageSection>
<Card>
<ContentError error={error}>
{error.response.status === 404 && (
<span>
{i18n._(t`Notification Template not found.`)}{' '}
<Link to="/notification_templates">
{i18n._(t`View all Notification Templates.`)}
</Link>
</span>
)}
</ContentError>
</Card>
</PageSection>
);
}
return (
<PageSection>
<Card>
<CardBody>
{defaultMessages && (
<NotificationTemplateForm
defaultMessages={defaultMessages}
onSubmit={handleSubmit}
onCancel={handleCancel}
submitError={formError}
/>
)}
</CardBody>
</Card>
</PageSection>
);
}
NotificationTemplateAdd.contextTypes = {
custom_virtualenvs: PropTypes.arrayOf(PropTypes.string),
};
export { NotificationTemplateAdd as _NotificationTemplateAdd };
export default withI18n()(NotificationTemplateAdd);

View File

@ -234,7 +234,7 @@ function NotificationTemplateDetail({ i18n, template }) {
<Detail
label={i18n._(t`Username`)}
value={configuration.rocketchat_username}
dataCy="nt-detail-pagerduty-rocketchat-username"
dataCy="nt-detail-rocketchat-username"
/>
<Detail
label={i18n._(t`Icon URL`)}

View File

@ -2,33 +2,17 @@ import React, { useState } from 'react';
import PropTypes from 'prop-types';
import { useHistory } from 'react-router-dom';
import { CardBody } from '../../../components/Card';
import { OrganizationsAPI } from '../../../api';
import { Config } from '../../../contexts/Config';
import { NotificationTemplatesAPI } from '../../../api';
import NotificationTemplateForm from '../shared/NotificationTemplateForm';
function NotificationTemplateEdit({ template }) {
function NotificationTemplateEdit({ template, defaultMessages }) {
const detailsUrl = `/notification_templates/${template.id}/details`;
const history = useHistory();
const [formError, setFormError] = useState(null);
const handleSubmit = async (
values,
groupsToAssociate,
groupsToDisassociate
) => {
const handleSubmit = async values => {
try {
await OrganizationsAPI.update(template.id, values);
await Promise.all(
groupsToAssociate.map(id =>
OrganizationsAPI.associateInstanceGroup(template.id, id)
)
);
await Promise.all(
groupsToDisassociate.map(id =>
OrganizationsAPI.disassociateInstanceGroup(template.id, id)
)
);
await NotificationTemplatesAPI.update(template.id, values);
history.push(detailsUrl);
} catch (error) {
setFormError(error);
@ -41,17 +25,13 @@ function NotificationTemplateEdit({ template }) {
return (
<CardBody>
<Config>
{({ me }) => (
<NotificationTemplateForm
template={template}
onSubmit={handleSubmit}
onCancel={handleCancel}
me={me || {}}
submitError={formError}
/>
)}
</Config>
<NotificationTemplateForm
template={template}
defaultMessages={defaultMessages}
onSubmit={handleSubmit}
onCancel={handleCancel}
submitError={formError}
/>
</CardBody>
);
}

View File

@ -0,0 +1,234 @@
import 'styled-components/macro';
import React, { useEffect, useRef } from 'react';
import { withI18n } from '@lingui/react';
import { t, Trans } from '@lingui/macro';
import { useField, useFormikContext } from 'formik';
import { Switch, Text } from '@patternfly/react-core';
import {
FormFullWidthLayout,
SubFormLayout,
} from '../../../components/FormLayout';
import CodeMirrorField from '../../../components/CodeMirrorInput/CodeMirrorField';
function CustomMessagesSubForm({ defaultMessages, type, i18n }) {
const [useCustomField, , useCustomHelpers] = useField('useCustomMessages');
const showMessages = type !== 'webhook';
const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
const { setFieldValue } = useFormikContext();
const mountedRef = useRef(null);
useEffect(
function resetToDefaultMessages() {
if (!mountedRef.current) {
mountedRef.current = true;
return;
}
const defs = defaultMessages[type];
const resetFields = (name, defaults) => {
setFieldValue(`${name}.message`, defaults.message || '');
setFieldValue(`${name}.body`, defaults.body || '');
};
resetFields('messages.started', defs.started);
resetFields('messages.success', defs.success);
resetFields('messages.error', defs.error);
resetFields(
'messages.workflow_approval.approved',
defs.workflow_approval.approved
);
resetFields(
'messages.workflow_approval.denied',
defs.workflow_approval.denied
);
resetFields(
'messages.workflow_approval.running',
defs.workflow_approval.running
);
resetFields(
'messages.workflow_approval.timed_out',
defs.workflow_approval.timed_out
);
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[type, setFieldValue]
);
return (
<>
<Switch
id="toggle-custom-messages"
label={i18n._(t`Customize messages…`)}
isChecked={!!useCustomField.value}
onChange={() => useCustomHelpers.setValue(!useCustomField.value)}
/>
{useCustomField.value && (
<SubFormLayout>
<Text
className="pf-c-content"
css="margin-bottom: var(--pf-c-content--MarginBottom)"
>
<small>
<Trans>
Use custom messages to change the content of notifications sent
when a job starts, succeeds, or fails. Use curly braces to
access information about the job:{' '}
<code>
{'{{'} job_friendly_name {'}}'}
</code>
,{' '}
<code>
{'{{'} url {'}}'}
</code>
, or attributes of the job such as{' '}
<code>
{'{{'} job.status {'}}'}
</code>
. You may apply a number of possible variables in the message.
Refer to the{' '}
<a
href="https://docs.ansible.com/ansible-tower/latest/html/userguide/notifications.html#create-custom-notifications"
target="_blank"
rel="noopener noreferrer"
>
Ansible Tower documentation
</a>{' '}
for more details.
</Trans>
</small>
</Text>
<FormFullWidthLayout>
{showMessages && (
<CodeMirrorField
id="start-message"
name="messages.started.message"
label={i18n._(t`Start message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="start-body"
name="messages.started.body"
label={i18n._(t`Start message body`)}
mode="jinja2"
rows={6}
/>
)}
{showMessages && (
<CodeMirrorField
id="success-message"
name="messages.success.message"
label={i18n._(t`Success message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="success-body"
name="messages.success.body"
label={i18n._(t`Success message body`)}
mode="jinja2"
rows={6}
/>
)}
{showMessages && (
<CodeMirrorField
id="error-message"
name="messages.error.message"
label={i18n._(t`Error message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="error-body"
name="messages.error.body"
label={i18n._(t`Error message body`)}
mode="jinja2"
rows={6}
/>
)}
{showMessages && (
<CodeMirrorField
id="wf-approved-message"
name="messages.workflow_approval.approved.message"
label={i18n._(t`Workflow approved message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="wf-approved-body"
name="messages.workflow_approval.approved.body"
label={i18n._(t`Workflow approved message body`)}
mode="jinja2"
rows={6}
/>
)}
{showMessages && (
<CodeMirrorField
id="wf-denied-message"
name="messages.workflow_approval.denied.message"
label={i18n._(t`Workflow denied message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="wf-denied-body"
name="messages.workflow_approval.denied.body"
label={i18n._(t`Workflow denied message body`)}
mode="jinja2"
rows={6}
/>
)}
{showMessages && (
<CodeMirrorField
id="wf-running-message"
name="messages.workflow_approval.running.message"
label={i18n._(t`Workflow pending message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="wf-running-body"
name="messages.workflow_approval.running.body"
label={i18n._(t`Workflow pending message body`)}
mode="jinja2"
rows={6}
/>
)}
{showMessages && (
<CodeMirrorField
id="wf-timed-out-message"
name="messages.workflow_approval.timed_out.message"
label={i18n._(t`Workflow timed out message`)}
mode="jinja2"
rows={2}
/>
)}
{showBodies && (
<CodeMirrorField
id="wf-timed-out-body"
name="messages.workflow_approval.timed_out.body"
label={i18n._(t`Workflow timed out message body`)}
mode="jinja2"
rows={6}
/>
)}
</FormFullWidthLayout>
</SubFormLayout>
)}
</>
);
}
export default withI18n()(CustomMessagesSubForm);

View File

@ -1,3 +1,326 @@
export default function NotificationTemplateForm() {
//
import React from 'react';
import { shape, func } from 'prop-types';
import { Formik, useField } from 'formik';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { Form, FormGroup } from '@patternfly/react-core';
import AnsibleSelect from '../../../components/AnsibleSelect';
import FormField, { FormSubmitError } from '../../../components/FormField';
import FormActionGroup from '../../../components/FormActionGroup/FormActionGroup';
import { OrganizationLookup } from '../../../components/Lookup';
import { required } from '../../../util/validators';
import { FormColumnLayout } from '../../../components/FormLayout';
import TypeInputsSubForm from './TypeInputsSubForm';
import CustomMessagesSubForm from './CustomMessagesSubForm';
import typeFieldNames, { initialConfigValues } from './typeFieldNames';
// Inner field set for the notification template form. Must be rendered
// inside a Formik context: it binds to the 'organization' and
// 'notification_type' form values via useField.
function NotificationTemplateFormFields({ i18n, defaultMessages }) {
  const [orgField, orgMeta, orgHelpers] = useField('organization');
  const [typeField, typeMeta] = useField({
    name: 'notification_type',
    validate: required(i18n._(t`Select a value for this field`), i18n),
  });
  return (
    <>
      <FormField
        id="notification-name"
        name="name"
        type="text"
        label={i18n._(t`Name`)}
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="notification-description"
        name="description"
        type="text"
        label={i18n._(t`Description`)}
      />
      {/* The lookup stores the whole organization object in form state;
          it is flattened to an id on submit by the parent form. */}
      <OrganizationLookup
        helperTextInvalid={orgMeta.error}
        isValid={!orgMeta.touched || !orgMeta.error}
        onBlur={() => orgHelpers.setTouched()}
        onChange={value => {
          orgHelpers.setValue(value);
        }}
        value={orgField.value}
        touched={orgMeta.touched}
        error={orgMeta.error}
        required
      />
      <FormGroup
        fieldId="notification-type"
        helperTextInvalid={typeMeta.error}
        isRequired
        validated={!typeMeta.touched || !typeMeta.error ? 'default' : 'error'}
        label={i18n._(t`Type`)}
      >
        {/* Product names (Grafana, Slack, ...) are deliberately not
            passed through i18n. */}
        <AnsibleSelect
          {...typeField}
          id="notification-type"
          isValid={!typeMeta.touched || !typeMeta.error}
          data={[
            {
              value: '',
              key: 'none',
              label: i18n._(t`Choose a Notification Type`),
              isDisabled: true,
            },
            { value: 'email', key: 'email', label: i18n._(t`E-mail`) },
            { value: 'grafana', key: 'grafana', label: 'Grafana' },
            { value: 'irc', key: 'irc', label: 'IRC' },
            { value: 'mattermost', key: 'mattermost', label: 'Mattermost' },
            { value: 'pagerduty', key: 'pagerduty', label: 'Pagerduty' },
            { value: 'rocketchat', key: 'rocketchat', label: 'Rocket.Chat' },
            { value: 'slack', key: 'slack', label: 'Slack' },
            { value: 'twilio', key: 'twilio', label: 'Twilio' },
            { value: 'webhook', key: 'webhook', label: 'Webhook' },
          ]}
        />
      </FormGroup>
      {/* Type-specific detail fields only render once a type is chosen. */}
      {typeField.value && <TypeInputsSubForm type={typeField.value} />}
      <CustomMessagesSubForm
        defaultMessages={defaultMessages}
        type={typeField.value}
      />
    </>
  );
}
function NotificationTemplateForm({
template,
defaultMessages,
onSubmit,
onCancel,
submitError,
i18n,
}) {
const handleSubmit = values => {
onSubmit(
normalizeFields(
{
...values,
organization: values.organization?.id,
},
defaultMessages
)
);
};
let emailOptions = '';
if (template.notification_type === 'email') {
emailOptions = template.notification_configuration?.use_ssl ? 'ssl' : 'tls';
}
const messages = template.messages || { workflow_approval: {} };
const defs = defaultMessages[template.notification_type || 'email'];
const mergeDefaultMessages = (templ = {}, def) => {
return {
message: templ.message || def.message || '',
body: templ.body || def.body || '',
};
};
const { headers } = template?.notification_configuration || {};
return (
<Formik
initialValues={{
name: template.name,
description: template.description,
notification_type: template.notification_type,
notification_configuration: {
...initialConfigValues,
...template.notification_configuration,
headers: headers ? JSON.stringify(headers, null, 2) : null,
},
emailOptions,
organization: template.summary_fields?.organization,
messages: {
started: { ...mergeDefaultMessages(messages.started, defs.started) },
success: { ...mergeDefaultMessages(messages.success, defs.success) },
error: { ...mergeDefaultMessages(messages.error, defs.error) },
workflow_approval: {
approved: {
...mergeDefaultMessages(
messages.workflow_approval.approved,
defs.workflow_approval.approved
),
},
denied: {
...mergeDefaultMessages(
messages.workflow_approval.denied,
defs.workflow_approval.denied
),
},
running: {
...mergeDefaultMessages(
messages.workflow_approval.running,
defs.workflow_approval.running
),
},
timed_out: {
...mergeDefaultMessages(
messages.workflow_approval.timed_out,
defs.workflow_approval.timed_out
),
},
},
},
useCustomMessages: hasCustomMessages(messages, defs),
}}
onSubmit={handleSubmit}
>
{formik => (
<Form autoComplete="off" onSubmit={formik.handleSubmit}>
<FormColumnLayout>
<NotificationTemplateFormFields
i18n={i18n}
defaultMessages={defaultMessages}
/>
<FormSubmitError error={submitError} />
<FormActionGroup
onCancel={onCancel}
onSubmit={formik.handleSubmit}
/>
</FormColumnLayout>
</Form>
)}
</Formik>
);
}
NotificationTemplateForm.propTypes = {
  template: shape(),
  defaultMessages: shape().isRequired,
  onSubmit: func.isRequired,
  onCancel: func.isRequired,
  submitError: shape(),
};
// On the "add" screen no template exists yet, so default to an empty
// skeleton; notification_type '' means "no type chosen".
NotificationTemplateForm.defaultProps = {
  template: {
    name: '',
    description: '',
    notification_type: '',
  },
  submitError: null,
};
export default withI18n()(NotificationTemplateForm);
// Returns true if any event message/body on the template differs from
// the type's defaults, so the "customize messages" toggle starts on.
function hasCustomMessages(messages, defaults) {
  // messages.workflow_approval may be missing or null for templates that
  // never customized workflow messages; guard before dereferencing.
  const wf = messages.workflow_approval || {};
  return (
    isCustomized(messages.started, defaults.started) ||
    isCustomized(messages.success, defaults.success) ||
    isCustomized(messages.error, defaults.error) ||
    isCustomized(wf.approved, defaults.workflow_approval.approved) ||
    isCustomized(wf.denied, defaults.workflow_approval.denied) ||
    isCustomized(wf.running, defaults.workflow_approval.running) ||
    isCustomized(wf.timed_out, defaults.workflow_approval.timed_out)
  );
}
// True when the given message overrides the default's message or body.
// A null/undefined message means "not customized".
function isCustomized(message, defaultMessage) {
  if (!message) {
    return false;
  }
  const messageDiffers = Boolean(
    message.message && message.message !== defaultMessage.message
  );
  const bodyDiffers = Boolean(
    message.body && message.body !== defaultMessage.body
  );
  return messageDiffers || bodyDiffers;
}
// Convert form values to API shape: first collapse default messages to
// null, then strip configuration fields that don't apply to the type.
function normalizeFields(values, defaultMessages) {
  const withNormalizedMessages = normalizeMessageFields(
    values,
    defaultMessages
  );
  return normalizeTypeFields(withNormalizedMessages);
}
/* If the user filled in some of the Type Details fields, then switched
 * to a different notification type, unnecessary fields may be left in
 * notification_configuration; this function strips them off. */
function normalizeTypeFields(values) {
  const { emailOptions, ...rest } = values;
  const config = values.notification_configuration;
  const stripped = {};
  // Keep only the fields that belong to the selected type.
  typeFieldNames[values.notification_type].forEach(fieldName => {
    if (typeof config[fieldName] !== 'undefined') {
      stripped[fieldName] = config[fieldName];
    }
  });
  if (values.notification_type === 'email') {
    // The single select in the form maps back to the API's two booleans.
    stripped.use_ssl = emailOptions === 'ssl';
    stripped.use_tls = !stripped.use_ssl;
  }
  if (values.notification_type === 'webhook') {
    // Headers are edited as JSON text; convert back to an object.
    stripped.headers = stripped.headers ? JSON.parse(stripped.headers) : {};
  }
  return {
    ...rest,
    notification_configuration: stripped,
  };
}
// Prepare the messages for the API: if custom messages are disabled the
// whole object is null; otherwise any message/body equal to the type's
// default is nulled so the API falls back to it.
function normalizeMessageFields(values, defaults) {
  const { useCustomMessages, ...rest } = values;
  if (!useCustomMessages) {
    return {
      ...rest,
      messages: null,
    };
  }
  const defs = defaults[values.notification_type];
  const { messages } = values;
  const stripDefault = (custom, def) => ({
    message: custom.message === def.message ? null : custom.message,
    body: custom.body === def.body ? null : custom.body,
  });
  const wf = messages.workflow_approval;
  const wfDefs = defs.workflow_approval;
  return {
    ...rest,
    messages: {
      started: stripDefault(messages.started, defs.started),
      success: stripDefault(messages.success, defs.success),
      error: stripDefault(messages.error, defs.error),
      workflow_approval: {
        approved: stripDefault(wf.approved, wfDefs.approved),
        denied: stripDefault(wf.denied, wfDefs.denied),
        running: stripDefault(wf.running, wfDefs.running),
        timed_out: stripDefault(wf.timed_out, wfDefs.timed_out),
      },
    },
  };
}

View File

@ -0,0 +1,118 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import NotificationTemplateForm from './NotificationTemplateForm';
jest.mock('../../../api/models/NotificationTemplates');
// Notification template fixture shaped like the API detail response.
const template = {
  id: 3,
  notification_type: 'slack',
  name: 'Test Notification',
  description: 'a sample notification',
  url: '/notification_templates/3',
  organization: 1,
  summary_fields: {
    user_capabilities: {
      edit: true,
    },
    recent_notifications: [
      {
        status: 'success',
      },
    ],
    organization: {
      id: 1,
      name: 'The Organization',
    },
  },
};
// Single default message/body pair reused for every event below.
const messageDef = {
  message: 'default message',
  body: 'default body',
};
// Default messages for one notification type.
const defaults = {
  started: messageDef,
  success: messageDef,
  error: messageDef,
  workflow_approval: {
    approved: messageDef,
    denied: messageDef,
    running: messageDef,
    timed_out: messageDef,
  },
};
// defaultMessages prop shape: one entry per notification type.
const defaultMessages = {
  email: defaults,
  slack: defaults,
  twilio: defaults,
};
describe('<NotificationTemplateForm />', () => {
  test('should render form fields', () => {
    const wrapper = mountWithContexts(
      <NotificationTemplateForm
        template={template}
        defaultMessages={defaultMessages}
        detailUrl="/notification_templates/3/detail"
        onSubmit={jest.fn()}
        onCancel={jest.fn()}
      />
    );
    // Fields should be initialized from the template fixture.
    expect(wrapper.find('input#notification-name').prop('value')).toEqual(
      'Test Notification'
    );
    expect(
      wrapper.find('input#notification-description').prop('value')
    ).toEqual('a sample notification');
    expect(wrapper.find('OrganizationLookup').prop('value')).toEqual({
      id: 1,
      name: 'The Organization',
    });
    expect(wrapper.find('AnsibleSelect').prop('value')).toEqual('slack');
    expect(wrapper.find('TypeInputsSubForm').prop('type')).toEqual('slack');
    expect(wrapper.find('CustomMessagesSubForm').prop('type')).toEqual('slack');
    expect(
      wrapper.find('CustomMessagesSubForm').prop('defaultMessages')
    ).toEqual(defaultMessages);
  });
  test('should submit', async () => {
    const handleSubmit = jest.fn();
    const wrapper = mountWithContexts(
      <NotificationTemplateForm
        template={{
          ...template,
          notification_configuration: {
            channels: ['#foo'],
            token: 'abc123',
          },
        }}
        defaultMessages={defaultMessages}
        detailUrl="/notification_templates/3/detail"
        onSubmit={handleSubmit}
        onCancel={jest.fn()}
      />
    );
    await act(async () => {
      wrapper.find('FormActionGroup').invoke('onSubmit')();
    });
    wrapper.update();
    // hex_color appears because initialConfigValues seeds every known
    // slack field; messages is null because none were customized.
    expect(handleSubmit).toHaveBeenCalledWith({
      name: 'Test Notification',
      description: 'a sample notification',
      organization: 1,
      notification_type: 'slack',
      notification_configuration: {
        channels: ['#foo'],
        hex_color: '',
        token: 'abc123',
      },
      messages: null,
    });
  });
});

View File

@ -0,0 +1,509 @@
import React from 'react';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { useField } from 'formik';
import { FormGroup, Title } from '@patternfly/react-core';
import {
FormColumnLayout,
FormFullWidthLayout,
SubFormLayout,
} from '../../../components/FormLayout';
import FormField, {
PasswordField,
CheckboxField,
ArrayTextField,
} from '../../../components/FormField';
import AnsibleSelect from '../../../components/AnsibleSelect';
import { CodeMirrorField } from '../../../components/CodeMirrorInput';
import {
combine,
required,
requiredEmail,
url,
minMaxValue,
} from '../../../util/validators';
import { NotificationType } from '../../../types';
// Dispatch table mapping a notification type to the component that
// renders its type-specific fields. The referenced components are
// function declarations further down this file, so hoisting makes them
// available here.
const TypeFields = {
  email: EmailFields,
  grafana: GrafanaFields,
  irc: IRCFields,
  mattermost: MattermostFields,
  pagerduty: PagerdutyFields,
  rocketchat: RocketChatFields,
  slack: SlackFields,
  twilio: TwilioFields,
  webhook: WebhookFields,
};
// Renders the "Type Details" subform for the selected notification
// type by dispatching to the matching *Fields component in TypeFields.
// `type` must be one of the TypeFields keys.
function TypeInputsSubForm({ type, i18n }) {
  const Fields = TypeFields[type];
  return (
    <SubFormLayout>
      <Title size="md" headingLevel="h4">
        {i18n._(t`Type Details`)}
      </Title>
      <FormColumnLayout>
        <Fields i18n={i18n} />
      </FormColumnLayout>
    </SubFormLayout>
  );
}
TypeInputsSubForm.propTypes = {
  type: NotificationType.isRequired,
};
export default withI18n()(TypeInputsSubForm);
// Type Details fields for e-mail notifications. All inputs bind to
// notification_configuration.* except the SSL/TLS choice, which lives
// in the synthetic `emailOptions` form value and is converted back to
// the use_ssl/use_tls booleans on submit.
function EmailFields({ i18n }) {
  const [optionsField, optionsMeta] = useField({
    name: 'emailOptions',
    validate: required(i18n._(t`Select a value for this field`), i18n),
  });
  return (
    <>
      <FormField
        id="email-username"
        label={i18n._(t`Username`)}
        name="notification_configuration.username"
        type="text"
      />
      <PasswordField
        id="email-password"
        label={i18n._(t`Password`)}
        name="notification_configuration.password"
      />
      <FormField
        id="email-host"
        label={i18n._(t`Host`)}
        name="notification_configuration.host"
        type="text"
        validate={required(null, i18n)}
        isRequired
      />
      <ArrayTextField
        id="email-recipients"
        label={i18n._(t`Recipient list`)}
        name="notification_configuration.recipients"
        type="textarea"
        validate={required(null, i18n)}
        isRequired
        rows={3}
        tooltip={i18n._(t`Enter one email address per line to create a recipient
          list for this type of notification.`)}
      />
      <FormField
        id="email-sender"
        label={i18n._(t`Sender e-mail`)}
        name="notification_configuration.sender"
        type="text"
        validate={requiredEmail(i18n)}
        isRequired
      />
      {/* NOTE(review): min="0" on the input disagrees with the 1-65535
          validator — confirm the intended lower bound. */}
      <FormField
        id="email-port"
        label={i18n._(t`Port`)}
        name="notification_configuration.port"
        type="number"
        validate={combine([required(null, i18n), minMaxValue(1, 65535, i18n)])}
        isRequired
        min="0"
        max="65535"
      />
      <FormField
        id="email-timeout"
        label={i18n._(t`Timeout`)}
        name="notification_configuration.timeout"
        type="number"
        validate={combine([required(null, i18n), minMaxValue(1, 120, i18n)])}
        isRequired
        min="1"
        max="120"
        tooltip={i18n._(t`The amount of time (in seconds) before the email
          notification stops trying to reach the host and times out. Ranges
          from 1 to 120 seconds.`)}
      />
      <FormGroup
        fieldId="email-options"
        helperTextInvalid={optionsMeta.error}
        isRequired
        validated={
          !optionsMeta.touched || !optionsMeta.error ? 'default' : 'error'
        }
        label={i18n._(t`E-mail options`)}
      >
        <AnsibleSelect
          {...optionsField}
          id="email-options"
          data={[
            {
              value: '',
              key: '',
              label: i18n._(t`Choose an email option`),
              isDisabled: true,
            },
            { value: 'tls', key: 'tls', label: i18n._(t`Use TLS`) },
            { value: 'ssl', key: 'ssl', label: i18n._(t`Use SSL`) },
          ]}
        />
      </FormGroup>
    </>
  );
}
// Type Details fields for Grafana notifications.
// dashboardId/panelId are deliberately camelCase — they must match the
// field names listed for 'grafana' in typeFieldNames.
function GrafanaFields({ i18n }) {
  return (
    <>
      <FormField
        id="grafana-url"
        label={i18n._(t`Grafana URL`)}
        name="notification_configuration.grafana_url"
        type="text"
        validate={required(null, i18n)}
        isRequired
        tooltip={i18n._(t`The base URL of the Grafana server - the
          /api/annotations endpoint will be added automatically to the base
          Grafana URL.`)}
      />
      <PasswordField
        id="grafana-key"
        label={i18n._(t`Grafana API key`)}
        name="notification_configuration.grafana_key"
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="grafana-dashboard-id"
        label={i18n._(t`ID of the dashboard (optional)`)}
        name="notification_configuration.dashboardId"
        type="text"
      />
      <FormField
        id="grafana-panel-id"
        label={i18n._(t`ID of the panel (optional)`)}
        name="notification_configuration.panelId"
        type="text"
      />
      <ArrayTextField
        id="grafana-tags"
        label={i18n._(t`Tags for the annotation (optional)`)}
        name="notification_configuration.annotation_tags"
        type="textarea"
        rows={3}
        tooltip={i18n._(t`Enter one Annotation Tag per line, without commas.`)}
      />
      <CheckboxField
        id="grafana-ssl"
        label={i18n._(t`Disable SSL verification`)}
        name="notification_configuration.grafana_no_verify_ssl"
      />
    </>
  );
}
function IRCFields({ i18n }) {
return (
<>
<PasswordField
id="irc-password"
label={i18n._(t`IRC server password`)}
name="notification_configuration.password"
/>
<FormField
id="irc-port"
label={i18n._(t`IRC server port`)}
name="notification_configuration.port"
type="number"
validate={required(null, i18n)}
isRequired
min="0"
/>
<FormField
id="irc-server"
label={i18n._(t`IRC server address`)}
name="notification_configuration.server"
type="text"
validate={required(null, i18n)}
isRequired
/>
<FormField
id="irc-nickname"
label={i18n._(t`IRC nick`)}
name="notification_configuration.nickname"
type="text"
validate={required(null, i18n)}
isRequired
/>
<ArrayTextField
id="irc-targets"
label={i18n._(t`Destination channels or users`)}
name="notification_configuration.targets"
type="textarea"
validate={required(null, i18n)}
isRequired
tooltip={i18n._(t`Enter one IRC channel or username per line. The pound
symbol (#) for channels, and the at (@) symbol for users, are not
required.`)}
/>
<CheckboxField
id="grafana-ssl"
label={i18n._(t`Disable SSL verification`)}
name="notification_configuration.use_ssl"
/>
</>
);
}
// Type Details fields for Mattermost notifications.
function MattermostFields({ i18n }) {
  return (
    <>
      <FormField
        id="mattermost-url"
        label={i18n._(t`Target URL`)}
        name="notification_configuration.mattermost_url"
        type="text"
        validate={combine([required(null, i18n), url(i18n)])}
        isRequired
      />
      <FormField
        id="mattermost-username"
        label={i18n._(t`Username`)}
        name="notification_configuration.mattermost_username"
        type="text"
      />
      <FormField
        id="mattermost-channel"
        label={i18n._(t`Channel`)}
        name="notification_configuration.mattermost_channel"
        type="text"
      />
      {/* Optional, but must be a well-formed URL when provided. */}
      <FormField
        id="mattermost-icon"
        label={i18n._(t`Icon URL`)}
        name="notification_configuration.mattermost_icon_url"
        type="text"
        validate={url(i18n)}
      />
      <CheckboxField
        id="mattermost-ssl"
        label={i18n._(t`Disable SSL verification`)}
        name="notification_configuration.mattermost_no_verify_ssl"
      />
    </>
  );
}
// Type Details fields for Pagerduty notifications; all four fields are
// required by the API.
function PagerdutyFields({ i18n }) {
  return (
    <>
      <PasswordField
        id="pagerduty-token"
        label={i18n._(t`API Token`)}
        name="notification_configuration.token"
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="pagerduty-subdomain"
        label={i18n._(t`Pagerduty subdomain`)}
        name="notification_configuration.subdomain"
        type="text"
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="pagerduty-service-key"
        label={i18n._(t`API service/integration key`)}
        name="notification_configuration.service_key"
        type="text"
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="pagerduty-identifier"
        label={i18n._(t`Client identifier`)}
        name="notification_configuration.client_name"
        type="text"
        validate={required(null, i18n)}
        isRequired
      />
    </>
  );
}
// Type Details fields for Rocket.Chat notifications.
function RocketChatFields({ i18n }) {
  return (
    <>
      <FormField
        id="rocketchat-url"
        label={i18n._(t`Target URL`)}
        name="notification_configuration.rocketchat_url"
        type="text"
        validate={combine([required(null, i18n), url(i18n)])}
        isRequired
      />
      <FormField
        id="rocketchat-username"
        label={i18n._(t`Username`)}
        name="notification_configuration.rocketchat_username"
        type="text"
      />
      {/* Optional, but must be a well-formed URL when provided. */}
      <FormField
        id="rocketchat-icon-url"
        label={i18n._(t`Icon URL`)}
        name="notification_configuration.rocketchat_icon_url"
        type="text"
        validate={url(i18n)}
      />
      <CheckboxField
        id="rocketchat-ssl"
        label={i18n._(t`Disable SSL verification`)}
        name="notification_configuration.rocketchat_no_verify_ssl"
      />
    </>
  );
}
// Type Details fields for Slack notifications.
function SlackFields({ i18n }) {
  return (
    <>
      <ArrayTextField
        id="slack-channels"
        label={i18n._(t`Destination channels`)}
        name="notification_configuration.channels"
        type="textarea"
        validate={required(null, i18n)}
        isRequired
        tooltip={i18n._(t`Enter one Slack channel per line. The pound symbol (#)
          is required for channels.`)}
      />
      <PasswordField
        id="slack-token"
        label={i18n._(t`Token`)}
        name="notification_configuration.token"
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="slack-color"
        label={i18n._(t`Notification color`)}
        name="notification_configuration.hex_color"
        type="text"
        tooltip={i18n._(t`Specify a notification color. Acceptable colors are hex
          color code (example: #3af or #789abc).`)}
      />
    </>
  );
}
// Type Details fields for Twilio SMS notifications; all fields are
// required by the API.
function TwilioFields({ i18n }) {
  return (
    <>
      <PasswordField
        id="twilio-token"
        label={i18n._(t`Account token`)}
        name="notification_configuration.account_token"
        validate={required(null, i18n)}
        isRequired
      />
      <FormField
        id="twilio-from-phone"
        label={i18n._(t`Source phone number`)}
        name="notification_configuration.from_number"
        type="text"
        validate={required(null, i18n)}
        isRequired
        tooltip={i18n._(t`Enter the number associated with the "Messaging
          Service" in Twilio in the format +18005550199.`)}
      />
      <ArrayTextField
        id="twilio-destination-numbers"
        label={i18n._(t`Destination SMS number(s)`)}
        name="notification_configuration.to_numbers"
        type="textarea"
        validate={required(null, i18n)}
        isRequired
        tooltip={i18n._(t`Enter one phone number per line to specify where to
          route SMS messages.`)}
      />
      <FormField
        id="twilio-account-sid"
        label={i18n._(t`Account SID`)}
        name="notification_configuration.account_sid"
        type="text"
        validate={required(null, i18n)}
        isRequired
      />
    </>
  );
}
function WebhookFields({ i18n }) {
const [methodField, methodMeta] = useField({
name: 'notification_configuration.http_method',
validate: required(i18n._(t`Select a value for this field`), i18n),
});
return (
<>
<FormField
id="webhook-username"
label={i18n._(t`Username`)}
name="notification_configuration.username"
type="text"
/>
<PasswordField
id="webhook-password"
label={i18n._(t`Basic auth password`)}
name="notification_configuration.password"
/>
<FormField
id="webhook-url"
label={i18n._(t`Target URL`)}
name="notification_configuration.url"
type="text"
validate={combine([required(null, i18n), url(i18n)])}
isRequired
/>
<CheckboxField
id="webhook-ssl"
label={i18n._(t`Disable SSL verification`)}
name="notification_configuration.disable_ssl_verification"
/>
<FormFullWidthLayout>
<CodeMirrorField
id="webhook-headers"
name="notification_configuration.headers"
label={i18n._(t`HTTP Headers`)}
mode="javascript"
tooltip={i18n._(t`Specify HTTP Headers in JSON format. Refer to
the Ansible Tower documentation for example syntax.`)}
rows={5}
/>
</FormFullWidthLayout>
<FormGroup
fieldId="webhook-http-method"
helperTextInvalid={methodMeta.error}
isRequired
validated={
!methodMeta.touched || !methodMeta.error ? 'default' : 'error'
}
label={i18n._(t`E-mail options`)}
>
<AnsibleSelect
{...methodField}
id="webhook-http-method"
data={[
{
value: '',
key: '',
label: i18n._(t`Choose an HTTP method`),
isDisabled: true,
},
{ value: 'POST', key: 'post', label: i18n._(t`POST`) },
{ value: 'PUT', key: 'put', label: i18n._(t`PUT`) },
]}
/>
</FormGroup>
</>
);
}

View File

@ -0,0 +1,56 @@
// Maps each notification type to the notification_configuration field
// names that apply to it. Used to strip stale fields when the user
// switches types, and (below) to seed initialConfigValues.
const typeFieldNames = {
  email: [
    'username',
    'password',
    'host',
    'recipients',
    'sender',
    'port',
    'timeout',
  ],
  grafana: [
    'grafana_url',
    'grafana_key',
    // camelCase matches the form field names used in GrafanaFields.
    'dashboardId',
    'panelId',
    'annotation_tags',
    'grafana_no_verify_ssl',
  ],
  irc: ['password', 'port', 'server', 'nickname', 'targets', 'use_ssl'],
  mattermost: [
    'mattermost_url',
    'mattermost_username',
    'mattermost_channel',
    'mattermost_icon_url',
    'mattermost_no_verify_ssl',
  ],
  pagerduty: ['token', 'subdomain', 'service_key', 'client_name'],
  rocketchat: [
    'rocketchat_url',
    'rocketchat_username',
    'rocketchat_icon_url',
    'rocketchat_no_verify_ssl',
  ],
  slack: ['channels', 'token', 'hex_color'],
  twilio: ['account_token', 'from_number', 'to_numbers', 'account_sid'],
  webhook: [
    'username',
    'password',
    'url',
    'disable_ssl_verification',
    'headers',
    'http_method',
  ],
};
export default typeFieldNames;
// Seed a value for every known configuration field so all form inputs
// are controlled from the start: *_ssl flags default to false, text
// fields to the empty string.
const initialConfigValues = {};
Object.values(typeFieldNames).forEach(fieldList => {
  fieldList.forEach(fieldName => {
    initialConfigValues[fieldName] = fieldName.includes('_ssl') ? false : '';
  });
});
export { initialConfigValues };

View File

@ -4,13 +4,11 @@ import { withRouter, Redirect } from 'react-router-dom';
import { CardBody } from '../../../components/Card';
import ContentError from '../../../components/ContentError';
import ContentLoading from '../../../components/ContentLoading';
import { JobTemplatesAPI, ProjectsAPI } from '../../../api';
import { JobTemplatesAPI } from '../../../api';
import { JobTemplate } from '../../../types';
import { getAddedAndRemoved } from '../../../util/lists';
import JobTemplateForm from '../shared/JobTemplateForm';
const loadRelatedProjectPlaybooks = async project =>
ProjectsAPI.readPlaybooks(project);
class JobTemplateEdit extends Component {
static propTypes = {
template: JobTemplate.isRequired,
@ -43,17 +41,8 @@ class JobTemplateEdit extends Component {
}
async loadRelated() {
const {
template: { project },
} = this.props;
this.setState({ contentError: null, hasContentLoading: true });
try {
if (project) {
const { data: playbook = [] } = await loadRelatedProjectPlaybooks(
project
);
this.setState({ relatedProjectPlaybooks: playbook });
}
const [relatedCredentials] = await this.loadRelatedCredentials();
this.setState({
relatedCredentials,

View File

@ -12,6 +12,7 @@ import {
JobTemplatesAPI,
LabelsAPI,
ProjectsAPI,
InventoriesAPI,
} from '../../../api';
import JobTemplateEdit from './JobTemplateEdit';
@ -181,6 +182,12 @@ JobTemplatesAPI.readCredentials.mockResolvedValue({
ProjectsAPI.readPlaybooks.mockResolvedValue({
data: mockRelatedProjectPlaybooks,
});
InventoriesAPI.readOptions.mockResolvedValue({
data: { actions: { GET: {}, POST: {} } },
});
ProjectsAPI.readOptions.mockResolvedValue({
data: { actions: { GET: {}, POST: {} } },
});
LabelsAPI.read.mockResolvedValue({ data: { results: [] } });
CredentialsAPI.read.mockResolvedValue({
data: {

View File

@ -36,18 +36,23 @@ function Template({ i18n, me, setBreadcrumb }) {
request: loadTemplateAndRoles,
} = useRequest(
useCallback(async () => {
const [{ data }, notifAdminRes] = await Promise.all([
const [{ data }, actions, notifAdminRes] = await Promise.all([
JobTemplatesAPI.readDetail(templateId),
JobTemplatesAPI.readTemplateOptions(templateId),
OrganizationsAPI.read({
page_size: 1,
role_level: 'notification_admin_role',
}),
]);
if (data.webhook_service && data?.related?.webhook_key) {
const {
data: { webhook_key },
} = await JobTemplatesAPI.readWebhookKey(templateId);
data.webhook_key = webhook_key;
if (actions.data.actions.PUT) {
if (data.webhook_service && data?.related?.webhook_key) {
const {
data: { webhook_key },
} = await JobTemplatesAPI.readWebhookKey(templateId);
data.webhook_key = webhook_key;
}
}
setBreadcrumb(data);

View File

@ -18,11 +18,16 @@ const mockMe = {
is_system_auditor: false,
};
describe('<Template />', () => {
let wrapper;
beforeEach(() => {
JobTemplatesAPI.readDetail.mockResolvedValue({
data: mockJobTemplateData,
});
JobTemplatesAPI.readTemplateOptions.mockResolvedValue({
data: {
actions: { PUT: true },
},
});
OrganizationsAPI.read.mockResolvedValue({
data: {
count: 1,
@ -35,21 +40,33 @@ describe('<Template />', () => {
],
},
});
JobTemplatesAPI.readWebhookKey.mockResolvedValue({
data: {
webhook_key: 'key',
},
});
});
afterEach(() => {
jest.clearAllMocks();
wrapper.unmount();
});
test('initially renders succesfully', async () => {
await act(async () => {
mountWithContexts(<Template setBreadcrumb={() => {}} me={mockMe} />);
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />
);
});
});
test('When component mounts API is called and the response is put in state', async () => {
await act(async () => {
mountWithContexts(<Template setBreadcrumb={() => {}} me={mockMe} />);
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />
);
});
expect(JobTemplatesAPI.readDetail).toBeCalled();
expect(OrganizationsAPI.read).toBeCalled();
});
test('notifications tab shown for admins', async done => {
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />
@ -74,7 +91,6 @@ describe('<Template />', () => {
},
});
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />
@ -93,7 +109,7 @@ describe('<Template />', () => {
const history = createMemoryHistory({
initialEntries: ['/templates/job_template/1/foobar'],
});
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />,
@ -117,4 +133,62 @@ describe('<Template />', () => {
await waitForElement(wrapper, 'ContentError', el => el.length === 1);
});
test('should call to get webhook key', async () => {
const history = createMemoryHistory({
initialEntries: ['/templates/job_template/1/foobar'],
});
await act(async () => {
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />,
{
context: {
router: {
history,
route: {
location: history.location,
match: {
params: { id: 1 },
url: '/templates/job_template/1/foobar',
path: '/templates/job_template/1/foobar',
},
},
},
},
}
);
});
expect(JobTemplatesAPI.readWebhookKey).toHaveBeenCalled();
});
test('should not call to get webhook key', async () => {
JobTemplatesAPI.readTemplateOptions.mockResolvedValueOnce({
data: {
actions: {},
},
});
const history = createMemoryHistory({
initialEntries: ['/templates/job_template/1/foobar'],
});
await act(async () => {
wrapper = mountWithContexts(
<Template setBreadcrumb={() => {}} me={mockMe} />,
{
context: {
router: {
history,
route: {
location: history.location,
match: {
params: { id: 1 },
url: '/templates/job_template/1/foobar',
path: '/templates/job_template/1/foobar',
},
},
},
},
}
);
});
expect(JobTemplatesAPI.readWebhookKey).not.toHaveBeenCalled();
});
});

View File

@ -56,10 +56,20 @@ class WorkflowJobTemplate extends Component {
this.setState({ contentError: null });
try {
const { data } = await WorkflowJobTemplatesAPI.readDetail(id);
const [
{ data },
{
data: { actions },
},
] = await Promise.all([
WorkflowJobTemplatesAPI.readDetail(id),
WorkflowJobTemplatesAPI.readWorkflowJobTemplateOptions(id),
]);
let webhookKey;
if (data?.webhook_service && data?.related?.webhook_key) {
webhookKey = await WorkflowJobTemplatesAPI.readWebhookKey(id);
if (actions.PUT) {
if (data?.webhook_service && data?.related?.webhook_key) {
webhookKey = await WorkflowJobTemplatesAPI.readWebhookKey(id);
}
}
if (data?.summary_fields?.webhook_credential) {
const {

View File

@ -59,6 +59,7 @@ describe('<WorkflowJobTemplate/>', () => {
},
},
});
WorkflowJobTemplatesAPI.readWebhookKey.mockResolvedValue({
data: { webhook_key: 'WebHook Key' },
});
@ -74,6 +75,9 @@ describe('<WorkflowJobTemplate/>', () => {
});
});
beforeEach(() => {
WorkflowJobTemplatesAPI.readWorkflowJobTemplateOptions.mockResolvedValue({
data: { actions: { PUT: {} } },
});
history = createMemoryHistory({
initialEntries: ['/templates/workflow_job_template/1/details'],
});
@ -95,13 +99,18 @@ describe('<WorkflowJobTemplate/>', () => {
);
});
});
afterEach(() => {
jest.clearAllMocks();
wrapper.unmount();
});
test('calls api to get workflow job template data', async () => {
expect(wrapper.find('WorkflowJobTemplate').length).toBe(1);
expect(WorkflowJobTemplatesAPI.readDetail).toBeCalledWith('1');
wrapper.update();
await sleep(0);
expect(WorkflowJobTemplatesAPI.readWebhookKey).toBeCalledWith('1');
expect(WorkflowJobTemplatesAPI.readWorkflowJobTemplateOptions).toBeCalled();
expect(CredentialsAPI.readDetail).toBeCalledWith(1234567);
expect(OrganizationsAPI.read).toBeCalledWith({
page_size: 1,
@ -144,4 +153,12 @@ describe('<WorkflowJobTemplate/>', () => {
tabs.forEach(t => expect(tc.prop(`aria-label=[${t}]`)));
});
});
test('should not call for webhook key', async () => {
WorkflowJobTemplatesAPI.readWorkflowJobTemplateOptions.mockResolvedValueOnce(
{
data: { actions: {} },
}
);
expect(WorkflowJobTemplatesAPI.readWebhookKey).not.toBeCalled();
});
});

View File

@ -39,7 +39,7 @@ import {
ProjectLookup,
MultiCredentialsLookup,
} from '../../../components/Lookup';
import { JobTemplatesAPI, ProjectsAPI } from '../../../api';
import { JobTemplatesAPI } from '../../../api';
import LabelSelect from './LabelSelect';
import PlaybookSelect from './PlaybookSelect';
import WebhookSubForm from './WebhookSubForm';
@ -100,18 +100,6 @@ function JobTemplateForm({
'webhook_credential'
);
const {
request: fetchProject,
error: projectContentError,
contentLoading: hasProjectLoading,
} = useRequest(
useCallback(async () => {
if (template?.project) {
await ProjectsAPI.readDetail(template?.project);
}
}, [template])
);
const {
request: loadRelatedInstanceGroups,
error: instanceGroupError,
@ -127,10 +115,6 @@ function JobTemplateForm({
}, [setFieldValue, template])
);
useEffect(() => {
fetchProject();
}, [fetchProject]);
useEffect(() => {
loadRelatedInstanceGroups();
}, [loadRelatedInstanceGroups]);
@ -204,16 +188,12 @@ function JobTemplateForm({
callbackUrl = `${origin}${path}`;
}
if (instanceGroupLoading || hasProjectLoading) {
if (instanceGroupLoading) {
return <ContentLoading />;
}
if (contentError || instanceGroupError || projectContentError) {
return (
<ContentError
error={contentError || instanceGroupError || projectContentError}
/>
);
if (contentError || instanceGroupError) {
return <ContentError error={contentError || instanceGroupError} />;
}
return (
@ -254,17 +234,15 @@ function JobTemplateForm({
}}
/>
</FieldWithPrompt>
<FieldWithPrompt
fieldId="template-inventory"
isRequired={!askInventoryOnLaunchField.value}
label={i18n._(t`Inventory`)}
promptId="template-ask-inventory-on-launch"
promptName="ask_inventory_on_launch"
tooltip={i18n._(t`Select the inventory containing the hosts
you want this job to manage.`)}
>
<>
<InventoryLookup
value={inventory}
fieldId="template-inventory"
promptId="template-ask-inventory-on-launch"
promptName="ask_inventory_on_launch"
isPromptableField
tooltip={i18n._(t`Select the inventory containing the hosts
you want this job to manage.`)}
onBlur={() => inventoryHelpers.setTouched()}
onChange={value => {
inventoryHelpers.setValue(value ? value.id : null);
@ -283,7 +261,7 @@ function JobTemplateForm({
{inventoryMeta.error}
</div>
)}
</FieldWithPrompt>
</>
<ProjectLookup
value={projectField.value}
onBlur={() => projectHelpers.setTouched()}

View File

@ -14,6 +14,7 @@ import {
ProjectsAPI,
CredentialsAPI,
CredentialTypesAPI,
InventoriesAPI,
} from '../../../api';
jest.mock('../../../api');
@ -111,14 +112,23 @@ describe('<JobTemplateForm />', () => {
JobTemplatesAPI.updateWebhookKey.mockReturnValue({
data: { webhook_key: 'webhook key' },
});
ProjectsAPI.readPlaybooks.mockReturnValue({
data: ['debug.yml'],
JobTemplatesAPI.updateWebhookKey.mockReturnValue({
data: { webhook_key: 'webhook key' },
});
ProjectsAPI.readDetail.mockReturnValue({
name: 'foo',
id: 1,
allow_override: true,
});
ProjectsAPI.readPlaybooks.mockReturnValue({
data: ['debug.yml'],
});
InventoriesAPI.readOptions.mockResolvedValue({
data: { actions: { GET: {}, POST: {} } },
});
ProjectsAPI.readOptions.mockResolvedValue({
data: { actions: { GET: {}, POST: {} } },
});
});
afterEach(() => {

View File

@ -1,4 +1,4 @@
import React, { useCallback, useEffect } from 'react';
import React, { useCallback, useEffect, useState } from 'react';
import { number, string, oneOfType } from 'prop-types';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
@ -7,6 +7,7 @@ import { ProjectsAPI } from '../../../api';
import useRequest from '../../../util/useRequest';
function PlaybookSelect({ projectId, isValid, field, onBlur, onError, i18n }) {
const [isDisabled, setIsDisabled] = useState(false);
const {
result: options,
request: fetchOptions,
@ -18,6 +19,7 @@ function PlaybookSelect({ projectId, isValid, field, onBlur, onError, i18n }) {
return [];
}
const { data } = await ProjectsAPI.readPlaybooks(projectId);
const opts = (data || []).map(playbook => ({
value: playbook,
key: playbook,
@ -42,18 +44,30 @@ function PlaybookSelect({ projectId, isValid, field, onBlur, onError, i18n }) {
useEffect(() => {
if (error) {
onError(error);
if (error.response.status === 403) {
setIsDisabled(true);
} else {
onError(error);
}
}
}, [error, onError]);
const isDisabledData = [
{
value: field.value || '',
label: field.value || '',
key: 1,
isDisabled: true,
},
];
return (
<AnsibleSelect
id="template-playbook"
data={options}
data={isDisabled ? isDisabledData : options}
isValid={isValid}
{...field}
onBlur={onBlur}
isDisabled={isLoading}
isDisabled={isLoading || isDisabled}
/>
);
}

View File

@ -110,23 +110,21 @@ function WorkflowJobTemplateForm({
value={organizationField.value}
isValid={!organizationMeta.error}
/>
<FieldWithPrompt
fieldId="wfjt-inventory"
label={i18n._(t`Inventory`)}
promptId="wfjt-ask-inventory-on-launch"
promptName="ask_inventory_on_launch"
tooltip={i18n._(
t`Select an inventory for the workflow. This inventory is applied to all job template nodes that prompt for an inventory.`
)}
>
<>
<InventoryLookup
promptId="wfjt-ask-inventory-on-launch"
promptName="ask_inventory_on_launch"
tooltip={i18n._(
t`Select an inventory for the workflow. This inventory is applied to all job template nodes that prompt for an inventory.`
)}
fieldId="wfjt-inventory"
isPromptableField
value={inventoryField.value}
onBlur={() => inventoryHelpers.setTouched()}
onChange={value => {
inventoryHelpers.setValue(value);
}}
required={askInventoryOnLaunchField.value}
required={!askInventoryOnLaunchField.value}
touched={inventoryMeta.touched}
error={inventoryMeta.error}
/>
@ -139,8 +137,7 @@ function WorkflowJobTemplateForm({
{inventoryMeta.error}
</div>
)}
</FieldWithPrompt>
</>
<FieldWithPrompt
fieldId="wjft-limit"
label={i18n._(t`Limit`)}

View File

@ -11,6 +11,8 @@ import {
LabelsAPI,
OrganizationsAPI,
InventoriesAPI,
ProjectsAPI,
CredentialTypesAPI,
} from '../../../api';
jest.mock('../../../api/models/CredentialTypes');
@ -18,6 +20,8 @@ jest.mock('../../../api/models/WorkflowJobTemplates');
jest.mock('../../../api/models/Labels');
jest.mock('../../../api/models/Organizations');
jest.mock('../../../api/models/Inventories');
jest.mock('../../../api/models/Projects');
jest.mock('../../../api/models/Credentials');
describe('<WorkflowJobTemplateForm/>', () => {
let wrapper;
@ -71,6 +75,15 @@ describe('<WorkflowJobTemplateForm/>', () => {
{ id: 2, name: 'Bar' },
],
});
CredentialTypesAPI.read.mockResolvedValue({
data: { results: [{ id: 1 }] },
});
InventoriesAPI.readOptions.mockResolvedValue({
data: { actions: { GET: {}, POST: {} } },
});
ProjectsAPI.readOptions.mockResolvedValue({
data: { actions: { GET: {}, POST: {} } },
});
history = createMemoryHistory({
initialEntries: ['/templates/workflow_job_template/6/edit'],

View File

@ -24,7 +24,8 @@
"instance_groups": "/api/v2/job_templates/7/instance_groups/",
"slice_workflow_jobs": "/api/v2/job_templates/7/slice_workflow_jobs/",
"copy": "/api/v2/job_templates/7/copy/",
"webhook_receiver": "/api/v2/job_templates/7/github/"
"webhook_receiver": "/api/v2/job_templates/7/github/",
"webhook_key": "/api/v2/job_templates/7/webhook_key/"
},
"summary_fields": {
"inventory": {

View File

@ -367,3 +367,27 @@ export const CredentialType = shape({
namespace: string,
inputs: shape({}).isRequired,
});
export const NotificationType = oneOf([
'email',
'grafana',
'irc',
'mattermost',
'pagerduty',
'rocketchat',
'slack',
'twilio',
'webhook',
]);
export const NotificationTemplate = shape({
id: number.isRequired,
name: string.isRequired,
description: string,
url: string.isRequired,
organization: number.isRequired,
notification_type: NotificationType,
summary_fields: shape({
organization: Organization,
}),
});

View File

@ -76,6 +76,24 @@ export function integer(i18n) {
};
}
export function url(i18n) {
return value => {
if (!value) {
return undefined;
}
// URL regex from https://urlregex.com/
if (
// eslint-disable-next-line max-len
!/((([A-Za-z]{3,9}:(?:\/\/)?)(?:[-;:&=+$,\w]+@)?[A-Za-z0-9.-]+|(?:www\.|[-;:&=+$,\w]+@)[A-Za-z0-9.-]+)((?:\/[+~%/.\w\-_]*)?\??(?:[-+=&;%@.\w_]*)#?(?:[.!/\\\w]*))?)/.test(
value
)
) {
return i18n._(t`Please enter a valid URL`);
}
return undefined;
};
}
export function combine(validators) {
return value => {
for (let i = 0; i < validators.length; i++) {

View File

@ -4,6 +4,7 @@ import {
maxLength,
noWhiteSpace,
integer,
url,
combine,
regExp,
} from './validators';
@ -111,6 +112,26 @@ describe('validators', () => {
});
});
test('url should reject incomplete url', () => {
expect(url(i18n)('abcd')).toEqual({
id: 'Please enter a valid URL',
});
});
test('url should accept fully qualified url', () => {
expect(url(i18n)('http://example.com/foo')).toBeUndefined();
});
test('url should accept url with query params', () => {
expect(url(i18n)('https://example.com/foo?bar=baz')).toBeUndefined();
});
test('url should reject short protocol', () => {
expect(url(i18n)('h://example.com/foo')).toEqual({
id: 'Please enter a valid URL',
});
});
test('combine should run all validators', () => {
const validators = [required(null, i18n), noWhiteSpace(i18n)];
expect(combine(validators)('')).toEqual({

View File

@ -25,6 +25,11 @@ class TowerAPIModule(TowerModule):
}
session = None
cookie_jar = CookieJar()
IDENTITY_FIELDS = {
'users': 'username',
'workflow_job_template_nodes': 'identifier',
'instances': 'hostname'
}
def __init__(self, argument_spec, direct_params=None, error_callback=None, warn_callback=None, **kwargs):
kwargs['supports_check_mode'] = True
@ -42,6 +47,30 @@ class TowerAPIModule(TowerModule):
}
return exceptions.get(name, '{0}s'.format(name))
@staticmethod
def get_name_field_from_endpoint(endpoint):
return TowerAPIModule.IDENTITY_FIELDS.get(endpoint, 'name')
def get_item_name(self, item, allow_unknown=False):
if item:
if 'name' in item:
return item['name']
for field_name in TowerAPIModule.IDENTITY_FIELDS.values():
if field_name in item:
return item[field_name]
if item.get('type', None) in ('o_auth2_access_token', 'credential_input_source'):
return item['id']
if allow_unknown:
return 'unknown'
if item:
self.exit_json(msg='Cannot determine identity field for {0} object.'.format(item.get('type', 'unknown')))
else:
self.exit_json(msg='Cannot determine identity field for Undefined object.')
def head_endpoint(self, endpoint, *args, **kwargs):
return self.make_request('HEAD', endpoint, **kwargs)
@ -88,7 +117,21 @@ class TowerAPIModule(TowerModule):
response['json']['next'] = next_page
return response
def get_one(self, endpoint, *args, **kwargs):
def get_one(self, endpoint, name_or_id=None, *args, **kwargs):
if name_or_id:
name_field = self.get_name_field_from_endpoint(endpoint)
new_args = kwargs.get('data', {}).copy()
if name_field in new_args:
self.fail_json(msg="You can't specify the field {0} in your search data if using the name_or_id field".format(name_field))
new_args['or__{0}'.format(name_field)] = name_or_id
try:
new_args['or__id'] = int(name_or_id)
except ValueError:
# If we get a value error, then we didn't have an integer so we can just pass and fall down to the fail
pass
kwargs['data'] = new_args
response = self.get_endpoint(endpoint, *args, **kwargs)
if response['status_code'] != 200:
fail_msg = "Got a {0} response when trying to get one from {1}".format(response['status_code'], endpoint)
@ -102,16 +145,19 @@ class TowerAPIModule(TowerModule):
if response['json']['count'] == 0:
return None
elif response['json']['count'] > 1:
if name_or_id:
# Since we did a name or ID search and got > 1 return something if the id matches
for asset in response['json']['results']:
if asset['id'] == name_or_id:
return asset
# We got > 1 and either didn't find something by ID (which means multiple names)
# Or we weren't running with a or search and just got back too many to begin with.
self.fail_json(msg="An unexpected number of items was returned from the API ({0})".format(response['json']['count']))
return response['json']['results'][0]
def get_one_by_name_or_id(self, endpoint, name_or_id):
name_field = 'name'
if endpoint == 'users':
name_field = 'username'
elif endpoint == 'instances':
name_field = 'hostname'
name_field = self.get_name_field_from_endpoint(endpoint)
query_params = {'or__{0}'.format(name_field): name_or_id}
try:
@ -319,24 +365,10 @@ class TowerAPIModule(TowerModule):
item_url = existing_item['url']
item_type = existing_item['type']
item_id = existing_item['id']
item_name = self.get_item_name(existing_item, allow_unknown=True)
except KeyError as ke:
self.fail_json(msg="Unable to process delete of item due to missing data {0}".format(ke))
if 'name' in existing_item:
item_name = existing_item['name']
elif 'username' in existing_item:
item_name = existing_item['username']
elif 'identifier' in existing_item:
item_name = existing_item['identifier']
elif item_type == 'o_auth2_access_token':
# An oauth2 token has no name, instead we will use its id for any of the messages
item_name = existing_item['id']
elif item_type == 'credential_input_source':
# An credential_input_source has no name, instead we will use its id for any of the messages
item_name = existing_item['id']
else:
self.fail_json(msg="Unable to process delete of {0} due to missing name".format(item_type))
response = self.delete_endpoint(item_url)
if response['status_code'] in [202, 204]:
@ -409,12 +441,7 @@ class TowerAPIModule(TowerModule):
# We have to rely on item_type being passed in since we don't have an existing item that declares its type
# We will pull the item_name out from the new_item, if it exists
for key in ('name', 'username', 'identifier', 'hostname'):
if key in new_item:
item_name = new_item[key]
break
else:
item_name = 'unknown'
item_name = self.get_item_name(new_item, allow_unknown=True)
response = self.post_endpoint(endpoint, **{'data': new_item})
if response['status_code'] == 201:

View File

@ -365,13 +365,12 @@ def main():
# Attempt to look up the object based on the provided name, credential type and optional organization
lookup_data = {
'name': name,
'credential_type': cred_type_id,
}
if organization:
lookup_data['organization'] = org_id
credential = module.get_one('credentials', **{'data': lookup_data})
credential = module.get_one('credentials', name_or_id=name, **{'data': lookup_data})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
@ -397,7 +396,7 @@ def main():
# Create the data that gets sent for create and update
credential_fields = {
'name': new_name if new_name else name,
'name': new_name if new_name else (module.get_item_name(credential) if credential else name),
'credential_type': cred_type_id,
}
if has_inputs:

View File

@ -115,7 +115,6 @@ def main():
# These will be passed into the create/updates
credential_type_params = {
'name': new_name if new_name else name,
'managed_by_tower': False,
}
if kind:
@ -128,16 +127,14 @@ def main():
credential_type_params['injectors'] = module.params.get('injectors')
# Attempt to look up credential_type based on the provided name
credential_type = module.get_one('credential_types', **{
'data': {
'name': name,
}
})
credential_type = module.get_one('credential_types', name_or_id=name)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(credential_type)
credential_type_params['name'] = new_name if new_name else (module.get_item_name(credential_type) if credential_type else name)
# If the state was present and we can let the module build or update the existing credential type, this will return on its own
module.create_or_update_if_needed(credential_type, credential_type_params, endpoint='credential_types', item_type='credential type')

View File

@ -108,9 +108,8 @@ def main():
inventory_id = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up the object based on the provided name and inventory ID
group = module.get_one('groups', **{
group = module.get_one('groups', name_or_id=name, **{
'data': {
'name': name,
'inventory': inventory_id
}
})
@ -121,7 +120,7 @@ def main():
# Create the data that gets sent for create and update
group_fields = {
'name': new_name if new_name else name,
'name': new_name if new_name else (module.get_item_name(group) if group else name),
'inventory': inventory_id,
}
if description is not None:
@ -136,8 +135,8 @@ def main():
continue
id_list = []
for sub_name in name_list:
sub_obj = module.get_one(resource, **{
'data': {'inventory': inventory_id, 'name': sub_name}
sub_obj = module.get_one(resource, name_or_id=sub_name, **{
'data': {'inventory': inventory_id},
})
if sub_obj is None:
module.fail_json(msg='Could not find {0} with name {1}'.format(resource, sub_name))

View File

@ -104,9 +104,8 @@ def main():
inventory_id = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up host based on the provided name and inventory ID
host = module.get_one('hosts', **{
host = module.get_one('hosts', name_or_id=name, **{
'data': {
'name': name,
'inventory': inventory_id
}
})
@ -117,7 +116,7 @@ def main():
# Create the data that gets sent for create and update
host_fields = {
'name': new_name if new_name else name,
'name': new_name if new_name else (module.get_item_name(host) if host else name),
'inventory': inventory_id,
'enabled': enabled,
}

View File

@ -109,11 +109,7 @@ def main():
state = module.params.get('state')
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('instance_groups', **{
'data': {
'name': name,
}
})
existing_item = module.get_one('instance_groups', name_or_id=name)
if state is 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
@ -131,7 +127,7 @@ def main():
# Create the data that gets sent for create and update
new_fields = {}
new_fields['name'] = new_name if new_name else name
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
if credential is not None:
new_fields['credential'] = credential_id
if policy_instance_percentage is not None:

View File

@ -109,9 +109,8 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up inventory based on the provided name and org ID
inventory = module.get_one('inventories', **{
inventory = module.get_one('inventories', name_or_id=name, **{
'data': {
'name': name,
'organization': org_id
}
})
@ -122,7 +121,7 @@ def main():
# Create the data that gets sent for create and update
inventory_fields = {
'name': name,
'name': module.get_item_name(inventory) if inventory else name,
'organization': org_id,
'kind': kind,
'host_filter': host_filter,

View File

@ -128,6 +128,10 @@ options:
- list of notifications to send on error
type: list
elements: str
organization:
description:
- Name of the inventory source's inventory's organization.
type: str
extends_documentation_fragment: awx.awx.auth
'''
@ -140,6 +144,7 @@ EXAMPLES = '''
credential: previously-created-credential
overwrite: True
update_on_launch: True
organization: Default
source_vars:
private: false
'''
@ -168,6 +173,7 @@ def main():
enabled_value=dict(),
host_filter=dict(),
credential=dict(),
organization=dict(),
overwrite=dict(type='bool'),
overwrite_vars=dict(type='bool'),
custom_virtualenv=dict(),
@ -190,23 +196,29 @@ def main():
name = module.params.get('name')
new_name = module.params.get('new_name')
inventory = module.params.get('inventory')
organization = module.params.get('organization')
source_script = module.params.get('source_script')
credential = module.params.get('credential')
source_project = module.params.get('source_project')
state = module.params.get('state')
# Attempt to look up inventory source based on the provided name and inventory ID
inventory_id = module.resolve_name_to_id('inventories', inventory)
inventory_source = module.get_one('inventory_sources', **{
lookup_data = {}
if organization:
lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)
inventory_object = module.get_one('inventories', name_or_id=inventory, data=lookup_data)
if not inventory_object:
module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))
inventory_source_object = module.get_one('inventory_sources', name_or_id=name, **{
'data': {
'name': name,
'inventory': inventory_id,
'inventory': inventory_object['id'],
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(inventory_source)
module.delete_if_needed(inventory_source_object)
# Attempt to look up associated field items the user specified.
association_fields = {}
@ -232,7 +244,7 @@ def main():
# Create the data that gets sent for create and update
inventory_source_fields = {
'name': new_name if new_name else name,
'inventory': inventory_id,
'inventory': inventory_object['id'],
}
# Attempt to look up the related items the user specified (these will fail the module if not found)
@ -261,12 +273,12 @@ def main():
inventory_source_fields['source_vars'] = dumps(inventory_source_fields['source_vars'])
# Sanity check on arguments
if state == 'present' and not inventory_source and not inventory_source_fields['source']:
if state == 'present' and not inventory_source_object and not inventory_source_fields['source']:
module.fail_json(msg="If creating a new inventory source, the source param must be present")
# If the state was present we can let the module build or update the existing inventory_source, this will return on its own
# If the state was present we can let the module build or update the existing inventory_source_object, this will return on its own
module.create_or_update_if_needed(
inventory_source, inventory_source_fields,
inventory_source_object, inventory_source_fields,
endpoint='inventory_sources', item_type='inventory source',
associations=association_fields
)

View File

@ -0,0 +1,153 @@
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2020, Bianca Henderson <bianca@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_inventory_source_update
author: "Bianca Henderson (@beeankha)"
short_description: Update inventory source(s).
description:
- Update Ansible Tower inventory source(s). See
U(https://www.ansible.com/tower) for an overview.
options:
inventory:
description:
- Name of the inventory that contains the inventory source(s) to update.
required: True
type: str
inventory_source:
description:
- The name of the inventory source to update.
required: True
type: str
organization:
description:
- Name of the inventory source's inventory's organization.
type: str
wait:
description:
- Wait for the job to complete.
default: False
type: bool
interval:
description:
- The interval to request an update from Tower.
required: False
default: 1
type: float
timeout:
description:
- If waiting for the job to complete this will abort after this
amount of seconds
type: int
extends_documentation_fragment: awx.awx.auth
'''
EXAMPLES = '''
- name: Update a single inventory source
tower_inventory_source_update:
inventory: "My Inventory"
inventory_source: "Example Inventory Source"
organization: Default
- name: Update all inventory sources
tower_inventory_source_update:
inventory: "My Other Inventory"
inventory_source: "{{ item }}"
loop: "{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': 30 }, return_ids=True ) }}"
'''
RETURN = '''
id:
description: id of the inventory update
returned: success
type: int
sample: 86
status:
description: status of the inventory update
returned: success
type: str
sample: pending
'''
from ..module_utils.tower_api import TowerAPIModule
def main():
# Any additional arguments that are not fields of the item can be added here
argument_spec = dict(
inventory=dict(required=True),
inventory_source=dict(required=True),
organization=dict(),
wait=dict(default=False, type='bool'),
interval=dict(default=1.0, type='float'),
timeout=dict(default=None, type='int'),
)
# Create a module for ourselves
module = TowerAPIModule(argument_spec=argument_spec)
# Extract our parameters
inventory = module.params.get('inventory')
inventory_source = module.params.get('inventory_source')
organization = module.params.get('organization')
wait = module.params.get('wait')
interval = module.params.get('interval')
timeout = module.params.get('timeout')
lookup_data = {'name': inventory}
if organization:
lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)
inventory_object = module.get_one('inventories', data=lookup_data)
if not inventory_object:
module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))
inventory_source_object = module.get_one('inventory_sources', **{
'data': {
'name': inventory_source,
'inventory': inventory_object['id'],
}
})
if not inventory_source_object:
module.fail_json(msg='The specified inventory source was not found.')
# Sync the inventory source(s)
inventory_source_update_results = module.post_endpoint(inventory_source_object['related']['update'], **{'data': {}})
if inventory_source_update_results['status_code'] != 202:
module.fail_json(msg="Failed to update inventory source, see response for details", **{'response': inventory_source_update_results})
module.json_output['changed'] = True
module.json_output['id'] = inventory_source_update_results['json']['id']
module.json_output['status'] = inventory_source_update_results['json']['status']
if not wait:
module.exit_json(**module.json_output)
# Invoke wait function
module.wait_on_url(
url=inventory_source_update_results['json']['url'],
object_name=inventory_object,
object_type='inventory_update',
timeout=timeout, interval=interval
)
module.exit_json(**module.json_output)
if __name__ == '__main__':
main()

View File

@ -201,11 +201,7 @@ def main():
post_data['credentials'].append(module.resolve_name_to_id('credentials', credential))
# Attempt to look up job_template based on the provided name
job_template = module.get_one('job_templates', **{
'data': {
'name': name,
}
})
job_template = module.get_one('job_templates', name_or_id=name)
if job_template is None:
module.fail_json(msg="Unable to find job template by name {0}".format(name))

View File

@ -409,7 +409,7 @@ def main():
credentials.append(credential)
new_fields = {}
search_fields = {'name': name}
search_fields = {}
# Attempt to look up the related items the user specified (these will fail the module if not found)
organization_id = None
@ -419,14 +419,14 @@ def main():
search_fields['organization'] = new_fields['organization'] = organization_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('job_templates', **{'data': search_fields})
existing_item = module.get_one('job_templates', name_or_id=name, **{'data': search_fields})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
# Create the data that gets sent for create and update
new_fields['name'] = new_name if new_name else name
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
for field_name in (
'description', 'job_type', 'playbook', 'scm_branch', 'forks', 'limit', 'verbosity',
'job_tags', 'force_handlers', 'skip_tags', 'start_at_task', 'timeout', 'use_fact_cache',
@ -453,9 +453,8 @@ def main():
new_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)
if project is not None:
if organization_id is not None:
project_data = module.get_one('projects', **{
project_data = module.get_one('projects', name_or_id=project, **{
'data': {
'name': project,
'organization': organization_id,
}
})

View File

@ -80,16 +80,15 @@ def main():
organization_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('labels', **{
existing_item = module.get_one('labels', name_or_id=name, **{
'data': {
'name': name,
'organization': organization_id,
}
})
# Create the data that gets sent for create and update
new_fields = {}
new_fields['name'] = new_name if new_name else name
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
if organization:
new_fields['organization'] = organization_id

View File

@ -380,9 +380,8 @@ def main():
organization_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('notification_templates', **{
existing_item = module.get_one('notification_templates', name_or_id=name, **{
'data': {
'name': name,
'organization': organization_id,
}
})
@ -404,7 +403,7 @@ def main():
new_fields = {}
if final_notification_configuration:
new_fields['notification_configuration'] = final_notification_configuration
new_fields['name'] = new_name if new_name else name
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
if description is not None:
new_fields['description'] = description
if organization is not None:

View File

@ -117,11 +117,7 @@ def main():
state = module.params.get('state')
# Attempt to look up organization based on the provided name
organization = module.get_one('organizations', **{
'data': {
'name': name,
}
})
organization = module.get_one('organizations', name_or_id=name)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
@ -154,7 +150,7 @@ def main():
association_fields['notification_templates_approvals'].append(module.resolve_name_to_id('notification_templates', item))
# Create the data that gets sent for create and update
org_fields = {'name': name}
org_fields = {'name': module.get_item_name(organization) if organization else name}
if description is not None:
org_fields['description'] = description
if custom_virtualenv is not None:

View File

@ -239,9 +239,8 @@ def main():
credential = module.resolve_name_to_id('credentials', credential)
# Attempt to look up project based on the provided name and org ID
project = module.get_one('projects', **{
project = module.get_one('projects', name_or_id=name, **{
'data': {
'name': name,
'organization': org_id
}
})
@ -273,7 +272,7 @@ def main():
# Create the data that gets sent for create and update
project_fields = {
'name': name,
'name': module.get_item_name(project) if project else name,
'scm_type': scm_type,
'scm_url': scm_url,
'scm_branch': scm_branch,

View File

@ -0,0 +1,144 @@
#!/usr/bin/python
# coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_project_update
author: "Sean Sullivan (@sean-m-sullivan)"
short_description: Update a Project in Ansible Tower
description:
- Update a Ansible Tower Project. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name or id of the project to update.
required: True
type: str
aliases:
- project
organization:
description:
- Organization the project exists in.
- Used to help lookup the object, cannot be modified using this module.
- If not provided, will lookup by name only, which does not work with duplicates.
type: str
wait:
description:
- Wait for the project to update.
default: True
type: bool
interval:
description:
- The interval to request an update from Tower.
required: False
default: 1
type: float
timeout:
description:
- If waiting for the project to update this will abort after this
amount of seconds
type: int
extends_documentation_fragment: awx.awx.auth
'''
RETURN = '''
id:
description: project id of the updated project
returned: success
type: int
sample: 86
status:
description: status of the updated project
returned: success
type: str
sample: pending
'''
EXAMPLES = '''
- name: Launch a project with a timeout of 10 seconds
tower_project_update:
project: "Networking Project"
timeout: 10
- name: Launch a Project with extra_vars without waiting
tower_project_update:
project: "Networking Project"
wait: False
'''
from ..module_utils.tower_api import TowerAPIModule
import json
import time
def main():
    """Trigger an update of an existing Tower project, optionally waiting for it.

    Looks the project up by name or id (optionally scoped to an organization),
    POSTs to its ``related.update`` endpoint, and either returns immediately
    (``wait=False``) or polls the resulting project-update job until it
    finishes or ``timeout`` seconds elapse. Exits via the module's
    ``exit_json``/``fail_json``; the JSON result carries ``id`` and ``status``
    of the launched update.
    """
    # Any additional arguments that are not fields of the item can be added here
    argument_spec = dict(
        name=dict(required=True, aliases=['project']),
        organization=dict(),
        wait=dict(default=True, type='bool'),
        interval=dict(default=1.0, type='float'),
        timeout=dict(default=None, type='int'),
    )

    # Create a module for ourselves
    module = TowerAPIModule(argument_spec=argument_spec)

    # Extract our parameters
    name = module.params.get('name')
    organization = module.params.get('organization')
    wait = module.params.get('wait')
    interval = module.params.get('interval')
    timeout = module.params.get('timeout')

    # Attempt to look up project based on the provided name or id.
    # Scoping by organization disambiguates duplicate project names.
    lookup_data = {}
    if organization:
        lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)
    project = module.get_one('projects', name_or_id=name, data=lookup_data)
    if project is None:
        module.fail_json(msg="Unable to find project")

    # Launch the project update; the API acknowledges with HTTP 202.
    result = module.post_endpoint(project['related']['update'])

    if result['status_code'] != 202:
        module.fail_json(msg="Failed to update project, see response for details", response=result)

    module.json_output['changed'] = True
    module.json_output['id'] = result['json']['id']
    module.json_output['status'] = result['json']['status']

    # Fire-and-forget mode: report the launched update without polling.
    # (The original duplicated this check and set an unused start time;
    # wait_on_url handles the timeout itself.)
    if not wait:
        module.exit_json(**module.json_output)

    # Poll the update job until completion, failure, or timeout.
    module.wait_on_url(
        url=result['json']['url'],
        object_name=module.get_item_name(project),
        object_type='Project Update',
        timeout=timeout, interval=interval
    )

    module.exit_json(**module.json_output)


if __name__ == '__main__':
    main()

View File

@ -190,17 +190,13 @@ def main():
unified_job_template_id = module.resolve_name_to_id('unified_job_templates', unified_job_template)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('schedules', **{
'data': {
'name': name,
}
})
existing_item = module.get_one('schedules', name_or_id=name)
# Create the data that gets sent for create and update
new_fields = {}
if rrule is not None:
new_fields['rrule'] = rrule
new_fields['name'] = new_name if new_name else name
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
if description is not None:
new_fields['description'] = description
if extra_data is not None:

View File

@ -87,9 +87,8 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up team based on the provided name and org ID
team = module.get_one('teams', **{
team = module.get_one('teams', name_or_id=name, **{
'data': {
'name': name,
'organization': org_id
}
})
@ -100,7 +99,7 @@ def main():
# Create the data that gets sent for create and update
team_fields = {
'name': new_name if new_name else name,
'name': new_name if new_name else (module.get_item_name(team) if team else name),
'organization': org_id
}
if description is not None:

View File

@ -134,11 +134,7 @@ def main():
# Attempt to look up the related items the user specified (these will fail the module if not found)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('users', **{
'data': {
'username': username,
}
})
existing_item = module.get_one('users', name_or_id=username)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
@ -147,7 +143,7 @@ def main():
# Create the data that gets sent for create and update
new_fields = {}
if username:
new_fields['username'] = username
new_fields['username'] = module.get_item_name(existing_item) if existing_item else username
if first_name:
new_fields['first_name'] = first_name
if last_name:

View File

@ -190,7 +190,7 @@ def main():
state = module.params.get('state')
new_fields = {}
search_fields = {'name': name}
search_fields = {}
# Attempt to look up the related items the user specified (these will fail the module if not found)
organization = module.params.get('organization')
@ -199,7 +199,7 @@ def main():
search_fields['organization'] = new_fields['organization'] = organization_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_templates', **{'data': search_fields})
existing_item = module.get_one('workflow_job_templates', name_or_id=name, **{'data': search_fields})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
@ -214,7 +214,7 @@ def main():
new_fields['webhook_credential'] = module.resolve_name_to_id('webhook_credential', webhook_credential)
# Create the data that gets sent for create and update
new_fields['name'] = new_name if new_name else name
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
for field_name in (
'description', 'survey_enabled', 'allow_simultaneous',
'limit', 'scm_branch', 'extra_vars',

View File

@ -198,12 +198,14 @@ def main():
workflow_job_template = module.params.get('workflow_job_template')
workflow_job_template_id = None
if workflow_job_template:
wfjt_search_fields = {'name': workflow_job_template}
wfjt_search_fields = {}
organization = module.params.get('organization')
if organization:
organization_id = module.resolve_name_to_id('organizations', organization)
wfjt_search_fields['organization'] = organization_id
wfjt_data = module.get_one('workflow_job_templates', **{'data': wfjt_search_fields})
wfjt_data = module.get_one('workflow_job_templates', name_or_id=workflow_job_template, **{
'data': wfjt_search_fields
})
if wfjt_data is None:
module.fail_json(msg="The workflow {0} in organization {1} was not found on the Tower server".format(
workflow_job_template, organization

View File

@ -138,10 +138,10 @@ def main():
post_data['inventory'] = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up job_template based on the provided name
lookup_data = {'name': name}
lookup_data = {}
if organization:
lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)
workflow_job_template = module.get_one('workflow_job_templates', data=lookup_data)
workflow_job_template = module.get_one('workflow_job_templates', name_or_id=name, data=lookup_data)
if workflow_job_template is None:
module.fail_json(msg="Unable to find workflow job template")

View File

@ -15,7 +15,7 @@ import re
# Normally a read-only endpoint should not have a module (i.e. /api/v2/me) but sometimes we reuse a name
# For example, we have a tower_role module but /api/v2/roles is a read only endpoint.
# This list indicates which read-only endpoints have associated modules with them.
read_only_endpoints_with_modules = ['tower_settings', 'tower_role']
read_only_endpoints_with_modules = ['tower_settings', 'tower_role', 'tower_project_update']
# If a module should not be created for an endpoint and the endpoint is not read-only add it here
# THINK HARD ABOUT DOING THIS
@ -23,9 +23,9 @@ no_module_for_endpoint = []
# Some modules work on the related fields of an endpoint. These modules will not have an auto-associated endpoint
no_endpoint_for_module = [
'tower_import', 'tower_meta', 'tower_export', 'tower_job_launch', 'tower_job_wait', 'tower_job_list',
'tower_license', 'tower_ping', 'tower_receive', 'tower_send', 'tower_workflow_launch', 'tower_job_cancel',
'tower_workflow_template',
'tower_import', 'tower_meta', 'tower_export', 'tower_inventory_source_update', 'tower_job_launch', 'tower_job_wait',
'tower_job_list', 'tower_license', 'tower_ping', 'tower_receive', 'tower_send', 'tower_workflow_launch',
'tower_job_cancel', 'tower_workflow_template',
]
# Global module parameters we can ignore
@ -43,7 +43,8 @@ no_api_parameter_ok = {
# /survey spec is now how we handle associations
# We take an organization here to help with the lookups only
'tower_job_template': ['survey_spec', 'organization'],
# Organization is how we looking job templates
'tower_inventory_source': ['organization'],
# Organization is how we are looking up job templates
'tower_workflow_job_template_node': ['organization'],
# Survey is how we handle associations
'tower_workflow_job_template': ['survey'],

View File

@ -80,7 +80,8 @@ def test_create_inventory_source_multiple_orgs(run_module, admin_user):
result = run_module('tower_inventory_source', dict(
name='Test Inventory Source',
inventory=inv2.id,
inventory=inv2.name,
organization='test-org-number-two',
source='ec2',
state='present'
), admin_user)

View File

@ -82,9 +82,9 @@
that:
- "result is changed"
- name: Re-create the Org-specific credential (new school)
- name: Re-create the Org-specific credential (new school) with an ID
tower_credential:
name: "{{ ssh_cred_name1 }}"
name: "{{ result.id }}"
organization: Default
credential_type: 'Machine'
state: present

View File

@ -1,105 +1,113 @@
---
- name: Generate a random string for test
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined
- name: Generate names
set_fact:
src_cred_name: src_cred
target_cred_name: target_cred
src_cred_name: "AWX-Collection-tests-tower_credential_input_source-src_cred-{{ test_id }}"
target_cred_name: "AWX-Collection-tests-tower_credential_input_source-target_cred-{{ test_id }}"
- name: Add Tower credential Lookup
tower_credential:
description: Credential for Testing Source
name: "{{ src_cred_name }}"
credential_type: CyberArk AIM Central Credential Provider Lookup
inputs:
url: "https://cyberark.example.com"
app_id: "My-App-ID"
organization: Default
register: result
- block:
- name: Add Tower credential Lookup
tower_credential:
description: Credential for Testing Source
name: "{{ src_cred_name }}"
credential_type: CyberArk AIM Central Credential Provider Lookup
inputs:
url: "https://cyberark.example.com"
app_id: "My-App-ID"
organization: Default
register: src_cred_result
- assert:
that:
- "result is changed"
- assert:
that:
- "src_cred_result is changed"
- name: Add Tower credential Target
tower_credential:
description: Credential for Testing Target
name: "{{ target_cred_name }}"
credential_type: Machine
inputs:
username: user
organization: Default
register: result
- name: Add Tower credential Target
tower_credential:
description: Credential for Testing Target
name: "{{ target_cred_name }}"
credential_type: Machine
inputs:
username: user
organization: Default
register: target_cred_result
- assert:
that:
- "result is changed"
- assert:
that:
- "target_cred_result is changed"
- name: Add credential Input Source
tower_credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_name }}"
source_credential: "{{ src_cred_name }}"
metadata:
object_query: "Safe=MY_SAFE;Object=AWX-user"
object_query_format: "Exact"
state: present
- name: Add credential Input Source
tower_credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_result.id }}"
source_credential: "{{ src_cred_result.id }}"
metadata:
object_query: "Safe=MY_SAFE;Object=AWX-user"
object_query_format: "Exact"
state: present
register: result
- assert:
that:
- "result is changed"
- assert:
that:
- "result is changed"
- name: Add Second Tower credential Lookup
tower_credential:
description: Credential for Testing Source Change
name: "{{ src_cred_name }}-2"
credential_type: CyberArk AIM Central Credential Provider Lookup
inputs:
url: "https://cyberark-prod.example.com"
app_id: "My-App-ID"
organization: Default
register: result
- name: Add Second Tower credential Lookup
tower_credential:
description: Credential for Testing Source Change
name: "{{ src_cred_name }}-2"
credential_type: CyberArk AIM Central Credential Provider Lookup
inputs:
url: "https://cyberark-prod.example.com"
app_id: "My-App-ID"
organization: Default
register: result
- name: Change credential Input Source
tower_credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_name }}"
source_credential: "{{ src_cred_name }}-2"
state: present
- name: Change credential Input Source
tower_credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_name }}"
source_credential: "{{ src_cred_name }}-2"
state: present
- assert:
that:
- "result is changed"
- assert:
that:
- "result is changed"
- name: Remove a Tower credential source
tower_credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_name }}"
state: absent
register: result
always:
- name: Remove a Tower credential source
tower_credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_name }}"
state: absent
register: result
- assert:
that:
- "result is changed"
- assert:
that:
- "result is changed"
- name: Remove Tower credential Lookup
tower_credential:
name: "{{ src_cred_name }}"
organization: Default
credential_type: CyberArk AIM Central Credential Provider Lookup
state: absent
register: result
- name: Remove Tower credential Lookup
tower_credential:
name: "{{ src_cred_name }}"
organization: Default
credential_type: CyberArk AIM Central Credential Provider Lookup
state: absent
register: result
- name: Remove Alt Tower credential Lookup
tower_credential:
name: "{{ src_cred_name }}-2"
organization: Default
credential_type: CyberArk AIM Central Credential Provider Lookup
state: absent
register: result
- name: Remove Alt Tower credential Lookup
tower_credential:
name: "{{ src_cred_name }}-2"
organization: Default
credential_type: CyberArk AIM Central Credential Provider Lookup
state: absent
register: result
- name: Remove Tower credential
tower_credential:
name: "{{ target_cred_name }}"
organization: Default
credential_type: Machine
state: absent
register: result
- name: Remove Tower credential
tower_credential:
name: "{{ target_cred_name }}"
organization: Default
credential_type: Machine
state: absent
register: result

View File

@ -18,7 +18,7 @@
- name: Remove a Tower credential type
tower_credential_type:
name: "{{ cred_type_name }}"
name: "{{ result.id }}"
state: absent
register: result

View File

@ -14,11 +14,12 @@
name: "{{ inv_name }}"
organization: Default
state: present
register: result
- name: Create a Group
tower_group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
inventory: "{{ result.id }}"
state: present
variables:
foo: bar
@ -30,7 +31,7 @@
- name: Delete a Group
tower_group:
name: "{{ group_name1 }}"
name: "{{ result.id }}"
inventory: "{{ inv_name }}"
state: absent
register: result

View File

@ -9,11 +9,12 @@
name: "{{ inv_name }}"
organization: Default
state: present
register: result
- name: Create a Host
tower_host:
name: "{{ host_name }}"
inventory: "{{ inv_name }}"
inventory: "{{ result.id }}"
state: present
variables:
foo: bar
@ -25,7 +26,7 @@
- name: Delete a Host
tower_host:
name: "{{ host_name }}"
name: "{{ result.id }}"
inventory: "{{ inv_name }}"
state: absent
register: result

View File

@ -19,11 +19,11 @@
host: "https://openshift.org"
bearer_token: "asdf1234"
verify_ssl: false
register: result
register: cred_result
- assert:
that:
- "result is changed"
- "cred_result is changed"
- name: Create an Instance Group
tower_instance_group:
@ -37,10 +37,22 @@
that:
- "result is changed"
- name: Update an Instance Group
tower_instance_group:
name: "{{ result.id }}"
policy_instance_percentage: 34
policy_instance_minimum: 24
state: present
register: result
- assert:
that:
- "result is changed"
- name: Create a container group
tower_instance_group:
name: "{{ group_name2 }}"
credential: "{{ cred_name1 }}"
credential: "{{ cred_result.id }}"
register: result
- assert:

View File

@ -29,7 +29,7 @@
tower_inventory:
name: "{{ inv_name1 }}"
organization: Default
insights_credential: "{{ cred_name1 }}"
insights_credential: "{{ result.id }}"
state: present
register: result
@ -39,7 +39,7 @@
- name: Test Inventory module idempotency
tower_inventory:
name: "{{ inv_name1 }}"
name: "{{ result.id }}"
organization: Default
insights_credential: "{{ cred_name1 }}"
state: present

View File

@ -16,6 +16,7 @@
host: https://example.org:5000
password: passw0rd
domain: test
register: credential_result
- name: Add a Tower inventory
tower_inventory:
@ -28,7 +29,7 @@
name: "{{ openstack_inv_source }}"
description: Source for Test inventory
inventory: "{{ openstack_inv }}"
credential: "{{ openstack_cred }}"
credential: "{{ credential_result.id }}"
overwrite: true
update_on_launch: true
source_vars:
@ -42,7 +43,7 @@
- name: Delete the inventory source with an invalid cred, source_project, sourece_script specified
tower_inventory_source:
name: "{{ openstack_inv_source }}"
name: "{{ result.id }}"
inventory: "{{ openstack_inv }}"
credential: "Does Not Exit"
source_project: "Does Not Exist"

View File

@ -0,0 +1,116 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
- name: Generate names
set_fact:
project_name: "AWX-Collection-tests-tower_inventory_source_update-project-{{ test_id }}"
inv_name: "AWX-Collection-tests-tower_inventory_source_update-inv-{{ test_id }}"
inv_source1: "AWX-Collection-tests-tower_inventory_source_update-source1-{{ test_id }}"
inv_source2: "AWX-Collection-tests-tower_inventory_source_update-source2-{{ test_id }}"
inv_source3: "AWX-Collection-tests-tower_inventory_source_update-source3-{{ test_id }}"
org_name: "AWX-Collection-tests-tower_inventory_source_update-org-{{ test_id }}"
- block:
- name: "Create a new organization"
tower_organization:
name: "{{ org_name }}"
register: created_org
- name: Create a git project without credentials
tower_project:
name: "{{ project_name }}"
organization: "{{ org_name }}"
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
wait: true
- name: Create an Inventory
tower_inventory:
name: "{{ inv_name }}"
organization: "{{ org_name }}"
state: present
- name: Create another inventory w/ same name, different org
tower_inventory:
name: "{{ inv_name }}"
organization: Default
state: present
register: created_inventory
- name: Create an Inventory Source (specifically connected to the randomly generated org)
tower_inventory_source:
name: "{{ inv_source1 }}"
source: scm
source_project: "{{ project_name }}"
source_path: inventories/inventory.ini
description: Source for Test inventory
organization: "{{ created_org.id }}"
inventory: "{{ inv_name }}"
- name: Create Another Inventory Source
tower_inventory_source:
name: "{{ inv_source2 }}"
source: scm
source_project: "{{ project_name }}"
source_path: inventories/create_10_hosts.ini
description: Source for Test inventory
organization: Default
inventory: "{{ inv_name }}"
- name: Create Yet Another Inventory Source (to make lookup plugin find multiple inv sources)
tower_inventory_source:
name: "{{ inv_source3 }}"
source: scm
source_project: "{{ project_name }}"
source_path: inventories/create_100_hosts.ini
description: Source for Test inventory
organization: Default
inventory: "{{ inv_name }}"
- name: Test Inventory Source Update
tower_inventory_source_update:
inventory: "{{ inv_name }}"
inventory_source: "{{ inv_source2 }}"
organization: Default
register: result
- assert:
that:
- "result is changed"
- name: Test Inventory Source Update for All Sources
tower_inventory_source_update:
inventory: "{{ inv_name }}"
inventory_source: "{{ item.name }}"
organization: Default
wait: true
loop: "{{ query('awx.awx.tower_api', 'inventory_sources', query_params={ 'inventory': created_inventory.id }, expect_objects=True, return_objects=True) }}"
loop_control:
label: "{{ item.name }}"
register: result
- assert:
that:
- "result is changed"
always:
- name: Delete Inventory
tower_inventory:
name: "{{ inv_name }}"
organization: Default
state: absent
- name: Delete Project
tower_project:
name: "{{ project_name }}"
organization: Default
state: absent
- name: "Remove the organization"
tower_organization:
name: "{{ org_name }}"
state: absent

View File

@ -22,14 +22,14 @@
state: present
scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples.git
register: result
register: proj_result
- name: Create Credential1
tower_credential:
name: "{{ cred1 }}"
organization: Default
kind: tower
register: cred1_result
- name: Create Credential2
tower_credential:
@ -84,19 +84,19 @@
credentials: ["{{ cred1 }}", "{{ cred2 }}"]
job_type: run
state: present
register: result
register: jt1_result
- assert:
that:
- "result is changed"
- "jt1_result is changed"
- name: Add a credential to this JT
tower_job_template:
name: "{{ jt1 }}"
project: "{{ proj1 }}"
project: "{{ proj_result.id }}"
playbook: hello_world.yml
credentials:
- "{{ cred1 }}"
- "{{ cred1_result.id }}"
register: result
- assert:
@ -105,7 +105,7 @@
- name: Try to add the same credential to this JT
tower_job_template:
name: "{{ jt1 }}"
name: "{{ jt1_result.id }}"
project: "{{ proj1 }}"
playbook: hello_world.yml
credentials:

Some files were not shown because too many files have changed in this diff Show More