Mirror of https://github.com/ansible/awx.git (synced 2026-01-12 02:19:58 -03:30)
Commit f35ce93fb1
@@ -20,7 +20,7 @@ recursive-exclude awx/ui/client *
recursive-exclude awx/settings local_settings.py*
include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1
include tools/scripts/ansible-tower-service
include tools/scripts/automation-controller-service
include tools/scripts/failure-event-handler
include tools/scripts/awx-python
include awx/playbooks/library/mkfifo.py
Makefile
@@ -31,7 +31,7 @@ DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio,pycurl
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0
VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0 wheel==0.36.2

# Determine appropriate shasum command
UNAME_S := $(shell uname -s)
@@ -68,12 +68,12 @@ class Command(BaseCommand):
print('Demo Credential, Inventory, and Job Template added.')
changed = True

default_ee = settings.AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE
ee, created = ExecutionEnvironment.objects.get_or_create(name='Default EE', defaults={'image': default_ee, 'managed_by_tower': True})
for ee in reversed(settings.DEFAULT_EXECUTION_ENVIRONMENTS):
_, created = ExecutionEnvironment.objects.get_or_create(name=ee['name'], defaults={'image': ee['image'], 'managed_by_tower': True})

if created:
changed = True
print('Default Execution Environment registered.')
print('Default Execution Environment(s) registered.')

if changed:
print('(changed: True)')
@@ -29,6 +29,7 @@ from awx.main.utils.safe_yaml import sanitize_jinja
# other AWX imports
from awx.main.models.rbac import batch_role_ancestor_rebuilding
from awx.main.utils import ignore_inventory_computed_fields, get_licenser
from awx.main.utils.execution_environments import get_execution_environment_default
from awx.main.signals import disable_activity_stream
from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV
from awx.main.utils.pglock import advisory_lock
@@ -90,7 +91,7 @@ class AnsibleInventoryLoader(object):
bargs.extend(['-v', '{0}:{0}:Z'.format(self.source)])
for key, value in STANDARD_INVENTORY_UPDATE_ENV.items():
bargs.extend(['-e', '{0}={1}'.format(key, value)])
bargs.extend([settings.AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE])
bargs.extend([get_execution_environment_default().image])
bargs.extend(['ansible-inventory', '-i', self.source])
bargs.extend(['--playbook-dir', functioning_dir(self.source)])
if self.verbosity:
awx/main/migrations/0135_schedule_sort_fallback_to_id.py (new file)
@@ -0,0 +1,18 @@
# Generated by Django 2.2.16 on 2021-03-29 15:30

from django.db import migrations
import django.db.models.expressions


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0134_unifiedjob_ansible_version'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='schedule',
            options={'ordering': [django.db.models.expressions.OrderBy(django.db.models.expressions.F('next_run'), descending=True, nulls_last=True), 'id']},
        ),
    ]
@@ -115,6 +115,6 @@ def kubernetes_bearer_token(cred, env, private_data_dir):
with os.fdopen(handle, 'w') as f:
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
f.write(cred.get_input('ssl_ca_cert'))
env['K8S_AUTH_SSL_CA_CERT'] = path
env['K8S_AUTH_SSL_CA_CERT'] = os.path.join('/runner', os.path.basename(path))
else:
env['K8S_AUTH_VERIFY_SSL'] = 'False'
@@ -1227,6 +1227,10 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
null=True,
)

@property
def is_container_group_task(self):
return bool(self.instance_group and self.instance_group.is_container_group)

def _get_parent_field_name(self):
return 'inventory_source'
@@ -21,6 +21,7 @@ from django.utils.translation import ugettext_lazy as _
from awx.main.models.base import prevent_search
from awx.main.models.rbac import Role, RoleAncestorEntry, get_roles_on_resource
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
from awx.main.utils.execution_environments import get_execution_environment_default
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
from awx.main.fields import JSONField, AskForField
@@ -461,13 +462,6 @@ class ExecutionEnvironmentMixin(models.Model):
help_text=_('The container image to be used for execution.'),
)

def get_execution_environment_default(self):
from awx.main.models.execution_environments import ExecutionEnvironment

if settings.DEFAULT_EXECUTION_ENVIRONMENT is not None:
return settings.DEFAULT_EXECUTION_ENVIRONMENT
return ExecutionEnvironment.objects.filter(organization=None, managed_by_tower=True).first()

def resolve_execution_environment(self):
"""
Return the execution environment that should be used when creating a new job.
@@ -482,7 +476,7 @@ class ExecutionEnvironmentMixin(models.Model):
if self.inventory.organization.default_environment is not None:
return self.inventory.organization.default_environment

return self.get_execution_environment_default()
return get_execution_environment_default()


class CustomVirtualEnvMixin(models.Model):
@@ -63,7 +63,7 @@ class ScheduleManager(ScheduleFilterMethods, models.Manager):
class Schedule(PrimordialModel, LaunchTimeConfig):
class Meta:
app_label = 'main'
ordering = ['-next_run']
ordering = [models.F('next_run').desc(nulls_last=True), 'id']
unique_together = ('unified_job_template', 'name')

objects = ScheduleManager()
@@ -97,6 +97,7 @@ from awx.main.utils import (
deepmerge,
parse_yaml_or_json,
)
from awx.main.utils.execution_environments import get_execution_environment_default
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.external_logging import reconfigure_rsyslog
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
@@ -1806,13 +1807,14 @@ class RunJob(BaseTask):
logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
sync_needs.append(source_update_tag)
elif job.project.scm_type == 'git' and job.project.scm_revision and (not branch_override):
git_repo = git.Repo(project_path)
try:
git_repo = git.Repo(project_path)

if job_revision == git_repo.head.commit.hexsha:
logger.debug('Skipping project sync for {} because commit is locally available'.format(job.log_format))
else:
sync_needs.append(source_update_tag)
except (ValueError, BadGitName):
except (ValueError, BadGitName, git.exc.InvalidGitRepositoryError):
logger.debug('Needed commit for {} not in local source tree, will sync with remote'.format(job.log_format))
sync_needs.append(source_update_tag)
else:
@@ -2505,7 +2507,7 @@ class RunInventoryUpdate(BaseTask):
args.append(container_location)

args.append('--output')
args.append(os.path.join('/runner', 'artifacts', 'output.json'))
args.append(os.path.join('/runner', 'artifacts', str(inventory_update.id), 'output.json'))

if os.path.isdir(source_location):
playbook_dir = container_location
@@ -3010,7 +3012,7 @@ class AWXReceptorJob:
return self._run_internal(receptor_ctl)
finally:
# Make sure to always release the work unit if we established it
if self.unit_id is not None:
if self.unit_id is not None and not settings.AWX_CONTAINER_GROUP_KEEP_POD:
receptor_ctl.simple_command(f"work release {self.unit_id}")

def _run_internal(self, receptor_ctl):
@@ -3126,11 +3128,23 @@ class AWXReceptorJob:

@property
def pod_definition(self):
if self.task:
ee = self.task.instance.resolve_execution_environment()
else:
ee = get_execution_environment_default()

default_pod_spec = {
"apiVersion": "v1",
"kind": "Pod",
"metadata": {"namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE},
"spec": {"containers": [{"image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE, "name": 'worker', "args": ['ansible-runner', 'worker']}]},
"spec": {
"containers": [
{
"image": ee.image,
"name": 'worker',
}
],
},
}

pod_spec_override = {}
@@ -140,7 +140,7 @@ def test_delete_instance_group_jobs_running(delete, instance_group_jobs_running,


@pytest.mark.django_db
def test_delete_rename_tower_instance_group_prevented(delete, options, tower_instance_group, instance_group, user, patch):
def test_delete_rename_tower_instance_group_prevented(delete, options, tower_instance_group, instance_group, user, patch, execution_environment):
url = reverse("api:instance_group_detail", kwargs={'pk': tower_instance_group.pk})
super_user = user('bob', True)
@@ -829,5 +829,5 @@ def slice_job_factory(slice_jt_factory):


@pytest.fixture
def execution_environment(organization):
return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", organization=organization)
def execution_environment():
return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", managed_by_tower=True)
@@ -1,10 +1,11 @@
import subprocess
import base64
from collections import namedtuple

from unittest import mock # noqa
import pytest

from awx.main.scheduler.kubernetes import PodManager
from awx.main.tasks import AWXReceptorJob
from awx.main.utils import (
create_temporary_fifo,
)
@@ -34,7 +35,7 @@ def test_containerized_job(containerized_job):


@pytest.mark.django_db
def test_kubectl_ssl_verification(containerized_job):
def test_kubectl_ssl_verification(containerized_job, execution_environment):
cred = containerized_job.instance_group.credential
cred.inputs['verify_ssl'] = True
key_material = subprocess.run('openssl genrsa 2> /dev/null', shell=True, check=True, stdout=subprocess.PIPE)
@@ -46,6 +47,8 @@ def test_kubectl_ssl_verification(containerized_job):
cert = subprocess.run(cmd.strip(), shell=True, check=True, stdout=subprocess.PIPE)
cred.inputs['ssl_ca_cert'] = cert.stdout
cred.save()
pm = PodManager(containerized_job)
ca_data = pm.kube_config['clusters'][0]['cluster']['certificate-authority-data']
RunJob = namedtuple('RunJob', ['instance', 'build_execution_environment_params'])
rj = RunJob(instance=containerized_job, build_execution_environment_params=lambda x: {})
receptor_job = AWXReceptorJob(rj, runner_params={'settings': {}})
ca_data = receptor_job.kube_config['clusters'][0]['cluster']['certificate-authority-data']
assert cert.stdout == base64.b64decode(ca_data.encode())
@@ -1,49 +0,0 @@
import pytest
from django.conf import settings

from awx.main.models import (
InstanceGroup,
Job,
JobTemplate,
Project,
Inventory,
)
from awx.main.scheduler.kubernetes import PodManager


@pytest.fixture
def container_group():
instance_group = InstanceGroup(name='container-group', id=1)

return instance_group


@pytest.fixture
def job(container_group):
return Job(pk=1, id=1, project=Project(), instance_group=container_group, inventory=Inventory(), job_template=JobTemplate(id=1, name='foo'))


def test_default_pod_spec(job):
default_image = PodManager(job).pod_definition['spec']['containers'][0]['image']
assert default_image == settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE


def test_custom_pod_spec(job):
job.instance_group.pod_spec_override = """
spec:
containers:
- image: my-custom-image
"""
custom_image = PodManager(job).pod_definition['spec']['containers'][0]['image']
assert custom_image == 'my-custom-image'


def test_pod_manager_namespace_property(job):
pm = PodManager(job)
assert pm.namespace == settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE

job.instance_group.pod_spec_override = """
metadata:
namespace: my-namespace
"""
assert PodManager(job).namespace == 'my-namespace'
@@ -1003,7 +1003,8 @@ class TestJobCredentials(TestJobExecution):

if verify:
assert env['K8S_AUTH_VERIFY_SSL'] == 'True'
cert = open(env['K8S_AUTH_SSL_CA_CERT'], 'r').read()
local_path = os.path.join(private_data_dir, os.path.basename(env['K8S_AUTH_SSL_CA_CERT']))
cert = open(local_path, 'r').read()
assert cert == 'CERTDATA'
else:
assert env['K8S_AUTH_VERIFY_SSL'] == 'False'
awx/main/utils/execution_environments.py (new file)
@@ -0,0 +1,9 @@
from django.conf import settings

from awx.main.models.execution_environments import ExecutionEnvironment


def get_execution_environment_default():
    if settings.DEFAULT_EXECUTION_ENVIRONMENT is not None:
        return settings.DEFAULT_EXECUTION_ENVIRONMENT
    return ExecutionEnvironment.objects.filter(organization=None, managed_by_tower=True).first()
@@ -68,17 +68,12 @@ DATABASES = {
# the K8S cluster where awx itself is running)
IS_K8S = False

# TODO: remove this setting in favor of a default execution environment
AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE = 'quay.io/ansible/awx-ee'

AWX_CONTAINER_GROUP_KEEP_POD = False
AWX_CONTAINER_GROUP_K8S_API_TIMEOUT = 10
AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES = 100
AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY = 5
AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE = os.getenv('MY_POD_NAMESPACE', 'default')

# TODO: remove this setting in favor of a default execution environment
AWX_CONTAINER_GROUP_DEFAULT_IMAGE = AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE

# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
#
@@ -182,8 +177,15 @@ REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
PROXY_IP_ALLOWED_LIST = []

CUSTOM_VENV_PATHS = []

# Warning: this is a placeholder for a configure tower-in-tower setting
# This should not be set via a file.
DEFAULT_EXECUTION_ENVIRONMENT = None

# This list is used for creating default EEs when running awx-manage create_preload_data.
# Should be ordered from highest to lowest precedence.
DEFAULT_EXECUTION_ENVIRONMENTS = [{'name': 'AWX EE 0.1.1', 'image': 'quay.io/ansible/awx-ee:0.1.1'}]

# Note: This setting may be overridden by database settings.
STDOUT_MAX_BYTES_DISPLAY = 1048576
@@ -77,7 +77,8 @@
"resizeOrientation",
"src",
"theme",
"gridColumns"
"gridColumns",
"rows"
],
"ignore": ["Ansible", "Tower", "JSON", "YAML", "lg"],
"ignoreComponent": [
@@ -5,6 +5,16 @@ class ExecutionEnvironments extends Base {
super(http);
this.baseUrl = '/api/v2/execution_environments/';
}

readUnifiedJobTemplates(id, params) {
return this.http.get(`${this.baseUrl}${id}/unified_job_templates/`, {
params,
});
}

readUnifiedJobTemplateOptions(id) {
return this.http.options(`${this.baseUrl}${id}/unified_job_templates/`);
}
}

export default ExecutionEnvironments;
@@ -36,10 +36,8 @@ class Organizations extends InstanceGroupsMixin(NotificationsMixin(Base)) {
});
}

readExecutionEnvironmentsOptions(id, params) {
return this.http.options(`${this.baseUrl}${id}/execution_environments/`, {
params,
});
readExecutionEnvironmentsOptions(id) {
return this.http.options(`${this.baseUrl}${id}/execution_environments/`);
}

createUser(id, data) {
@@ -1,5 +1,5 @@
import React, { useEffect, useRef, useCallback } from 'react';
import { oneOf, bool, number, string, func } from 'prop-types';
import { oneOf, bool, number, string, func, oneOfType } from 'prop-types';
import ReactAce from 'react-ace';
import 'ace-builds/src-noconflict/mode-json';
import 'ace-builds/src-noconflict/mode-javascript';
@@ -77,6 +77,13 @@ function CodeEditor({
className,
i18n,
}) {
if (rows && typeof rows !== 'number' && rows !== 'auto') {
// eslint-disable-next-line no-console
console.warning(
`CodeEditor: Unexpected value for 'rows': ${rows}; expected number or 'auto'`
);
}

const wrapper = useRef(null);
const editor = useRef(null);

@@ -117,7 +124,8 @@ function CodeEditor({
jinja2: 'django',
};

const numRows = fullHeight ? value.split('\n').length : rows;
const numRows = rows === 'auto' ? value.split('\n').length : rows;
const height = fullHeight ? '50vh' : `${numRows * LINE_HEIGHT + PADDING}px`;

return (
<>
@@ -132,7 +140,7 @@
editorProps={{ $blockScrolling: true }}
fontSize={16}
width="100%"
height={`${numRows * LINE_HEIGHT + PADDING}px`}
height={height}
hasErrors={hasErrors}
setOptions={{
readOnly,
@@ -178,7 +186,7 @@ CodeEditor.propTypes = {
readOnly: bool,
hasErrors: bool,
fullHeight: bool,
rows: number,
rows: oneOfType([number, string]),
className: string,
};
CodeEditor.defaultProps = {
@ -1,8 +1,16 @@
|
||||
import 'styled-components/macro';
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { node, number, oneOfType, shape, string, arrayOf } from 'prop-types';
|
||||
import { Trans, withI18n } from '@lingui/react';
|
||||
import { Split, SplitItem, TextListItemVariants } from '@patternfly/react-core';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import {
|
||||
Split,
|
||||
SplitItem,
|
||||
TextListItemVariants,
|
||||
Button,
|
||||
Modal,
|
||||
} from '@patternfly/react-core';
|
||||
import { ExpandArrowsAltIcon } from '@patternfly/react-icons';
|
||||
import { DetailName, DetailValue } from '../DetailList';
|
||||
import MultiButtonToggle from '../MultiButtonToggle';
|
||||
import Popover from '../Popover';
|
||||
@ -29,13 +37,14 @@ function getValueAsMode(value, mode) {
|
||||
return mode === YAML_MODE ? jsonToYaml(value) : yamlToJson(value);
|
||||
}
|
||||
|
||||
function VariablesDetail({ dataCy, helpText, value, label, rows, fullHeight }) {
|
||||
function VariablesDetail({ dataCy, helpText, value, label, rows, i18n }) {
|
||||
const [mode, setMode] = useState(
|
||||
isJsonObject(value) || isJsonString(value) ? JSON_MODE : YAML_MODE
|
||||
);
|
||||
const [currentValue, setCurrentValue] = useState(
|
||||
isJsonObject(value) ? JSON.stringify(value, null, 2) : value || '---'
|
||||
);
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
const [error, setError] = useState(null);
|
||||
|
||||
useEffect(() => {
|
||||
@ -60,7 +69,112 @@ function VariablesDetail({ dataCy, helpText, value, label, rows, fullHeight }) {
|
||||
fullWidth
|
||||
css="grid-column: 1 / -1"
|
||||
>
|
||||
<Split hasGutter>
|
||||
<ModeToggle
|
||||
label={label}
|
||||
helpText={helpText}
|
||||
dataCy={dataCy}
|
||||
mode={mode}
|
||||
setMode={setMode}
|
||||
currentValue={currentValue}
|
||||
setCurrentValue={setCurrentValue}
|
||||
setError={setError}
|
||||
onExpand={() => setIsExpanded(true)}
|
||||
i18n={i18n}
|
||||
/>
|
||||
</DetailName>
|
||||
<DetailValue
|
||||
data-cy={valueCy}
|
||||
component={TextListItemVariants.dd}
|
||||
fullWidth
|
||||
css="grid-column: 1 / -1; margin-top: -20px"
|
||||
>
|
||||
<CodeEditor
|
||||
mode={mode}
|
||||
value={currentValue}
|
||||
readOnly
|
||||
rows={rows}
|
||||
css="margin-top: 10px"
|
||||
/>
|
||||
{error && (
|
||||
<div
|
||||
css="color: var(--pf-global--danger-color--100);
|
||||
font-size: var(--pf-global--FontSize--sm"
|
||||
>
|
||||
{i18n._(t`Error:`)} {error.message}
|
||||
</div>
|
||||
)}
|
||||
</DetailValue>
|
||||
<Modal
|
||||
variant="xlarge"
|
||||
title={label}
|
||||
isOpen={isExpanded}
|
||||
onClose={() => setIsExpanded(false)}
|
||||
actions={[
|
||||
<Button
|
||||
aria-label={i18n._(t`Done`)}
|
||||
key="select"
|
||||
variant="primary"
|
||||
onClick={() => setIsExpanded(false)}
|
||||
ouiaId={`${dataCy}-unexpand`}
|
||||
>
|
||||
{i18n._(t`Done`)}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
<div className="pf-c-form">
|
||||
<ModeToggle
|
||||
label={label}
|
||||
helpText={helpText}
|
||||
dataCy={dataCy}
|
||||
mode={mode}
|
||||
setMode={setMode}
|
||||
currentValue={currentValue}
|
||||
setCurrentValue={setCurrentValue}
|
||||
setError={setError}
|
||||
i18n={i18n}
|
||||
/>
|
||||
<CodeEditor
|
||||
mode={mode}
|
||||
value={currentValue}
|
||||
readOnly
|
||||
rows={rows}
|
||||
fullHeight
|
||||
css="margin-top: 10px"
|
||||
/>
|
||||
</div>
|
||||
</Modal>
|
||||
</>
|
||||
);
|
||||
}
|
||||
VariablesDetail.propTypes = {
|
||||
value: oneOfType([shape({}), arrayOf(string), string]).isRequired,
|
||||
label: node.isRequired,
|
||||
rows: oneOfType([number, string]),
|
||||
dataCy: string,
|
||||
helpText: string,
|
||||
};
|
||||
VariablesDetail.defaultProps = {
|
||||
rows: null,
|
||||
dataCy: '',
|
||||
helpText: '',
|
||||
};
|
||||
|
||||
function ModeToggle({
|
||||
label,
|
||||
helpText,
|
||||
dataCy,
|
||||
currentValue,
|
||||
setCurrentValue,
|
||||
mode,
|
||||
setMode,
|
||||
setError,
|
||||
onExpand,
|
||||
i18n,
|
||||
}) {
|
||||
return (
|
||||
<Split hasGutter>
|
||||
<SplitItem isFilled>
|
||||
<Split hasGutter css="align-items: baseline">
|
||||
<SplitItem>
|
||||
<div className="pf-c-form__label">
|
||||
<span
|
||||
@ -92,44 +206,21 @@ function VariablesDetail({ dataCy, helpText, value, label, rows, fullHeight }) {
|
||||
/>
|
||||
</SplitItem>
|
||||
</Split>
|
||||
</DetailName>
|
||||
<DetailValue
|
||||
data-cy={valueCy}
|
||||
component={TextListItemVariants.dd}
|
||||
fullWidth
|
||||
css="grid-column: 1 / -1; margin-top: -20px"
|
||||
>
|
||||
<CodeEditor
|
||||
mode={mode}
|
||||
value={currentValue}
|
||||
readOnly
|
||||
rows={rows}
|
||||
fullHeight={fullHeight}
|
||||
css="margin-top: 10px"
|
||||
/>
|
||||
{error && (
|
||||
<div
|
||||
css="color: var(--pf-global--danger-color--100);
|
||||
font-size: var(--pf-global--FontSize--sm"
|
||||
</SplitItem>
|
||||
{onExpand && (
|
||||
<SplitItem>
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand input`)}
|
||||
onClick={onExpand}
|
||||
ouiaId={`${dataCy}-expand`}
|
||||
>
|
||||
<Trans>Error:</Trans> {error.message}
|
||||
</div>
|
||||
)}
|
||||
</DetailValue>
|
||||
</>
|
||||
<ExpandArrowsAltIcon />
|
||||
</Button>
|
||||
</SplitItem>
|
||||
)}
|
||||
</Split>
|
||||
);
|
||||
}
|
||||
VariablesDetail.propTypes = {
|
||||
value: oneOfType([shape({}), arrayOf(string), string]).isRequired,
|
||||
label: node.isRequired,
|
||||
rows: number,
|
||||
dataCy: string,
|
||||
helpText: string,
|
||||
};
|
||||
VariablesDetail.defaultProps = {
|
||||
rows: null,
|
||||
dataCy: '',
|
||||
helpText: '',
|
||||
};
|
||||
|
||||
export default withI18n()(VariablesDetail);
|
||||
|
||||
@ -4,7 +4,8 @@ import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { useField } from 'formik';
|
||||
import styled from 'styled-components';
|
||||
import { Split, SplitItem } from '@patternfly/react-core';
|
||||
import { Split, SplitItem, Button, Modal } from '@patternfly/react-core';
|
||||
import { ExpandArrowsAltIcon } from '@patternfly/react-icons';
|
||||
import { CheckboxField } from '../FormField';
|
||||
import MultiButtonToggle from '../MultiButtonToggle';
|
||||
import { yamlToJson, jsonToYaml, isJsonString } from '../../util/yaml';
|
||||
@ -20,6 +21,7 @@ const FieldHeader = styled.div`
|
||||
|
||||
const StyledCheckboxField = styled(CheckboxField)`
|
||||
--pf-c-check__label--FontSize: var(--pf-c-form__label--FontSize);
|
||||
margin-left: auto;
|
||||
`;
|
||||
|
||||
function VariablesField({
|
||||
@ -31,10 +33,92 @@ function VariablesField({
|
||||
promptId,
|
||||
tooltip,
|
||||
}) {
|
||||
const [field, meta, helpers] = useField(name);
|
||||
const [field, meta] = useField(name);
|
||||
const [mode, setMode] = useState(
|
||||
isJsonString(field.value) ? JSON_MODE : YAML_MODE
|
||||
);
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
|
||||
return (
|
||||
<>
|
||||
<VariablesFieldInternals
|
||||
i18n={i18n}
|
||||
id={id}
|
||||
name={name}
|
||||
label={label}
|
||||
readOnly={readOnly}
|
||||
promptId={promptId}
|
||||
tooltip={tooltip}
|
||||
onExpand={() => setIsExpanded(true)}
|
||||
mode={mode}
|
||||
setMode={setMode}
|
||||
/>
|
||||
<Modal
|
||||
variant="xlarge"
|
||||
title={label}
|
||||
isOpen={isExpanded}
|
||||
onClose={() => setIsExpanded(false)}
|
||||
actions={[
|
||||
<Button
|
||||
aria-label={i18n._(t`Done`)}
|
||||
key="select"
|
||||
variant="primary"
|
||||
onClick={() => setIsExpanded(false)}
|
||||
ouiaId={`${id}-variables-unexpand`}
|
||||
>
|
||||
{i18n._(t`Done`)}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
<div className="pf-c-form">
|
||||
<VariablesFieldInternals
|
||||
i18n={i18n}
|
||||
id={`${id}-expanded`}
|
||||
name={name}
|
||||
label={label}
|
||||
readOnly={readOnly}
|
||||
promptId={promptId}
|
||||
tooltip={tooltip}
|
||||
fullHeight
|
||||
mode={mode}
|
||||
setMode={setMode}
|
||||
/>
|
||||
</div>
|
||||
</Modal>
|
||||
{meta.error ? (
|
||||
<div className="pf-c-form__helper-text pf-m-error" aria-live="polite">
|
||||
{meta.error}
|
||||
</div>
|
||||
) : null}
|
||||
</>
|
||||
);
|
||||
}
|
||||
VariablesField.propTypes = {
|
||||
id: string.isRequired,
|
||||
name: string.isRequired,
|
||||
label: string.isRequired,
|
||||
readOnly: bool,
|
||||
promptId: string,
|
||||
};
|
||||
VariablesField.defaultProps = {
|
||||
readOnly: false,
|
||||
promptId: null,
|
||||
};
|
||||
|
||||
function VariablesFieldInternals({
|
||||
i18n,
|
||||
id,
|
||||
name,
|
||||
label,
|
||||
readOnly,
|
||||
promptId,
|
||||
tooltip,
|
||||
fullHeight,
|
||||
mode,
|
||||
setMode,
|
||||
onExpand,
|
||||
}) {
|
||||
const [field, meta, helpers] = useField(name);
|
||||
|
||||
return (
|
||||
<div className="pf-c-form__group">
|
||||
@ -75,6 +159,16 @@ function VariablesField({
|
||||
name="ask_variables_on_launch"
|
||||
/>
|
||||
)}
|
||||
{onExpand && (
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand input`)}
|
||||
onClick={onExpand}
|
||||
ouiaId={`${id}-variables-expand`}
|
||||
>
|
||||
<ExpandArrowsAltIcon />
|
||||
</Button>
|
||||
)}
|
||||
</FieldHeader>
|
||||
<CodeEditor
|
||||
mode={mode}
|
||||
@ -83,26 +177,11 @@ function VariablesField({
|
||||
onChange={newVal => {
|
||||
helpers.setValue(newVal);
|
||||
}}
|
||||
fullHeight={fullHeight}
|
||||
hasErrors={!!meta.error}
|
||||
/>
|
||||
{meta.error ? (
|
||||
<div className="pf-c-form__helper-text pf-m-error" aria-live="polite">
|
||||
{meta.error}
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
VariablesField.propTypes = {
|
||||
id: string.isRequired,
|
||||
name: string.isRequired,
|
||||
label: string.isRequired,
|
||||
readOnly: bool,
|
||||
promptId: string,
|
||||
};
|
||||
VariablesField.defaultProps = {
|
||||
readOnly: false,
|
||||
promptId: null,
|
||||
};
|
||||
|
||||
export default withI18n()(VariablesField);
|
||||
|
||||
@ -32,7 +32,7 @@ describe('VariablesField', () => {
|
||||
</Formik>
|
||||
);
|
||||
const buttons = wrapper.find('Button');
|
||||
expect(buttons).toHaveLength(2);
|
||||
expect(buttons).toHaveLength(3);
|
||||
expect(buttons.at(0).prop('variant')).toEqual('primary');
|
||||
expect(buttons.at(1).prop('variant')).toEqual('secondary');
|
||||
await act(async () => {
|
||||
@ -136,4 +136,27 @@ describe('VariablesField', () => {
|
||||
|
||||
expect(wrapper.find('CodeEditor').prop('mode')).toEqual('javascript');
|
||||
});
|
||||
|
||||
it('should open modal when expanded', async () => {
|
||||
const value = '---';
|
||||
const wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ variables: value }} onSubmit={jest.fn()}>
|
||||
{formik => (
|
||||
<form onSubmit={formik.handleSubmit}>
|
||||
<VariablesField id="the-field" name="variables" label="Variables" />
|
||||
<button type="submit" id="submit">
|
||||
Submit
|
||||
</button>
|
||||
</form>
|
||||
)}
|
||||
</Formik>
|
||||
);
|
||||
expect(wrapper.find('Modal').prop('isOpen')).toEqual(false);
|
||||
|
||||
wrapper.find('Button[variant="plain"]').invoke('onClick')();
|
||||
wrapper.update();
|
||||
|
||||
expect(wrapper.find('Modal').prop('isOpen')).toEqual(true);
|
||||
expect(wrapper.find('Modal CodeEditor')).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
@ -14,15 +14,7 @@ import { DetailName, DetailValue } from './Detail';
|
||||
import CodeEditor from '../CodeEditor';
|
||||
import Popover from '../Popover';
|
||||
|
||||
function CodeDetail({
|
||||
value,
|
||||
label,
|
||||
mode,
|
||||
rows,
|
||||
fullHeight,
|
||||
helpText,
|
||||
dataCy,
|
||||
}) {
|
||||
function CodeDetail({ value, label, mode, rows, helpText, dataCy }) {
|
||||
const labelCy = dataCy ? `${dataCy}-label` : null;
|
||||
const valueCy = dataCy ? `${dataCy}-value` : null;
|
||||
|
||||
@ -57,7 +49,6 @@ function CodeDetail({
|
||||
value={value}
|
||||
readOnly
|
||||
rows={rows}
|
||||
fullHeight={fullHeight}
|
||||
css="margin-top: 10px"
|
||||
/>
|
||||
</DetailValue>
|
||||
@ -69,7 +60,7 @@ CodeDetail.propTypes = {
|
||||
label: node.isRequired,
|
||||
dataCy: string,
|
||||
helpText: string,
|
||||
rows: number,
|
||||
rows: oneOfType(number, string),
|
||||
mode: oneOf(['javascript', 'yaml', 'jinja2']).isRequired,
|
||||
};
|
||||
CodeDetail.defaultProps = {
|
||||
|
||||
@ -183,6 +183,7 @@ function createNode(state, node) {
|
||||
fullUnifiedJobTemplate: node.nodeResource,
|
||||
isInvalidLinkTarget: false,
|
||||
promptValues: node.promptValues,
|
||||
all_parents_must_converge: node.all_parents_must_converge,
|
||||
});
|
||||
|
||||
// Ensures that root nodes appear to always run
|
||||
@ -657,10 +658,19 @@ function updateLink(state, linkType) {
|
||||
|
||||
function updateNode(state, editedNode) {
|
||||
const { nodeToEdit, nodes } = state;
|
||||
const { nodeResource, launchConfig, promptValues } = editedNode;
|
||||
const {
|
||||
nodeResource,
|
||||
launchConfig,
|
||||
promptValues,
|
||||
all_parents_must_converge,
|
||||
} = editedNode;
|
||||
const newNodes = [...nodes];
|
||||
|
||||
const matchingNode = newNodes.find(node => node.id === nodeToEdit.id);
|
||||
matchingNode.all_parents_must_converge = all_parents_must_converge;
|
||||
if (matchingNode.originalNodeObject) {
|
||||
delete matchingNode.originalNodeObject.all_parents_must_converge;
|
||||
}
|
||||
matchingNode.fullUnifiedJobTemplate = nodeResource;
|
||||
matchingNode.isEdited = true;
|
||||
matchingNode.launchConfig = launchConfig;
|
||||
|
||||
@ -20,6 +20,7 @@ import ContentLoading from '../../components/ContentLoading';
|
||||
|
||||
import ExecutionEnvironmentDetails from './ExecutionEnvironmentDetails';
|
||||
import ExecutionEnvironmentEdit from './ExecutionEnvironmentEdit';
|
||||
import ExecutionEnvironmentTemplateList from './ExecutionEnvironmentTemplate';
|
||||
|
||||
function ExecutionEnvironment({ i18n, setBreadcrumb }) {
|
||||
const { id } = useParams();
|
||||
@ -64,6 +65,11 @@ function ExecutionEnvironment({ i18n, setBreadcrumb }) {
|
||||
link: `/execution_environments/${id}/details`,
|
||||
id: 0,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Templates`),
|
||||
link: `/execution_environments/${id}/templates`,
|
||||
id: 1,
|
||||
},
|
||||
];
|
||||
|
||||
if (!isLoading && contentError) {
|
||||
@ -114,6 +120,11 @@ function ExecutionEnvironment({ i18n, setBreadcrumb }) {
|
||||
executionEnvironment={executionEnvironment}
|
||||
/>
|
||||
</Route>
|
||||
<Route path="/execution_environments/:id/templates">
|
||||
<ExecutionEnvironmentTemplateList
|
||||
executionEnvironment={executionEnvironment}
|
||||
/>
|
||||
</Route>
|
||||
</>
|
||||
)}
|
||||
</Switch>
|
||||
|
||||
@ -64,6 +64,11 @@ function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) {
|
||||
value={description}
|
||||
dataCy="execution-environment-detail-description"
|
||||
/>
|
||||
<Detail
|
||||
label={i18n._(t`Managed by Tower`)}
|
||||
value={managedByTower ? i18n._(t`True`) : i18n._(t`False`)}
|
||||
dataCy="execution-environment-managed-by-tower"
|
||||
/>
|
||||
<Detail
|
||||
label={i18n._(t`Organization`)}
|
||||
value={
|
||||
@ -79,6 +84,7 @@ function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) {
|
||||
}
|
||||
dataCy="execution-environment-detail-organization"
|
||||
/>
|
||||
|
||||
<Detail
|
||||
label={i18n._(t`Pull`)}
|
||||
value={pull === '' ? i18n._(t`Missing`) : toTitleCase(pull)}
|
||||
@ -110,27 +116,31 @@ function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) {
|
||||
</DetailList>
|
||||
{!managedByTower && (
|
||||
<CardActionsRow>
|
||||
<Button
|
||||
ouiaId="execution-environment-detail-edit-button"
|
||||
aria-label={i18n._(t`edit`)}
|
||||
component={Link}
|
||||
to={`/execution_environments/${id}/edit`}
|
||||
>
|
||||
{i18n._(t`Edit`)}
|
||||
</Button>
|
||||
<DeleteButton
|
||||
name={image}
|
||||
modalTitle={i18n._(t`Delete Execution Environment`)}
|
||||
onConfirm={deleteExecutionEnvironment}
|
||||
isDisabled={isLoading}
|
||||
ouiaId="delete-button"
|
||||
deleteDetailsRequests={deleteDetailsRequests}
|
||||
deleteMessage={i18n._(
|
||||
t`This execution environment is currently being used by other resources. Are you sure you want to delete it?`
|
||||
)}
|
||||
>
|
||||
{i18n._(t`Delete`)}
|
||||
</DeleteButton>
|
||||
{summary_fields.user_capabilities?.edit && (
|
||||
<Button
|
||||
ouiaId="execution-environment-detail-edit-button"
|
||||
aria-label={i18n._(t`edit`)}
|
||||
component={Link}
|
||||
to={`/execution_environments/${id}/edit`}
|
||||
>
|
||||
{i18n._(t`Edit`)}
|
||||
</Button>
|
||||
)}
|
||||
{summary_fields.user_capabilities?.delete && (
|
||||
<DeleteButton
|
||||
name={image}
|
||||
modalTitle={i18n._(t`Delete Execution Environment`)}
|
||||
onConfirm={deleteExecutionEnvironment}
|
||||
isDisabled={isLoading}
|
||||
ouiaId="delete-button"
|
||||
deleteDetailsRequests={deleteDetailsRequests}
|
||||
deleteMessage={i18n._(
|
||||
t`This execution environment is currently being used by other resources. Are you sure you want to delete it?`
|
||||
)}
|
||||
>
|
||||
{i18n._(t`Delete`)}
|
||||
</DeleteButton>
|
||||
)}
|
||||
</CardActionsRow>
|
||||
)}
|
||||
|
||||
|
||||
@ -2,7 +2,10 @@ import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { createMemoryHistory } from 'history';
|
||||
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
import {
|
||||
mountWithContexts,
|
||||
waitForElement,
|
||||
} from '../../../../testUtils/enzymeHelpers';
|
||||
import { ExecutionEnvironmentsAPI } from '../../../api';
|
||||
|
||||
import ExecutionEnvironmentDetails from './ExecutionEnvironmentDetails';
|
||||
@ -22,6 +25,11 @@ const executionEnvironment = {
|
||||
credential: '/api/v2/credentials/4/',
|
||||
},
|
||||
summary_fields: {
|
||||
user_capabilities: {
|
||||
edit: true,
|
||||
delete: true,
|
||||
copy: true,
|
||||
},
|
||||
credential: {
|
||||
id: 4,
|
||||
name: 'Container Registry',
|
||||
@ -73,6 +81,9 @@ describe('<ExecutionEnvironmentDetails/>', () => {
|
||||
expect(
|
||||
wrapper.find('Detail[label="Credential"]').prop('value').props.children
|
||||
).toEqual(executionEnvironment.summary_fields.credential.name);
|
||||
expect(
|
||||
wrapper.find('Detail[label="Managed by Tower"]').prop('value')
|
||||
).toEqual('False');
|
||||
const dates = wrapper.find('UserDateDetail');
|
||||
expect(dates).toHaveLength(2);
|
||||
expect(dates.at(0).prop('date')).toEqual(executionEnvironment.created);
|
||||
@ -167,6 +178,9 @@ describe('<ExecutionEnvironmentDetails/>', () => {
|
||||
expect(
|
||||
wrapper.find('Detail[label="Credential"]').prop('value').props.children
|
||||
).toEqual(executionEnvironment.summary_fields.credential.name);
|
||||
expect(
|
||||
wrapper.find('Detail[label="Managed by Tower"]').prop('value')
|
||||
).toEqual('True');
|
||||
const dates = wrapper.find('UserDateDetail');
|
||||
expect(dates).toHaveLength(2);
|
||||
expect(dates.at(0).prop('date')).toEqual(executionEnvironment.created);
|
||||
@ -175,6 +189,7 @@ describe('<ExecutionEnvironmentDetails/>', () => {
|
||||
|
||||
expect(wrapper.find('Button[aria-label="Delete"]')).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should have proper number of delete detail requests', async () => {
|
||||
const history = createMemoryHistory({
|
||||
initialEntries: ['/execution_environments/42/details'],
|
||||
@ -193,4 +208,71 @@ describe('<ExecutionEnvironmentDetails/>', () => {
|
||||
wrapper.find('DeleteButton').prop('deleteDetailsRequests')
|
||||
).toHaveLength(4);
|
||||
});
|
||||
|
||||
test('should show edit button for users with edit permission', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentDetails
|
||||
executionEnvironment={executionEnvironment}
|
||||
/>
|
||||
);
|
||||
});
|
||||
const editButton = await waitForElement(
|
||||
wrapper,
|
||||
'ExecutionEnvironmentDetails Button[aria-label="edit"]'
|
||||
);
|
||||
expect(editButton.text()).toEqual('Edit');
|
||||
expect(editButton.prop('to')).toBe('/execution_environments/17/edit');
|
||||
});
|
||||
|
||||
test('should hide edit button for users without edit permission', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentDetails
|
||||
executionEnvironment={{
|
||||
...executionEnvironment,
|
||||
summary_fields: { user_capabilities: { edit: false } },
|
||||
}}
|
||||
/>
|
||||
);
|
||||
});
|
||||
await waitForElement(wrapper, 'ExecutionEnvironmentDetails');
|
||||
expect(
|
||||
wrapper.find('ExecutionEnvironmentDetails Button[aria-label="edit"]')
|
||||
.length
|
||||
).toBe(0);
|
||||
});
|
||||
|
||||
test('should show delete button for users with delete permission', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentDetails
|
||||
executionEnvironment={executionEnvironment}
|
||||
/>
|
||||
);
|
||||
});
|
||||
const deleteButton = await waitForElement(
|
||||
wrapper,
|
||||
'ExecutionEnvironmentDetails Button[aria-label="Delete"]'
|
||||
);
|
||||
expect(deleteButton.text()).toEqual('Delete');
|
||||
});
|
||||
|
||||
test('should hide delete button for users without delete permission', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentDetails
|
||||
executionEnvironment={{
|
||||
...executionEnvironment,
|
||||
summary_fields: { user_capabilities: { delete: false } },
|
||||
}}
|
||||
/>
|
||||
);
|
||||
});
|
||||
await waitForElement(wrapper, 'ExecutionEnvironmentDetails');
|
||||
expect(
|
||||
wrapper.find('ExecutionEnvironmentDetails Button[aria-label="Delete"]')
|
||||
.length
|
||||
).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
@ -0,0 +1,139 @@
|
||||
import React, { useEffect, useCallback } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { Card } from '@patternfly/react-core';
|
||||
|
||||
import { ExecutionEnvironmentsAPI } from '../../../api';
|
||||
import { getQSConfig, parseQueryString } from '../../../util/qs';
|
||||
import useRequest from '../../../util/useRequest';
|
||||
import DatalistToolbar from '../../../components/DataListToolbar';
|
||||
import PaginatedDataList from '../../../components/PaginatedDataList';
|
||||
|
||||
import ExecutionEnvironmentTemplateListItem from './ExecutionEnvironmentTemplateListItem';
|
||||
|
||||
const QS_CONFIG = getQSConfig(
|
||||
'execution_environments',
|
||||
{
|
||||
page: 1,
|
||||
page_size: 20,
|
||||
order_by: 'name',
|
||||
type: 'job_template,workflow_job_template',
|
||||
},
|
||||
['id', 'page', 'page_size']
|
||||
);
|
||||
|
||||
function ExecutionEnvironmentTemplateList({ i18n, executionEnvironment }) {
|
||||
const { id } = executionEnvironment;
|
||||
const location = useLocation();
|
||||
|
||||
const {
|
||||
error: contentError,
|
||||
isLoading,
|
||||
request: fetchTemplates,
|
||||
result: {
|
||||
templates,
|
||||
templatesCount,
|
||||
relatedSearchableKeys,
|
||||
searchableKeys,
|
||||
},
|
||||
} = useRequest(
|
||||
useCallback(async () => {
|
||||
const params = parseQueryString(QS_CONFIG, location.search);
|
||||
|
||||
const [response, responseActions] = await Promise.all([
|
||||
ExecutionEnvironmentsAPI.readUnifiedJobTemplates(id, params),
|
||||
ExecutionEnvironmentsAPI.readUnifiedJobTemplateOptions(id),
|
||||
]);
|
||||
|
||||
return {
|
||||
templates: response.data.results,
|
||||
templatesCount: response.data.count,
|
||||
actions: responseActions.data.actions,
|
||||
relatedSearchableKeys: (
|
||||
responseActions?.data?.related_search_fields || []
|
||||
).map(val => val.slice(0, -8)),
|
||||
searchableKeys: Object.keys(
|
||||
responseActions.data.actions?.GET || {}
|
||||
).filter(key => responseActions.data.actions?.GET[key].filterable),
|
||||
};
|
||||
}, [location, id]),
|
||||
{
|
||||
templates: [],
|
||||
templatesCount: 0,
|
||||
actions: {},
|
||||
relatedSearchableKeys: [],
|
||||
searchableKeys: [],
|
||||
}
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
fetchTemplates();
|
||||
}, [fetchTemplates]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Card>
|
||||
<PaginatedDataList
|
||||
contentError={contentError}
|
||||
hasContentLoading={isLoading}
|
||||
items={templates}
|
||||
itemCount={templatesCount}
|
||||
pluralizedItemName={i18n._(t`Templates`)}
|
||||
qsConfig={QS_CONFIG}
|
||||
toolbarSearchableKeys={searchableKeys}
|
||||
toolbarRelatedSearchableKeys={relatedSearchableKeys}
|
||||
toolbarSearchColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Type`),
|
||||
key: 'or__type',
|
||||
options: [
|
||||
[`job_template`, i18n._(t`Job Template`)],
|
||||
[`workflow_job_template`, i18n._(t`Workflow Template`)],
|
||||
],
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
key: 'created_by__username__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
key: 'modified_by__username__icontains',
|
||||
},
|
||||
]}
|
||||
toolbarSortColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
key: 'name',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created`),
|
||||
key: 'created',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified`),
|
||||
key: 'modified',
|
||||
},
|
||||
]}
|
||||
renderToolbar={props => (
|
||||
<DatalistToolbar {...props} qsConfig={QS_CONFIG} />
|
||||
)}
|
||||
renderItem={template => (
|
||||
<ExecutionEnvironmentTemplateListItem
|
||||
key={template.id}
|
||||
template={template}
|
||||
detailUrl={`/templates/${template.type}/${template.id}/details`}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(ExecutionEnvironmentTemplateList);
|
||||
@ -0,0 +1,116 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
|
||||
import {
|
||||
mountWithContexts,
|
||||
waitForElement,
|
||||
} from '../../../../testUtils/enzymeHelpers';
|
||||
|
||||
import { ExecutionEnvironmentsAPI } from '../../../api';
|
||||
import ExecutionEnvironmentTemplateList from './ExecutionEnvironmentTemplateList';
|
||||
|
||||
jest.mock('../../../api/');
|
||||
|
||||
const templates = {
|
||||
data: {
|
||||
count: 3,
|
||||
results: [
|
||||
{
|
||||
id: 1,
|
||||
type: 'job_template',
|
||||
name: 'Foo',
|
||||
url: '/api/v2/job_templates/1/',
|
||||
related: {
|
||||
execution_environment: '/api/v2/execution_environments/1/',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
type: 'workflow_job_template',
|
||||
name: 'Bar',
|
||||
url: '/api/v2/workflow_job_templates/2/',
|
||||
related: {
|
||||
execution_environment: '/api/v2/execution_environments/1/',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
type: 'job_template',
|
||||
name: 'Fuzz',
|
||||
url: '/api/v2/job_templates/3/',
|
||||
related: {
|
||||
execution_environment: '/api/v2/execution_environments/1/',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const mockExecutionEnvironment = {
|
||||
id: 1,
|
||||
name: 'Default EE',
|
||||
};
|
||||
|
||||
const options = { data: { actions: { GET: {} } } };
|
||||
|
||||
describe('<ExecutionEnvironmentTemplateList/>', () => {
|
||||
let wrapper;
|
||||
|
||||
test('should mount successfully', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentTemplateList
|
||||
executionEnvironment={mockExecutionEnvironment}
|
||||
/>
|
||||
);
|
||||
});
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'ExecutionEnvironmentTemplateList',
|
||||
el => el.length > 0
|
||||
);
|
||||
});
|
||||
|
||||
test('should have data fetched and render 3 rows', async () => {
|
||||
ExecutionEnvironmentsAPI.readUnifiedJobTemplates.mockResolvedValue(
|
||||
templates
|
||||
);
|
||||
|
||||
ExecutionEnvironmentsAPI.readUnifiedJobTemplateOptions.mockResolvedValue(
|
||||
options
|
||||
);
|
||||
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentTemplateList
|
||||
executionEnvironment={mockExecutionEnvironment}
|
||||
/>
|
||||
);
|
||||
});
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'ExecutionEnvironmentTemplateList',
|
||||
el => el.length > 0
|
||||
);
|
||||
|
||||
expect(wrapper.find('ExecutionEnvironmentTemplateListItem').length).toBe(3);
|
||||
expect(ExecutionEnvironmentsAPI.readUnifiedJobTemplates).toBeCalled();
|
||||
expect(ExecutionEnvironmentsAPI.readUnifiedJobTemplateOptions).toBeCalled();
|
||||
});
|
||||
|
||||
test('should not render add button', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentTemplateList
|
||||
executionEnvironment={mockExecutionEnvironment}
|
||||
/>
|
||||
);
|
||||
});
|
||||
waitForElement(
|
||||
wrapper,
|
||||
'ExecutionEnvironmentTemplateList',
|
||||
el => el.length > 0
|
||||
);
|
||||
expect(wrapper.find('ToolbarAddButton').length).toBe(0);
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,43 @@
|
||||
import React from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { Link } from 'react-router-dom';
|
||||
import {
|
||||
DataListItem,
|
||||
DataListItemRow,
|
||||
DataListItemCells,
|
||||
} from '@patternfly/react-core';
|
||||
|
||||
import DataListCell from '../../../components/DataListCell';
|
||||
|
||||
function ExecutionEnvironmentTemplateListItem({ template, detailUrl, i18n }) {
|
||||
return (
|
||||
<DataListItem
|
||||
key={template.id}
|
||||
aria-labelledby={`check-action-${template.id}`}
|
||||
id={`${template.id}`}
|
||||
>
|
||||
<DataListItemRow>
|
||||
<DataListItemCells
|
||||
dataListCells={[
|
||||
<DataListCell key="name" aria-label={i18n._(t`Name`)}>
|
||||
<Link to={`${detailUrl}`}>
|
||||
<b>{template.name}</b>
|
||||
</Link>
|
||||
</DataListCell>,
|
||||
<DataListCell
|
||||
key="template-type"
|
||||
aria-label={i18n._(t`Template type`)}
|
||||
>
|
||||
{template.type === 'job_template'
|
||||
? i18n._(t`Job Template`)
|
||||
: i18n._(t`Workflow Job Template`)}
|
||||
</DataListCell>,
|
||||
]}
|
||||
/>
|
||||
</DataListItemRow>
|
||||
</DataListItem>
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(ExecutionEnvironmentTemplateListItem);
|
||||
@ -0,0 +1,48 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
|
||||
import ExecutionEnvironmentTemplateListItem from './ExecutionEnvironmentTemplateListItem';
|
||||
|
||||
describe('<ExecutionEnvironmentTemplateListItem/>', () => {
|
||||
let wrapper;
|
||||
const template = {
|
||||
id: 1,
|
||||
name: 'Foo',
|
||||
type: 'job_template',
|
||||
};
|
||||
|
||||
test('should mount successfully', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentTemplateListItem
|
||||
template={template}
|
||||
detailUrl={`/templates/${template.type}/${template.id}/details`}
|
||||
/>
|
||||
);
|
||||
});
|
||||
expect(wrapper.find('ExecutionEnvironmentTemplateListItem').length).toBe(1);
|
||||
expect(wrapper.find('DataListCell[aria-label="Name"]').text()).toBe(
|
||||
template.name
|
||||
);
|
||||
expect(
|
||||
wrapper.find('DataListCell[aria-label="Template type"]').text()
|
||||
).toBe('Job Template');
|
||||
});
|
||||
|
||||
test('should distinguish template types', async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ExecutionEnvironmentTemplateListItem
|
||||
template={{ ...template, type: 'workflow_job_template' }}
|
||||
detailUrl={`/templates/${template.type}/${template.id}/details`}
|
||||
/>
|
||||
);
|
||||
});
|
||||
expect(wrapper.find('ExecutionEnvironmentTemplateListItem').length).toBe(1);
|
||||
expect(
|
||||
wrapper.find('DataListCell[aria-label="Template type"]').text()
|
||||
).toBe('Workflow Job Template');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1 @@
export { default } from './ExecutionEnvironmentTemplateList';
@@ -36,7 +36,7 @@ function HostFacts({ i18n, host }) {
return (
<CardBody>
<DetailList gutter="sm">
<VariablesDetail label={i18n._(t`Facts`)} fullHeight value={facts} />
<VariablesDetail label={i18n._(t`Facts`)} rows="auto" value={facts} />
</DetailList>
</CardBody>
);
@@ -72,11 +72,12 @@ describe('<InventoryGroupDetail />', () => {
});

test('should open delete modal and then call api to delete the group', async () => {
expect(wrapper.find('Modal').length).toBe(1); // variables modal already mounted
await act(async () => {
wrapper.find('button[aria-label="Delete"]').simulate('click');
});
await waitForElement(wrapper, 'Modal', el => el.length === 1);
expect(wrapper.find('Modal').length).toBe(1);
wrapper.update();
expect(wrapper.find('Modal').length).toBe(2);
await act(async () => {
wrapper.find('Radio[id="radio-delete"]').invoke('onChange')();
});
@@ -35,7 +35,7 @@ function InventoryHostFacts({ i18n, host }) {
return (
<CardBody>
<DetailList gutter="sm">
<VariablesDetail label={i18n._(t`Facts`)} fullHeight value={result} />
<VariablesDetail label={i18n._(t`Facts`)} rows="auto" value={result} />
</DetailList>
</CardBody>
);
@@ -6,7 +6,7 @@ export default function getRowRangePageSize(startIndex, stopIndex) {
page = startIndex + 1;
pageSize = 1;
} else if (stopIndex >= startIndex + 50) {
page = Math.ceil(startIndex / 50);
page = Math.floor(startIndex / 50) + 1;
pageSize = 50;
} else {
for (let i = stopIndex - startIndex + 1; i <= 50; i++) {
@@ -29,4 +29,11 @@ describe('getRowRangePageSize', () => {
firstIndex: 5,
});
});
test('handles range with 0 startIndex', () => {
expect(getRowRangePageSize(0, 50)).toEqual({
page: 1,
pageSize: 50,
firstIndex: 0,
});
});
});
@ -59,6 +59,11 @@ const NodeDefaultLabel = styled.p`
|
||||
white-space: nowrap;
|
||||
`;
|
||||
|
||||
const ConvergenceLabel = styled.p`
|
||||
font-size: 12px;
|
||||
color: #ffffff;
|
||||
`;
|
||||
|
||||
Elapsed.displayName = 'Elapsed';
|
||||
|
||||
function WorkflowOutputNode({ i18n, mouseEnter, mouseLeave, node }) {
|
||||
@ -100,6 +105,30 @@ function WorkflowOutputNode({ i18n, mouseEnter, mouseLeave, node }) {
|
||||
onMouseEnter={mouseEnter}
|
||||
onMouseLeave={mouseLeave}
|
||||
>
|
||||
{(node.all_parents_must_converge ||
|
||||
node?.originalNodeObject?.all_parents_must_converge) && (
|
||||
<>
|
||||
<rect
|
||||
fill={borderColor}
|
||||
height={wfConstants.nodeH / 4}
|
||||
rx={2}
|
||||
ry={2}
|
||||
x={wfConstants.nodeW / 2 - wfConstants.nodeW / 10}
|
||||
y={-wfConstants.nodeH / 4 + 2}
|
||||
stroke={borderColor}
|
||||
strokeWidth="2px"
|
||||
width={wfConstants.nodeW / 5}
|
||||
/>
|
||||
<foreignObject
|
||||
height={wfConstants.nodeH / 4}
|
||||
width={wfConstants.nodeW / 5}
|
||||
x={wfConstants.nodeW / 2 - wfConstants.nodeW / 10 + 7}
|
||||
y={-wfConstants.nodeH / 4 - 1}
|
||||
>
|
||||
<ConvergenceLabel>{i18n._(t`ALL`)}</ConvergenceLabel>
|
||||
</foreignObject>
|
||||
</>
|
||||
)}
|
||||
<rect
|
||||
fill="#FFFFFF"
|
||||
height={wfConstants.nodeH}
|
||||
|
||||
@@ -38,7 +38,7 @@ function OrganizationExecEnvList({ i18n, organization }) {

const [response, responseActions] = await Promise.all([
OrganizationsAPI.readExecutionEnvironments(id, params),
OrganizationsAPI.readExecutionEnvironmentsOptions(id, params),
OrganizationsAPI.readExecutionEnvironmentsOptions(id),
]);

return {
@@ -286,7 +286,7 @@ const ObjectField = withI18n()(({ i18n, name, config, isRequired = false }) => {
>
<CodeEditor
{...field}
fullHeight
rows="auto"
id={name}
mode="javascript"
onChange={value => {
@@ -371,6 +371,7 @@ function JobTemplateDetail({ i18n, template }) {
value={extra_vars}
rows={4}
label={i18n._(t`Variables`)}
dataCy={`jt-details-${template.id}`}
/>
</DetailList>
<CardActionsRow>
@ -19,6 +19,7 @@ function NodeAddModal({ i18n }) {
timeoutMinutes,
timeoutSeconds,
linkType,
convergence,
} = values;

if (values) {
@ -33,8 +34,11 @@ function NodeAddModal({ i18n }) {

const node = {
linkType,
all_parents_must_converge: convergence === 'all',
};

delete values.convergence;

delete values.linkType;

if (values.nodeType === 'workflow_approval_template') {

@ -48,6 +48,7 @@ describe('NodeAddModal', () => {

expect(dispatch).toHaveBeenCalledWith({
node: {
all_parents_must_converge: false,
linkType: 'success',
nodeResource: {
id: 448,

@ -17,11 +17,13 @@ function NodeEditModal({ i18n }) {
nodeType,
timeoutMinutes,
timeoutSeconds,
convergence,
...rest
} = values;
let node;
if (values.nodeType === 'workflow_approval_template') {
node = {
all_parents_must_converge: convergence === 'all',
nodeResource: {
description: approvalDescription,
name: approvalName,
@ -32,6 +34,7 @@ function NodeEditModal({ i18n }) {
} else {
node = {
nodeResource,
all_parents_must_converge: convergence === 'all',
};
if (nodeType === 'job_template' || nodeType === 'workflow_job_template') {
node.promptValues = {

@ -63,6 +63,7 @@ describe('NodeEditModal', () => {
});
expect(dispatch).toHaveBeenCalledWith({
node: {
all_parents_must_converge: false,
nodeResource: { id: 448, name: 'Test JT', type: 'job_template' },
},
type: 'UPDATE_NODE',
@ -101,7 +101,6 @@ function NodeModalForm({
values.extra_data = extraVars && parseVariableField(extraVars);
delete values.extra_vars;
}

onSave(values, launchConfig);
};

@ -357,6 +356,7 @@ const NodeModal = ({ onSave, i18n, askLinkType, title }) => {
approvalDescription: '',
timeoutMinutes: 0,
timeoutSeconds: 0,
convergence: 'any',
linkType: 'success',
nodeResource: nodeToEdit?.fullUnifiedJobTemplate || null,
nodeType: nodeToEdit?.fullUnifiedJobTemplate?.type || 'job_template',

@ -307,6 +307,7 @@ describe('NodeModal', () => {
});
expect(onSave).toBeCalledWith(
{
convergence: 'any',
linkType: 'always',
nodeType: 'job_template',
inventory: { name: 'Foo Inv', id: 1 },
@ -345,6 +346,7 @@ describe('NodeModal', () => {
});
expect(onSave).toBeCalledWith(
{
convergence: 'any',
linkType: 'failure',
nodeResource: {
id: 1,
@ -383,6 +385,7 @@ describe('NodeModal', () => {
});
expect(onSave).toBeCalledWith(
{
convergence: 'any',
linkType: 'failure',
nodeResource: {
id: 1,
@ -422,6 +425,7 @@ describe('NodeModal', () => {
});
expect(onSave).toBeCalledWith(
{
convergence: 'any',
linkType: 'success',
nodeResource: {
id: 1,
@ -506,6 +510,7 @@ describe('NodeModal', () => {
});
expect(onSave).toBeCalledWith(
{
convergence: 'any',
approvalDescription: 'Test Approval Description',
approvalName: 'Test Approval',
linkType: 'always',
@ -605,6 +610,7 @@ describe('NodeModal', () => {

expect(onSave).toBeCalledWith(
{
convergence: 'any',
approvalDescription: 'Test Approval Description',
approvalName: 'Test Approval',
linkType: 'success',
@ -668,6 +674,7 @@ describe('NodeModal', () => {
});
expect(onSave).toBeCalledWith(
{
convergence: 'any',
linkType: 'success',
nodeResource: {
id: 1,
@ -1,13 +1,25 @@
import 'styled-components/macro';
import React from 'react';
import React, { useState } from 'react';
import { withI18n } from '@lingui/react';
import { t, Trans } from '@lingui/macro';
import styled from 'styled-components';
import { useField } from 'formik';
import { Alert, Form, FormGroup, TextInput } from '@patternfly/react-core';
import {
Alert,
Form,
FormGroup,
TextInput,
Select,
SelectVariant,
SelectOption,
} from '@patternfly/react-core';
import { required } from '../../../../../../util/validators';

import { FormFullWidthLayout } from '../../../../../../components/FormLayout';
import {
FormColumnLayout,
FormFullWidthLayout,
} from '../../../../../../components/FormLayout';
import Popover from '../../../../../../components/Popover';
import AnsibleSelect from '../../../../../../components/AnsibleSelect';
import InventorySourcesList from './InventorySourcesList';
import JobTemplatesList from './JobTemplatesList';
@ -44,6 +56,9 @@ function NodeTypeStep({ i18n }) {
const [timeoutSecondsField, , timeoutSecondsHelpers] = useField(
'timeoutSeconds'
);
const [convergenceField, , convergenceFieldHelpers] = useField('convergence');

const [isConvergenceOpen, setIsConvergenceOpen] = useState(false);

const isValid = !approvalNameMeta.touched || !approvalNameMeta.error;
return (
@ -101,6 +116,7 @@ function NodeTypeStep({ i18n }) {
approvalDescriptionHelpers.setValue('');
timeoutMinutesHelpers.setValue(0);
timeoutSecondsHelpers.setValue(0);
convergenceFieldHelpers.setValue('any');
}}
/>
</div>
@ -129,61 +145,108 @@ function NodeTypeStep({ i18n }) {
onUpdateNodeResource={nodeResourceHelpers.setValue}
/>
)}
{nodeTypeField.value === 'workflow_approval_template' && (
<Form css="margin-top: 20px;">
<FormFullWidthLayout>
<FormField
name="approvalName"
id="approval-name"
isRequired
validate={required(null, i18n)}
validated={isValid ? 'default' : 'error'}
label={i18n._(t`Name`)}
/>
<FormField
name="approvalDescription"
id="approval-description"
label={i18n._(t`Description`)}
/>
<FormGroup
label={i18n._(t`Timeout`)}
fieldId="approval-timeout"
name="timeout"
<Form css="margin-top: 20px;">
<FormColumnLayout>
{nodeTypeField.value === 'workflow_approval_template' && (
<FormFullWidthLayout>
<FormField
name="approvalName"
id="approval-name"
isRequired
validate={required(null, i18n)}
validated={isValid ? 'default' : 'error'}
label={i18n._(t`Name`)}
/>
<FormField
name="approvalDescription"
id="approval-description"
label={i18n._(t`Description`)}
/>
<FormGroup
label={i18n._(t`Timeout`)}
fieldId="approval-timeout"
name="timeout"
>
<div css="display: flex;align-items: center;">
<TimeoutInput
{...timeoutMinutesField}
aria-label={i18n._(t`Timeout minutes`)}
id="approval-timeout-minutes"
min="0"
onChange={(value, event) => {
timeoutMinutesField.onChange(event);
}}
step="1"
type="number"
/>
<TimeoutLabel>
<Trans>min</Trans>
</TimeoutLabel>
<TimeoutInput
{...timeoutSecondsField}
aria-label={i18n._(t`Timeout seconds`)}
id="approval-timeout-seconds"
min="0"
onChange={(value, event) => {
timeoutSecondsField.onChange(event);
}}
step="1"
type="number"
/>
<TimeoutLabel>
<Trans>sec</Trans>
</TimeoutLabel>
</div>
</FormGroup>
</FormFullWidthLayout>
)}
<FormGroup
fieldId="convergence"
label={i18n._(t`Convergence`)}
isRequired
labelIcon={
<Popover
content={
<>
{i18n._(
t`Preconditions for running this node when there are multiple parents. Refer to the`
)}{' '}
<a
href="https://docs.ansible.com/ansible-tower/latest/html/userguide/workflow_templates.html#convergence-node"
target="_blank"
rel="noopener noreferrer"
>
{i18n._(t`documentation`)}
</a>{' '}
{i18n._(t`for more info.`)}
</>
}
/>
}
>
<Select
variant={SelectVariant.single}
isOpen={isConvergenceOpen}
selections={convergenceField.value}
onToggle={setIsConvergenceOpen}
onSelect={(event, selection) => {
convergenceFieldHelpers.setValue(selection);
setIsConvergenceOpen(false);
}}
aria-label={i18n._(t`Convergence select`)}
className="convergenceSelect"
ouiaId="convergenceSelect"
>
<div css="display: flex;align-items: center;">
<TimeoutInput
{...timeoutMinutesField}
aria-label={i18n._(t`Timeout minutes`)}
id="approval-timeout-minutes"
min="0"
onChange={(value, event) => {
timeoutMinutesField.onChange(event);
}}
step="1"
type="number"
/>
<TimeoutLabel>
<Trans>min</Trans>
</TimeoutLabel>
<TimeoutInput
{...timeoutSecondsField}
aria-label={i18n._(t`Timeout seconds`)}
id="approval-timeout-seconds"
min="0"
onChange={(value, event) => {
timeoutSecondsField.onChange(event);
}}
step="1"
type="number"
/>
<TimeoutLabel>
<Trans>sec</Trans>
</TimeoutLabel>
</div>
</FormGroup>
</FormFullWidthLayout>
</Form>
)}
<SelectOption key="any" value="any" id="select-option-any">
{i18n._(t`Any`)}
</SelectOption>
<SelectOption key="all" value="all" id="select-option-all">
{i18n._(t`All`)}
</SelectOption>
</Select>
</FormGroup>
</FormColumnLayout>
</Form>
</>
);
}
@ -177,6 +177,7 @@ describe('NodeTypeStep', () => {
approvalDescription: '',
timeoutMinutes: 0,
timeoutSeconds: 0,
convergence: 'any',
}}
>
<NodeTypeStep />

@ -86,5 +86,6 @@ function getInitialValues() {
timeoutMinutes: 0,
timeoutSeconds: 0,
nodeType: 'job_template',
convergence: 'any',
};
}

@ -282,6 +282,7 @@ describe('NodeViewModal', () => {
description: '',
type: 'workflow_approval_template',
timeout: 0,
all_parents_must_converge: false,
},
},
};
@ -39,6 +39,11 @@ const getNodeToEditDefaultValues = (
const initialValues = {
nodeResource: nodeToEdit?.fullUnifiedJobTemplate || null,
nodeType: nodeToEdit?.fullUnifiedJobTemplate?.type || 'job_template',
convergence:
nodeToEdit?.all_parents_must_converge ||
nodeToEdit?.originalNodeObject?.all_parents_must_converge
? 'all'
: 'any',
};

if (
@ -228,7 +233,6 @@ export default function useWorkflowNodeSteps(
useEffect(() => {
if (launchConfig && surveyConfig && isReady) {
let initialValues = {};

if (
nodeToEdit &&
nodeToEdit?.fullUnifiedJobTemplate &&
@ -264,10 +268,15 @@ export default function useWorkflowNodeSteps(
);
}

if (initialValues.convergence === 'all') {
formikValues.convergence = 'all';
}

resetForm({
errors,
values: {
...initialValues,
convergence: formikValues.convergence,
nodeResource: formikValues.nodeResource,
nodeType: formikValues.nodeType,
linkType: formikValues.linkType,

@ -369,27 +369,24 @@ function Visualizer({ template, i18n }) {
node.fullUnifiedJobTemplate.type === 'workflow_approval_template'
) {
nodeRequests.push(
WorkflowJobTemplatesAPI.createNode(template.id, {}).then(
({ data }) => {
node.originalNodeObject = data;
originalLinkMap[node.id] = {
id: data.id,
success_nodes: [],
failure_nodes: [],
always_nodes: [],
};
approvalTemplateRequests.push(
WorkflowJobTemplateNodesAPI.createApprovalTemplate(
data.id,
{
name: node.fullUnifiedJobTemplate.name,
description: node.fullUnifiedJobTemplate.description,
timeout: node.fullUnifiedJobTemplate.timeout,
}
)
);
}
)
WorkflowJobTemplatesAPI.createNode(template.id, {
all_parents_must_converge: node.all_parents_must_converge,
}).then(({ data }) => {
node.originalNodeObject = data;
originalLinkMap[node.id] = {
id: data.id,
success_nodes: [],
failure_nodes: [],
always_nodes: [],
};
approvalTemplateRequests.push(
WorkflowJobTemplateNodesAPI.createApprovalTemplate(data.id, {
name: node.fullUnifiedJobTemplate.name,
description: node.fullUnifiedJobTemplate.description,
timeout: node.fullUnifiedJobTemplate.timeout,
})
);
})
);
} else {
nodeRequests.push(
@ -397,6 +394,7 @@ function Visualizer({ template, i18n }) {
...node.promptValues,
inventory: node.promptValues?.inventory?.id || null,
unified_job_template: node.fullUnifiedJobTemplate.id,
all_parents_must_converge: node.all_parents_must_converge,
}).then(({ data }) => {
node.originalNodeObject = data;
originalLinkMap[node.id] = {
@ -427,27 +425,47 @@ function Visualizer({ template, i18n }) {
node.originalNodeObject.summary_fields.unified_job_template
.unified_job_type === 'workflow_approval'
) {
approvalTemplateRequests.push(
WorkflowApprovalTemplatesAPI.update(
node.originalNodeObject.summary_fields.unified_job_template
.id,
{
name: node.fullUnifiedJobTemplate.name,
description: node.fullUnifiedJobTemplate.description,
timeout: node.fullUnifiedJobTemplate.timeout,
}
)
);
} else {
approvalTemplateRequests.push(
WorkflowJobTemplateNodesAPI.createApprovalTemplate(
nodeRequests.push(
WorkflowJobTemplateNodesAPI.replace(
node.originalNodeObject.id,
{
name: node.fullUnifiedJobTemplate.name,
description: node.fullUnifiedJobTemplate.description,
timeout: node.fullUnifiedJobTemplate.timeout,
all_parents_must_converge: node.all_parents_must_converge,
}
)
).then(({ data }) => {
node.originalNodeObject = data;
approvalTemplateRequests.push(
WorkflowApprovalTemplatesAPI.update(
node.originalNodeObject.summary_fields
.unified_job_template.id,
{
name: node.fullUnifiedJobTemplate.name,
description: node.fullUnifiedJobTemplate.description,
timeout: node.fullUnifiedJobTemplate.timeout,
}
)
);
})
);
} else {
nodeRequests.push(
WorkflowJobTemplateNodesAPI.replace(
node.originalNodeObject.id,
{
all_parents_must_converge: node.all_parents_must_converge,
}
).then(({ data }) => {
node.originalNodeObject = data;
approvalTemplateRequests.push(
WorkflowJobTemplateNodesAPI.createApprovalTemplate(
node.originalNodeObject.id,
{
name: node.fullUnifiedJobTemplate.name,
description: node.fullUnifiedJobTemplate.description,
timeout: node.fullUnifiedJobTemplate.timeout,
}
)
);
})
);
}
} else {
@ -456,6 +474,7 @@ function Visualizer({ template, i18n }) {
...node.promptValues,
inventory: node.promptValues?.inventory?.id || null,
unified_job_template: node.fullUnifiedJobTemplate.id,
all_parents_must_converge: node.all_parents_must_converge,
}).then(() => {
const {
added: addedCredentials,
@ -419,6 +419,7 @@ describe('Visualizer', () => {
).toBe(1);
});

// TODO: figure out why this test is failing, the scenario passes in the ui
test('Error shown when saving fails due to approval template edit error', async () => {
workflowReducer.mockImplementation(state => {
const newState = {
@ -459,6 +460,17 @@ describe('Visualizer', () => {
results: [],
},
});
WorkflowJobTemplateNodesAPI.replace.mockResolvedValue({
data: {
id: 9000,
summary_fields: {
unified_job_template: {
unified_job_type: 'workflow_approval',
id: 1,
},
},
},
});
WorkflowApprovalTemplatesAPI.update.mockRejectedValue(new Error());
await act(async () => {
wrapper = mountWithContexts(
@ -475,6 +487,7 @@ describe('Visualizer', () => {
wrapper.find('Button#visualizer-save').simulate('click');
});
wrapper.update();
expect(WorkflowJobTemplateNodesAPI.replace).toHaveBeenCalledTimes(1);
expect(WorkflowApprovalTemplatesAPI.update).toHaveBeenCalledTimes(1);
expect(
wrapper.find('AlertModal[title="Error saving the workflow!"]').length
@ -44,6 +44,12 @@ const NodeResourceName = styled.p`
text-overflow: ellipsis;
white-space: nowrap;
`;

const ConvergenceLabel = styled.p`
font-size: 12px;
color: #ffffff;
`;

NodeResourceName.displayName = 'NodeResourceName';

function VisualizerNode({
@ -244,6 +250,38 @@ function VisualizerNode({
node.id
].y - nodePositions[1].y})`}
>
{(node.all_parents_must_converge ||
node?.originalNodeObject?.all_parents_must_converge) && (
<>
<rect
fill={
hovering && addingLink && !node.isInvalidLinkTarget
? '#007ABC'
: '#93969A'
}
height={wfConstants.nodeH / 4}
rx={2}
ry={2}
x={wfConstants.nodeW / 2 - wfConstants.nodeW / 10}
y={-wfConstants.nodeH / 4 + 2}
stroke={
hovering && addingLink && !node.isInvalidLinkTarget
? '#007ABC'
: '#93969A'
}
strokeWidth="2px"
width={wfConstants.nodeW / 5}
/>
<foreignObject
height={wfConstants.nodeH / 4}
width={wfConstants.nodeW / 5}
x={wfConstants.nodeW / 2 - wfConstants.nodeW / 10 + 7}
y={-wfConstants.nodeH / 4 - 1}
>
<ConvergenceLabel>{i18n._(t`ALL`)}</ConvergenceLabel>
</foreignObject>
</>
)}
<rect
fill="#FFFFFF"
height={wfConstants.nodeH}
@ -72,6 +72,8 @@ Notable releases of the `awx.awx` collection:
The following notes are changes that may require changes to playbooks:

- When a project is created, it will wait for the update/sync to finish by default; this can be turned off with the `wait` parameter, if desired.
- When using the wait parameter with project update, if the project did not undergo a revision update, the result will be
'not changed'
- Creating a "scan" type job template is no longer supported.
- Specifying a custom certificate via the `TOWER_CERTIFICATE` environment variable no longer works.
- Type changes of variable fields:
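To make the first note above concrete, a minimal task sketch is shown below. The `tower_project` module and its `wait` option are the pieces described in the note; the project name, organization, and SCM values are purely illustrative assumptions:

    - name: Create a project without waiting for the initial update/sync
      tower_project:
        name: Example Project                # illustrative name
        organization: Default                # illustrative organization
        scm_type: git
        scm_url: https://github.com/ansible/ansible-tower-samples
        wait: false                          # opt out of the new wait-by-default behavior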
@ -266,8 +266,8 @@ class TowerAPIModule(TowerModule):
collection_compare_ver = parsed_collection_version[0]
tower_compare_ver = parsed_tower_version[0]
else:
collection_compare_ver = "{}.{}".format(parsed_collection_version[0], parsed_collection_version[1])
tower_compare_ver = '{}.{}'.format(parsed_tower_version[0], parsed_tower_version[1])
collection_compare_ver = "{0}.{1}".format(parsed_collection_version[0], parsed_collection_version[1])
tower_compare_ver = '{0}.{1}'.format(parsed_tower_version[0], parsed_tower_version[1])

if self._COLLECTION_TYPE not in self.collection_to_version or self.collection_to_version[self._COLLECTION_TYPE] != tower_type:
self.warn("You are using the {0} version of this collection but connecting to {1}".format(self._COLLECTION_TYPE, tower_type))
@ -34,6 +34,7 @@ options:
wait:
description:
- Wait for the project to update.
- If scm revision has not changed module will return not changed.
default: True
type: bool
interval:
@ -109,6 +110,9 @@ def main():
if project is None:
module.fail_json(msg="Unable to find project")

if wait:
scm_revision_original = project['scm_revision']

# Update the project
result = module.post_endpoint(project['related']['update'])

@ -126,7 +130,12 @@ def main():
start = time.time()

# Invoke wait function
module.wait_on_url(url=result['json']['url'], object_name=module.get_item_name(project), object_type='Project Update', timeout=timeout, interval=interval)
result = module.wait_on_url(
url=result['json']['url'], object_name=module.get_item_name(project), object_type='Project Update', timeout=timeout, interval=interval
)
scm_revision_new = result['json']['scm_revision']
if scm_revision_new == scm_revision_original:
module.json_output['changed'] = False

module.exit_json(**module.json_output)
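A hedged usage sketch of the wait behavior added in this hunk, assuming it belongs to the project update module of the collection (option names are taken from the docs fragment above; the project name and timings are illustrative):

    - name: Run a project update and wait for it to finish
      tower_project_update:
        name: Example Project    # illustrative project
        wait: true               # default; blocks until the update completes
        interval: 2
        timeout: 300
      register: project_sync

    # Because the module now compares scm_revision before and after the update,
    # an update that pulls no new revision reports changed: false.
    - debug:
        msg: "New revision pulled: {{ project_sync is changed }}"

This is the same behavior the integration test further down asserts with `result is not changed`.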
@ -16,7 +16,7 @@ from requests.models import Response, PreparedRequest
import pytest

from awx.main.tests.functional.conftest import _request
from awx.main.models import Organization, Project, Inventory, JobTemplate, Credential, CredentialType
from awx.main.models import Organization, Project, Inventory, JobTemplate, Credential, CredentialType, ExecutionEnvironment

from django.db import transaction

@ -261,3 +261,8 @@ def silence_warning():
"""Warnings use global variable, same as deprecations."""
with mock.patch('ansible.module_utils.basic.AnsibleModule.warn') as this_mock:
yield this_mock


@pytest.fixture
def execution_environment():
return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", managed_by_tower=True)

@ -157,7 +157,7 @@ def determine_state(module_id, endpoint, module, parameter, api_option, module_o
return 'OK'


def test_completeness(collection_import, request, admin_user, job_template):
def test_completeness(collection_import, request, admin_user, job_template, execution_environment):
option_comparison = {}
# Load a list of existing module files from disk
base_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))

@ -59,7 +59,7 @@ def test_version_warning(collection_import, silence_warning):
my_module._COLLECTION_TYPE = "awx"
my_module.get_endpoint('ping')
silence_warning.assert_called_once_with(
'You are running collection version {} but connecting to {} version {}'.format(my_module._COLLECTION_VERSION, awx_name, ping_version)
'You are running collection version {0} but connecting to {1} version {2}'.format(my_module._COLLECTION_VERSION, awx_name, ping_version)
)


@ -107,7 +107,7 @@ def test_version_warning_strictness_tower(collection_import, silence_warning):
my_module._COLLECTION_TYPE = "tower"
my_module.get_endpoint('ping')
silence_warning.assert_called_once_with(
'You are running collection version {} but connecting to {} version {}'.format(my_module._COLLECTION_VERSION, tower_name, ping_version)
'You are running collection version {0} but connecting to {1} version {2}'.format(my_module._COLLECTION_VERSION, tower_name, ping_version)
)


@ -121,7 +121,9 @@ def test_type_warning(collection_import, silence_warning):
my_module._COLLECTION_VERSION = ping_version
my_module._COLLECTION_TYPE = "tower"
my_module.get_endpoint('ping')
silence_warning.assert_called_once_with('You are using the {} version of this collection but connecting to {}'.format(my_module._COLLECTION_TYPE, awx_name))
silence_warning.assert_called_once_with(
'You are using the {0} version of this collection but connecting to {1}'.format(my_module._COLLECTION_TYPE, awx_name)
)


def test_duplicate_config(collection_import, silence_warning):

@ -53,6 +53,7 @@
- assert:
that:
- result is successful
- result is not changed

- name: Delete the test project 1
tower_project:

@ -139,7 +139,7 @@ class UnifiedJob(HasStatus, base.Base):
"""
self.get()
job_args = self.job_args
expected_prefix = '/tmp/awx_{}'.format(self.id)
expected_prefix = '/tmp/pdd_wrapper_{}'.format(self.id)
for arg1, arg2 in zip(job_args[:-1], job_args[1:]):
if arg1 == '-v':
if ':' in arg2:
176
docs/licenses/Cython.txt
Normal file
@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS
22
docs/licenses/wheel.txt
Normal file
@ -0,0 +1,22 @@
"wheel" copyright (c) 2012-2014 Daniel Holth <dholth@fastmail.fm> and
contributors.

The MIT License

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
@ -1,5 +1,5 @@
aiohttp
ansible-runner>=1.4.7
ansible-runner==2.0.0a1
ansiconv==1.0.0 # UPGRADE BLOCKER: from 2013, consider replacing instead of upgrading
asciichartpy
autobahn>=20.12.3 # CVE-2020-35678
@ -7,6 +7,7 @@ azure-keyvault==1.1.0 # see UPGRADE BLOCKERs
channels
channels-redis>=3.1.0 # https://github.com/django/channels_redis/issues/212
cryptography<3.0.0
Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep
daphne
distro
django==2.2.16 # see UPGRADE BLOCKERs
@ -37,11 +38,13 @@ openshift>=0.11.0 # minimum version to pull in new pyyaml for CVE-2017-18342
pexpect==4.7.0 # see library notes
prometheus_client
psycopg2
psutil
pygerduty
pyparsing
python3-saml
python-ldap>=3.3.1 # https://github.com/python-ldap/python-ldap/issues/270
pyyaml>=5.4.1 # minimum to fix https://github.com/yaml/pyyaml/issues/478
receptorctl
schedule==0.6.0
social-auth-core==3.3.1 # see UPGRADE BLOCKERs
social-auth-app-django==3.1.0 # see UPGRADE BLOCKERs
@ -53,5 +56,6 @@ twilio
twisted[tls]>=20.3.0 # CVE-2020-10108, CVE-2020-10109
uWSGI
uwsgitop
wheel
pip==19.3.1 # see UPGRADE BLOCKERs
setuptools==41.6.0 # see UPGRADE BLOCKERs

@ -4,10 +4,8 @@ aiohttp==3.6.2
# via -r /awx_devel/requirements/requirements.in
aioredis==1.3.1
# via channels-redis
#ansible-runner==1.4.7
# via
# -r /awx_devel/requirements/requirements.in
# -r /awx_devel/requirements/requirements_git.txt
ansible-runner==2.0.0a1
# via -r /awx_devel/requirements/requirements.in
ansiconv==1.0.0
# via -r /awx_devel/requirements/requirements.in
asciichartpy==1.5.25
@ -72,6 +70,9 @@ cryptography==2.9.2
# pyopenssl
# service-identity
# social-auth-core
Cython==0.29.22
# via
# -r /awx_devel/requirements/requirements.in
daphne==2.4.1
# via
# -r /awx_devel/requirements/requirements.in
@ -231,8 +232,8 @@ pkgconfig==1.5.1
# via xmlsec
prometheus-client==0.7.1
# via -r /awx_devel/requirements/requirements.in
psutil==5.7.0
# via ansible-runner
psutil==5.8.0
# via -r /awx_devel/requirements/requirements.in
psycopg2==2.8.4
# via -r /awx_devel/requirements/requirements.in
ptyprocess==0.6.0
@ -297,7 +298,8 @@ pyyaml==5.4.1
# djangorestframework-yaml
# kubernetes
# receptorctl
# via -r /awx_devel/requirements/requirements_git.txt
receptorctl==0.9.7
# via -r /awx_devel/requirements/requirements.in
redis==3.4.1
# via
# -r /awx_devel/requirements/requirements.in
@ -412,5 +414,6 @@ setuptools==41.6.0
# kubernetes
# markdown
# python-daemon
# receptorctl
# zope.interface
wheel==0.36.2
# via -r /awx_devel/requirements/requirements.in

@ -1,6 +1,7 @@
django-debug-toolbar==1.11
django-rest-swagger
pprofile
ipython==7.21.0
unittest2
black
pytest

@ -1,3 +1 @@
git+https://github.com/ansible/system-certifi.git@devel#egg=certifi
git+git://github.com/ansible/ansible-runner@devel#egg=ansible-runner
git+https://github.com/project-receptor/receptor.git@0.9.6#egg=receptorctl&subdirectory=receptorctl
2
setup.py
@ -141,7 +141,7 @@ setup(
# ("%s" % webconfig, ["config/uwsgi_params"]),
("%s" % sharedir, ["tools/scripts/request_tower_configuration.sh","tools/scripts/request_tower_configuration.ps1"]),
("%s" % docdir, ["docs/licenses/*",]),
("%s" % bindir, ["tools/scripts/ansible-tower-service",
("%s" % bindir, ["tools/scripts/automation-controller-service",
"tools/scripts/failure-event-handler",
"tools/scripts/awx-python",
"tools/scripts/ansible-tower-setup"]),

@ -119,7 +119,7 @@ RUN curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master
RUN curl -L -o /usr/bin/tini https://github.com/krallin/tini/releases/download/v0.19.0/tini-{{ tini_architecture | default('amd64') }} && \
chmod +x /usr/bin/tini

RUN python3.8 -m ensurepip && pip3 install "virtualenv < 20" supervisor {% if build_dev|bool %}black{% endif %}
RUN python3.8 -m ensurepip && pip3 install "virtualenv < 20" supervisor

RUN rm -rf /root/.cache && rm -rf /tmp/*

@ -153,6 +153,8 @@ RUN dnf -y install \
unzip && \
npm install -g n && n 14.15.1 && dnf remove -y nodejs

RUN pip3 install black git+https://github.com/coderanger/supervisor-stdout

# This package randomly fails to download.
# It is nice to have in the dev env, but not necessary.
# Add it back to the list above if the repo ever straighten up.
@ -170,7 +172,7 @@ COPY --from=builder /var/lib/awx /var/lib/awx
RUN ln -s /var/lib/awx/venv/awx/bin/awx-manage /usr/bin/awx-manage

{%if build_dev|bool %}
COPY --from=quay.io/project-receptor/receptor:0.9.6 /usr/bin/receptor /usr/bin/receptor
COPY --from=quay.io/project-receptor/receptor:0.9.7 /usr/bin/receptor /usr/bin/receptor
RUN openssl req -nodes -newkey rsa:2048 -keyout /etc/nginx/nginx.key -out /etc/nginx/nginx.csr \
-subj "/C=US/ST=North Carolina/L=Durham/O=Ansible/OU=AWX Development/CN=awx.localhost" && \
openssl x509 -req -days 365 -in /etc/nginx/nginx.csr -signkey /etc/nginx/nginx.key -out /etc/nginx/nginx.crt && \

@ -1,5 +1,5 @@
---
version: '2'
version: '2.1'
services:
{% for i in range(cluster_node_count|int) %}
{% set container_postfix = loop.index %}
@ -12,8 +12,9 @@ stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true


[program:awx-receiver]
command = make receiver
@ -24,8 +25,8 @@ stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-wsbroadcast]
command = make wsbroadcast
@ -36,8 +37,8 @@ stopsignal=KILL
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-uwsgi]
command = make uwsgi
@ -48,8 +49,8 @@ stopwaitsecs = 1
stopsignal=KILL
stopasgroup=true
killasgroup=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-daphne]
command = make daphne
@ -60,16 +61,16 @@ stopwaitsecs = 1
stopsignal=KILL
stopasgroup=true
killasgroup=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-nginx]
command = make nginx
autostart = true
autorestart = true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-rsyslogd]
command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
@ -80,8 +81,8 @@ stopsignal=TERM
stopasgroup=true
killasgroup=true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-receptor]
command = receptor --config /etc/receptor/receptor.conf
@ -91,8 +92,8 @@ stopsignal = KILL
stopasgroup = true
killasgroup = true
redirect_stderr=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
stdout_events_enabled = true
stderr_events_enabled = true

[group:tower-processes]
programs=awx-dispatcher,awx-receiver,awx-uwsgi,awx-daphne,awx-nginx,awx-wsbroadcast,awx-rsyslogd
@ -106,3 +107,9 @@ serverurl=unix:///var/run/supervisor/supervisor.sock ; use a unix:// URL for a

[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

[eventlistener:stdout]
command = supervisor_stdout
buffer_size = 100
events = PROCESS_LOG
result_handler = supervisor_stdout:event_handler
@ -1,18 +0,0 @@
#!/bin/bash

if [ -f /etc/sysconfig/ansible-tower ]; then
source /etc/sysconfig/ansible-tower
fi

case "$1" in
start|stop|restart)
exec systemctl $1 ansible-tower.service
;;
status)
exec systemctl status ansible-tower.service $TOWER_SERVICES
;;
*)
echo "Usage: ansible-tower-service start|stop|restart|status"
exit 1
;;
esac
18
tools/scripts/automation-controller-service
Executable file
@ -0,0 +1,18 @@
#!/bin/bash

if [ -f /etc/sysconfig/automation-controller ]; then
source /etc/sysconfig/automation-controller
fi

case "$1" in
start|stop|restart)
exec systemctl $1 automation-controller.service
;;
status)
exec systemctl status automation-controller.service $TOWER_SERVICES
;;
*)
echo "Usage: automation-controller-service start|stop|restart|status"
exit 1
;;
esac