Mirror of https://github.com/ansible/awx.git (synced 2026-02-08 21:14:47 -03:30)

Compare commits: 46 commits
| SHA1 |
|---|
| 5dd0eab806 |
| 41f3f381ec |
| ac8cff75ce |
| 94b34b801c |
| 8f6849fc22 |
| 821b1701bf |
| b7f2825909 |
| e87e041a2a |
| cc336e791c |
| c2a3c3b285 |
| 7b8dcc98e7 |
| d5011492bf |
| e363ddf470 |
| 987709cdb3 |
| f04ac3c798 |
| 71a6baccdb |
| d07076b686 |
| 7129f3e8cd |
| df61a5cea1 |
| a4b950f79b |
| 8be739d255 |
| ca54195099 |
| f0fcfdde39 |
| 80b1ba4a35 |
| 51f8e362dc |
| 737d6d8c8b |
| beaf6b6058 |
| aad1fbcef8 |
| 0b96d617ac |
| fe768a159b |
| c1ebea858b |
| 7b2938f515 |
| e524d3df3e |
| cec2d2dfb9 |
| 15b7ad3570 |
| 36ff9cbc6d |
| ed74d80ecb |
| 4a7f4d0ed4 |
| 6e08c3567f |
| 58734a33c4 |
| 2832f28014 |
| f9bb26ad33 |
| 878035c13b |
| 2cc971a43f |
| 9d77c54612 |
| ef651a3a21 |
10  .github/triage_replies.md  (vendored)

@@ -53,6 +53,16 @@ https://github.com/ansible/awx/#get-involved \
 Thank you once again for this and your interest in AWX!
 
 
+### Red Hat Support Team
+- Hi! \
+\
+It appears that you are using an RPM build for RHEL. Please reach out to the Red Hat support team and submit a ticket. \
+\
+Here is the link to do so: \
+\
+https://access.redhat.com/support \
+\
+Thank you for your submission and for supporting AWX!
 
 ## Common
 
3  .github/workflows/ci.yml  (vendored)

@@ -28,6 +28,9 @@ jobs:
           - name: awx-collection
             command: /start_tests.sh test_collection_all
             label: Run Collection Tests
+          - name: awx-collection-sanity
+            command: /start_tests.sh test_collection_sanity
+            label: Run Ansible core Collection Sanity tests
           - name: api-schema
             label: Check API Schema
             command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
16  Makefile

@@ -6,7 +6,9 @@ CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
-COLLECTION_VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+
+# ansible-test requires a semver-compatible version, so we allow overrides to hack it
+COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
 
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
@@ -300,7 +302,8 @@ test_collection:
	if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi && \
-	pip install ansible-core && \
+	if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
+	ansible --version
	py.test $(COLLECTION_TEST_DIRS) -v
	# The python path needs to be modified so that the tests can find Ansible within the container
	# First we will use anything explicitly set as PYTHONPATH
@@ -330,8 +333,13 @@ install_collection: build_collection
	rm -rf $(COLLECTION_INSTALL)
	ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz
 
-test_collection_sanity: install_collection
-	cd $(COLLECTION_INSTALL) && ansible-test sanity
+test_collection_sanity:
+	rm -rf awx_collection_build/
+	rm -rf $(COLLECTION_INSTALL)
+	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
+	ansible --version
+	COLLECTION_VERSION=1.0.0 make install_collection
+	cd $(COLLECTION_INSTALL) && ansible-test sanity --exclude=plugins/modules/export.py
 
 test_collection_integration: install_collection
	cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
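The sanity target forces `COLLECTION_VERSION=1.0.0` because ansible-test rejects non-semver versions such as setuptools-scm dev builds; the Makefile otherwise truncates to the first three dot-separated fields (`cut -d . -f 1-3`). A minimal Python sketch of that truncation (the sample version string is illustrative):

```python
# Python equivalent of `scm_version.py | cut -d . -f 1-3`: truncate a
# setuptools-scm style version to a semver-shaped major.minor.patch.
def to_semver_prefix(version: str) -> str:
    return '.'.join(version.split('.')[:3])

assert to_semver_prefix('21.11.0.dev34+g0bc8639') == '21.11.0'
assert to_semver_prefix('21.11.0') == '21.11.0'
```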
@@ -344,6 +344,13 @@ class InstanceDetail(RetrieveUpdateAPIView):
     model = models.Instance
     serializer_class = serializers.InstanceSerializer
 
+    def update_raw_data(self, data):
+        # these fields are only valid on creation of an instance, so they are unwanted on the detail view
+        data.pop('listener_port', None)
+        data.pop('node_type', None)
+        data.pop('hostname', None)
+        return super(InstanceDetail, self).update_raw_data(data)
+
     def update(self, request, *args, **kwargs):
         r = super(InstanceDetail, self).update(request, *args, **kwargs)
         if status.is_success(r.status_code):
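A standalone sketch of the pattern above, outside DRF, for readers unfamiliar with `update_raw_data` (names and values here are illustrative, not AWX's API): creation-only keys are stripped before presenting an update payload.

```python
# Drop keys that are only meaningful at creation time before showing an
# "update" template; field names taken from the diff above.
CREATE_ONLY_FIELDS = ('listener_port', 'node_type', 'hostname')

def update_raw_data(data: dict) -> dict:
    """Return a copy of ``data`` without creation-only fields."""
    return {k: v for k, v in data.items() if k not in CREATE_ONLY_FIELDS}

raw = {'hostname': 'node1', 'node_type': 'execution', 'capacity': 100}
assert update_raw_data(raw) == {'capacity': 100}
```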
@@ -16,7 +16,7 @@ from rest_framework import status
 
 from awx.main.constants import ACTIVE_STATES
 from awx.main.utils import get_object_or_400
-from awx.main.models.ha import Instance, InstanceGroup
+from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
 from awx.main.models.organization import Team
 from awx.main.models.projects import Project
 from awx.main.models.inventory import Inventory
@@ -107,6 +107,11 @@ class InstanceGroupMembershipMixin(object):
             if inst_name in ig_obj.policy_instance_list:
                 ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
                 ig_obj.save(update_fields=['policy_instance_list'])
+
+        # sometimes removing an instance has a non-obvious consequence
+        # this is almost always true if policy_instance_percentage or _minimum is non-zero
+        # after removing a single instance, the other memberships need to be re-balanced
+        schedule_policy_task()
         return response
@@ -2697,46 +2697,66 @@ class ActivityStreamAccess(BaseAccess):
         # 'job_template', 'job', 'project', 'project_update', 'workflow_job',
         # 'inventory_source', 'workflow_job_template'
 
-        inventory_set = Inventory.accessible_objects(self.user, 'read_role')
-        credential_set = Credential.accessible_objects(self.user, 'read_role')
+        q = Q(user=self.user)
+
+        inventory_set = Inventory.accessible_pk_qs(self.user, 'read_role')
+        if inventory_set:
+            q |= (
+                Q(ad_hoc_command__inventory__in=inventory_set)
+                | Q(inventory__in=inventory_set)
+                | Q(host__inventory__in=inventory_set)
+                | Q(group__inventory__in=inventory_set)
+                | Q(inventory_source__inventory__in=inventory_set)
+                | Q(inventory_update__inventory_source__inventory__in=inventory_set)
+            )
+
+        credential_set = Credential.accessible_pk_qs(self.user, 'read_role')
+        if credential_set:
+            q |= Q(credential__in=credential_set)
+
         auditing_orgs = (
             (Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
             .distinct()
             .values_list('id', flat=True)
         )
-        project_set = Project.accessible_objects(self.user, 'read_role')
-        jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
-        team_set = Team.accessible_objects(self.user, 'read_role')
-        wfjt_set = WorkflowJobTemplate.accessible_objects(self.user, 'read_role')
-        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
-        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
+        if auditing_orgs:
+            q |= (
+                Q(user__in=auditing_orgs.values('member_role__members'))
+                | Q(organization__in=auditing_orgs)
+                | Q(notification_template__organization__in=auditing_orgs)
+                | Q(notification__notification_template__organization__in=auditing_orgs)
+                | Q(label__organization__in=auditing_orgs)
+                | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
+            )
 
-        return qs.filter(
-            Q(ad_hoc_command__inventory__in=inventory_set)
-            | Q(o_auth2_application__in=app_set)
-            | Q(o_auth2_access_token__in=token_set)
-            | Q(user__in=auditing_orgs.values('member_role__members'))
-            | Q(user=self.user)
-            | Q(organization__in=auditing_orgs)
-            | Q(inventory__in=inventory_set)
-            | Q(host__inventory__in=inventory_set)
-            | Q(group__inventory__in=inventory_set)
-            | Q(inventory_source__inventory__in=inventory_set)
-            | Q(inventory_update__inventory_source__inventory__in=inventory_set)
-            | Q(credential__in=credential_set)
-            | Q(team__in=team_set)
-            | Q(project__in=project_set)
-            | Q(project_update__project__in=project_set)
-            | Q(job_template__in=jt_set)
-            | Q(job__job_template__in=jt_set)
-            | Q(workflow_job_template__in=wfjt_set)
-            | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
-            | Q(workflow_job__workflow_job_template__in=wfjt_set)
-            | Q(notification_template__organization__in=auditing_orgs)
-            | Q(notification__notification_template__organization__in=auditing_orgs)
-            | Q(label__organization__in=auditing_orgs)
-            | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
-        ).distinct()
+        project_set = Project.accessible_pk_qs(self.user, 'read_role')
+        if project_set:
+            q |= Q(project__in=project_set) | Q(project_update__project__in=project_set)
+
+        jt_set = JobTemplate.accessible_pk_qs(self.user, 'read_role')
+        if jt_set:
+            q |= Q(job_template__in=jt_set) | Q(job__job_template__in=jt_set)
+
+        wfjt_set = WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role')
+        if wfjt_set:
+            q |= (
+                Q(workflow_job_template__in=wfjt_set)
+                | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
+                | Q(workflow_job__workflow_job_template__in=wfjt_set)
+            )
+
+        team_set = Team.accessible_pk_qs(self.user, 'read_role')
+        if team_set:
+            q |= Q(team__in=team_set)
+
+        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
+        if app_set:
+            q |= Q(o_auth2_application__in=app_set)
+
+        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
+        if token_set:
+            q |= Q(o_auth2_access_token__in=token_set)
+
+        return qs.filter(q).distinct()
 
     def can_add(self, data):
         return False
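The refactor above replaces one monolithic filter over full querysets with a condition built incrementally: start from `Q(user=self.user)` and OR in a clause only when the corresponding `accessible_pk_qs` subquery is non-empty. A minimal self-contained sketch of the same pattern (toy relation names, not AWX's models):

```python
# Minimal sketch of incremental Q composition; assumes a toy model layout
# (an Entry with FKs named "inventory" and "project"), not AWX's schema.
from django.db.models import Q

def filtered_entries(qs, user, accessible):
    """OR together only the clauses whose accessible-id set is non-empty.

    ``accessible`` maps a relation name to an iterable of readable PKs,
    standing in for ``Model.accessible_pk_qs(user, 'read_role')``.
    """
    q = Q(user=user)  # a user can always see their own activity
    inventory_ids = accessible.get('inventory')
    if inventory_ids:
        q |= Q(inventory__in=inventory_ids)
    project_ids = accessible.get('project')
    if project_ids:
        q |= Q(project__in=project_ids)
    # Skipping empty subqueries keeps the SQL smaller and avoids useless
    # joins; ``.distinct()`` collapses rows matched by several clauses.
    return qs.filter(q).distinct()
```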
@@ -9,10 +9,16 @@ aim_inputs = {
     'fields': [
         {
             'id': 'url',
-            'label': _('CyberArk AIM URL'),
+            'label': _('CyberArk CCP URL'),
             'type': 'string',
             'format': 'url',
         },
+        {
+            'id': 'webservice_id',
+            'label': _('Web Service ID'),
+            'type': 'string',
+            'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
+        },
         {
             'id': 'app_id',
             'label': _('Application ID'),
@@ -64,10 +70,13 @@ def aim_backend(**kwargs):
     client_cert = kwargs.get('client_cert', None)
     client_key = kwargs.get('client_key', None)
     verify = kwargs['verify']
+    webservice_id = kwargs['webservice_id']
     app_id = kwargs['app_id']
     object_query = kwargs['object_query']
     object_query_format = kwargs['object_query_format']
     reason = kwargs.get('reason', None)
+    if webservice_id == '':
+        webservice_id = 'AIMWebService'
 
     query_params = {
         'AppId': app_id,
@@ -78,7 +87,7 @@ def aim_backend(**kwargs):
         query_params['reason'] = reason
 
     request_qs = '?' + urlencode(query_params, quote_via=quote)
-    request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))
+    request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))
 
     with CertFiles(client_cert, client_key) as cert:
         res = requests.get(
@@ -92,4 +101,4 @@ def aim_backend(**kwargs):
     return res.json()['Content']
 
 
-aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
+aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
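One subtlety worth flagging in the URL construction above: `urljoin` with a relative path resolves against the base URL rather than blindly concatenating, so the web service ID, `api`, and `Accounts` are joined into a single relative string first. A quick sketch of the behavior (the host and IDs are illustrative placeholders):

```python
# Sketch of the request-URL construction used above; hostnames and IDs are
# illustrative, not real endpoints.
from urllib.parse import urljoin, urlencode, quote

url = 'https://ccp.example.com/'        # base CCP URL (assumed)
webservice_id = '' or 'AIMWebService'   # fall back to the documented default

query_params = {'AppId': 'my-app', 'Query': 'Safe=Test;Object=demo'}
request_qs = '?' + urlencode(query_params, quote_via=quote)
request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))

# urljoin resolves the relative path against the base:
assert request_url == 'https://ccp.example.com/AIMWebService/api/Accounts'
print(request_url + request_qs)
```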
@@ -32,8 +32,14 @@ class Command(BaseCommand):
     def handle(self, **options):
         self.old_key = settings.SECRET_KEY
         custom_key = os.environ.get("TOWER_SECRET_KEY")
-        if options.get("use_custom_key") and custom_key:
-            self.new_key = custom_key
+        if options.get("use_custom_key"):
+            if custom_key:
+                self.new_key = custom_key
+            else:
+                print("Use custom key was specified but the env var TOWER_SECRET_KEY was not available")
+                import sys
+
+                sys.exit(1)
         else:
             self.new_key = base64.encodebytes(os.urandom(33)).decode().rstrip()
         self._notification_templates()
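The change above makes the command fail fast instead of silently generating a random key when `--use-custom-key` is passed but `TOWER_SECRET_KEY` is unset or empty. A standalone sketch of the same fail-fast pattern outside Django (the env var name is taken from the diff; the function is illustrative):

```python
# Fail fast on a missing required environment variable.
import base64
import os
import sys

def resolve_secret_key(use_custom_key: bool) -> str:
    custom_key = os.environ.get("TOWER_SECRET_KEY")
    if use_custom_key:
        if not custom_key:
            print("Use custom key was specified but the env var TOWER_SECRET_KEY was not available")
            sys.exit(1)  # non-zero exit so calling scripts can detect the misuse
        return custom_key
    return base64.encodebytes(os.urandom(33)).decode().rstrip()
```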
@@ -158,7 +158,11 @@ class InstanceManager(models.Manager):
             return (False, instance)
 
         # Create new instance, and fill in default values
-        create_defaults = {'node_state': Instance.States.INSTALLED, 'capacity': 0}
+        create_defaults = {
+            'node_state': Instance.States.INSTALLED,
+            'capacity': 0,
+            'listener_port': 27199,
+        }
         if defaults is not None:
             create_defaults.update(defaults)
         uuid_option = {}
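`create_defaults` now includes a default `listener_port` of 27199, and caller-supplied `defaults` still win via `dict.update`. A tiny sketch of that precedence (the override value is hypothetical):

```python
# dict.update gives the caller's ``defaults`` precedence over the baseline;
# the baseline values mirror the diff, the override is illustrative.
create_defaults = {
    'node_state': 'installed',
    'capacity': 0,
    'listener_port': 27199,
}
defaults = {'listener_port': 31000}  # hypothetical caller override
if defaults is not None:
    create_defaults.update(defaults)
assert create_defaults['listener_port'] == 31000  # caller wins
assert create_defaults['capacity'] == 0           # baseline kept
```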
18  awx/main/migrations/0174_ensure_org_ee_admin_roles.py  (new file)

@@ -0,0 +1,18 @@
+# Generated by Django 3.2.16 on 2022-12-07 21:11
+
+from django.db import migrations
+
+from awx.main.migrations import _rbac as rbac
+from awx.main.migrations import _migration_utils as migration_utils
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0173_instancegroup_max_limits'),
+    ]
+
+    operations = [
+        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
+        migrations.RunPython(rbac.create_roles),
+    ]
@@ -15,6 +15,7 @@ def aws(cred, env, private_data_dir):
 
     if cred.has_input('security_token'):
         env['AWS_SECURITY_TOKEN'] = cred.get_input('security_token', default='')
+        env['AWS_SESSION_TOKEN'] = env['AWS_SECURITY_TOKEN']
 
 
 def gce(cred, env, private_data_dir):
@@ -507,7 +507,7 @@ class TaskManager(TaskBase):
         return None
 
     @timeit
-    def start_task(self, task, instance_group, dependent_tasks=None, instance=None):
+    def start_task(self, task, instance_group, instance=None):
         # Just like for process_running_tasks, add the job to the dependency graph and
         # ask the TaskManagerInstanceGroups object to update consumed capacity on all
         # implicated instances and container groups.
@@ -524,14 +524,6 @@ class TaskManager(TaskBase):
         ScheduleTaskManager().schedule()
         from awx.main.tasks.system import handle_work_error, handle_work_success
 
-        dependent_tasks = dependent_tasks or []
-
-        task_actual = {
-            'type': get_type_for_model(type(task)),
-            'id': task.id,
-        }
-        dependencies = [{'type': get_type_for_model(type(t)), 'id': t.id} for t in dependent_tasks]
-
         task.status = 'waiting'
 
         (start_status, opts) = task.pre_start()
@@ -563,6 +555,7 @@ class TaskManager(TaskBase):
             # apply_async does a NOTIFY to the channel the dispatcher is listening to
             # postgres will treat this as part of the transaction, which is what we want
             if task.status != 'failed' and type(task) is not WorkflowJob:
+                task_actual = {'type': get_type_for_model(type(task)), 'id': task.id}
                 task_cls = task._get_task_class()
                 task_cls.apply_async(
                     [task.pk],
@@ -570,7 +563,7 @@ class TaskManager(TaskBase):
                     queue=task.get_queue_name(),
                     uuid=task.celery_task_id,
                     callbacks=[{'task': handle_work_success.name, 'kwargs': {'task_actual': task_actual}}],
-                    errbacks=[{'task': handle_work_error.name, 'args': [task.celery_task_id], 'kwargs': {'subtasks': [task_actual] + dependencies}}],
+                    errbacks=[{'task': handle_work_error.name, 'kwargs': {'task_actual': task_actual}}],
                 )
 
         # In exception cases, like a job failing pre-start checks, we send the websocket status message
@@ -609,7 +602,7 @@ class TaskManager(TaskBase):
             if isinstance(task, WorkflowJob):
                 # Previously we were tracking allow_simultaneous blocking both here and in DependencyGraph.
                 # Double check that using just the DependencyGraph works for Workflows and Sliced Jobs.
-                self.start_task(task, None, task.get_jobs_fail_chain(), None)
+                self.start_task(task, None, None)
                 continue
 
             found_acceptable_queue = False
@@ -637,7 +630,7 @@ class TaskManager(TaskBase):
                 execution_instance = self.tm_models.instances[control_instance.hostname].obj
                 task.log_lifecycle("controller_node_chosen")
                 task.log_lifecycle("execution_node_chosen")
-                self.start_task(task, self.controlplane_ig, task.get_jobs_fail_chain(), execution_instance)
+                self.start_task(task, self.controlplane_ig, execution_instance)
                 found_acceptable_queue = True
                 continue
 
@@ -645,7 +638,7 @@ class TaskManager(TaskBase):
             if not self.tm_models.instance_groups[instance_group.name].has_remaining_capacity(task):
                 continue
             if instance_group.is_container_group:
-                self.start_task(task, instance_group, task.get_jobs_fail_chain(), None)
+                self.start_task(task, instance_group, None)
                 found_acceptable_queue = True
                 break
 
@@ -670,7 +663,7 @@ class TaskManager(TaskBase):
                 )
                 execution_instance = self.tm_models.instances[execution_instance.hostname].obj
-                self.start_task(task, instance_group, task.get_jobs_fail_chain(), execution_instance)
+                self.start_task(task, instance_group, execution_instance)
                 found_acceptable_queue = True
                 break
             else:
@@ -52,6 +52,7 @@ from awx.main.constants import ACTIVE_STATES
 from awx.main.dispatch.publish import task
 from awx.main.dispatch import get_local_queuename, reaper
 from awx.main.utils.common import (
+    get_type_for_model,
     ignore_inventory_computed_fields,
     ignore_inventory_group_removal,
     ScheduleWorkflowManager,
@@ -720,45 +721,43 @@ def handle_work_success(task_actual):
 
 
 @task(queue=get_local_queuename)
-def handle_work_error(task_id, *args, **kwargs):
-    subtasks = kwargs.get('subtasks', None)
-    logger.debug('Executing error task id %s, subtasks: %s' % (task_id, str(subtasks)))
-    first_instance = None
-    first_instance_type = ''
-    if subtasks is not None:
-        for each_task in subtasks:
-            try:
-                instance = UnifiedJob.get_instance_by_type(each_task['type'], each_task['id'])
-                if not instance:
-                    # Unknown task type
-                    logger.warning("Unknown task type: {}".format(each_task['type']))
-                    continue
-            except ObjectDoesNotExist:
-                logger.warning('Missing {} `{}` in error callback.'.format(each_task['type'], each_task['id']))
-                continue
+def handle_work_error(task_actual):
+    try:
+        instance = UnifiedJob.get_instance_by_type(task_actual['type'], task_actual['id'])
+    except ObjectDoesNotExist:
+        logger.warning('Missing {} `{}` in error callback.'.format(task_actual['type'], task_actual['id']))
+        return
+    if not instance:
+        return
 
-            if first_instance is None:
-                first_instance = instance
-                first_instance_type = each_task['type']
+    subtasks = instance.get_jobs_fail_chain()  # reverse of dependent_jobs mostly
+    logger.debug(f'Executing error task id {task_actual["id"]}, subtasks: {[subtask.id for subtask in subtasks]}')
 
-            if instance.celery_task_id != task_id and not instance.cancel_flag and not instance.status in ('successful', 'failed'):
-                instance.status = 'failed'
-                instance.failed = True
-                if not instance.job_explanation:
-                    instance.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (
-                        first_instance_type,
-                        first_instance.name,
-                        first_instance.id,
-                    )
-                instance.save()
-                instance.websocket_emit_status("failed")
+    deps_of_deps = {}
+
+    for subtask in subtasks:
+        if subtask.celery_task_id != instance.celery_task_id and not subtask.cancel_flag and not subtask.status in ('successful', 'failed'):
+            # If there are multiple in the dependency chain, A->B->C, and this was called for A, blame B for clarity
+            blame_job = deps_of_deps.get(subtask.id, instance)
+            subtask.status = 'failed'
+            subtask.failed = True
+            if not subtask.job_explanation:
+                subtask.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (
+                    get_type_for_model(type(blame_job)),
+                    blame_job.name,
+                    blame_job.id,
+                )
+            subtask.save()
+            subtask.websocket_emit_status("failed")
+
+            for sub_subtask in subtask.get_jobs_fail_chain():
+                deps_of_deps[sub_subtask.id] = subtask
 
     # We only send 1 job complete message since all the job completion message
     # handling does is trigger the scheduler. If we extend the functionality of
     # what the job complete message handler does then we may want to send a
     # completion event for each job here.
-    if first_instance:
-        schedule_manager_success_or_error(first_instance)
+    schedule_manager_success_or_error(instance)
 
 
 @task(queue=get_local_queuename)
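The rewritten `handle_work_error` receives only the failed task and derives the fail chain itself; the `deps_of_deps` map is what lets a grandchild in a chain A->B->C be blamed on B rather than A. A minimal sketch of that blame propagation over plain objects (toy classes standing in for AWX's job models):

```python
# Toy sketch of the blame-chain logic above: each failed dependency is blamed
# on its *direct* parent, discovered one level ahead via ``deps_of_deps``.
from dataclasses import dataclass, field

@dataclass
class Task:
    name: str
    children: list = field(default_factory=list)  # stand-in for get_jobs_fail_chain()
    blamed_on: str = ''

    def fail_chain(self):
        out = list(self.children)
        for child in self.children:
            out.extend(child.fail_chain())
        return out

def propagate_failure(instance: Task):
    deps_of_deps = {}
    for subtask in instance.fail_chain():
        blame = deps_of_deps.get(id(subtask), instance)  # default: blame the root
        subtask.blamed_on = blame.name
        for sub_subtask in subtask.children:
            deps_of_deps[id(sub_subtask)] = subtask      # children blame *this* task

a, b, c = Task('A'), Task('B'), Task('C')
a.children, b.children = [b], [c]
propagate_failure(a)
assert (b.blamed_on, c.blamed_on) == ('A', 'B')  # C blames B, not A
```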
@@ -3,5 +3,6 @@
     "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "AWS_ACCESS_KEY_ID": "fooo",
     "AWS_SECRET_ACCESS_KEY": "fooo",
-    "AWS_SECURITY_TOKEN": "fooo"
+    "AWS_SECURITY_TOKEN": "fooo",
+    "AWS_SESSION_TOKEN": "fooo"
 }
@@ -171,13 +171,17 @@ class TestKeyRegeneration:
 
     def test_use_custom_key_with_empty_tower_secret_key_env_var(self):
         os.environ['TOWER_SECRET_KEY'] = ''
-        new_key = call_command('regenerate_secret_key', '--use-custom-key')
-        assert settings.SECRET_KEY != new_key
+        with pytest.raises(SystemExit) as e:
+            call_command('regenerate_secret_key', '--use-custom-key')
+        assert e.type == SystemExit
+        assert e.value.code == 1
 
     def test_use_custom_key_with_no_tower_secret_key_env_var(self):
         os.environ.pop('TOWER_SECRET_KEY', None)
-        new_key = call_command('regenerate_secret_key', '--use-custom-key')
-        assert settings.SECRET_KEY != new_key
+        with pytest.raises(SystemExit) as e:
+            call_command('regenerate_secret_key', '--use-custom-key')
+        assert e.type == SystemExit
+        assert e.value.code == 1
 
     def test_with_tower_secret_key_env_var(self):
         custom_key = 'MXSq9uqcwezBOChl/UfmbW1k4op+bC+FQtwPqgJ1u9XV'
@@ -23,7 +23,7 @@ def test_multi_group_basic_job_launch(instance_factory, controlplane_instance_group, mocker,
         mock_task_impact.return_value = 500
         with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
             TaskManager().schedule()
-            TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, [], i1), mock.call(j2, ig2, [], i2)])
+            TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
 
 
 @pytest.mark.django_db
@@ -54,7 +54,7 @@ def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group,
             DependencyManager().schedule()
             TaskManager().schedule()
             pu = p.project_updates.first()
-            TaskManager.start_task.assert_called_once_with(pu, controlplane_instance_group, [j1, j2], controlplane_instance_group.instances.all()[0])
+            TaskManager.start_task.assert_called_once_with(pu, controlplane_instance_group, controlplane_instance_group.instances.all()[0])
             pu.finished = pu.created + timedelta(seconds=1)
             pu.status = "successful"
             pu.save()
@@ -62,8 +62,8 @@ def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group,
             DependencyManager().schedule()
             TaskManager().schedule()
 
-            TaskManager.start_task.assert_any_call(j1, ig1, [], i1)
-            TaskManager.start_task.assert_any_call(j2, ig2, [], i2)
+            TaskManager.start_task.assert_any_call(j1, ig1, i1)
+            TaskManager.start_task.assert_any_call(j2, ig2, i2)
             assert TaskManager.start_task.call_count == 2
 
 
@@ -75,7 +75,7 @@ def test_workflow_job_no_instancegroup(workflow_job_template_factory, controlplane_instance_group, mocker):
     wfj.save()
     with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(wfj, None, [], None)
+        TaskManager.start_task.assert_called_once_with(wfj, None, None)
         assert wfj.instance_group is None
 
 
@@ -150,7 +150,7 @@ def test_failover_group_run(instance_factory, controlplane_instance_group, mocker,
         mock_task_impact.return_value = 500
         with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
            tm.schedule()
-            mock_job.assert_has_calls([mock.call(j1, ig1, [], i1), mock.call(j1_1, ig2, [], i2)])
+            mock_job.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j1_1, ig2, i2)])
             assert mock_job.call_count == 2
 
 
@@ -18,7 +18,7 @@ def test_single_job_scheduler_launch(hybrid_instance, controlplane_instance_group,
     j = create_job(objects.job_template)
     with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, [], instance)
+        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
@@ -240,12 +240,12 @@ def test_multi_jt_capacity_blocking(hybrid_instance, job_template_factory, mocker):
         mock_task_impact.return_value = 505
         with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
             tm.schedule()
-            mock_job.assert_called_once_with(j1, controlplane_instance_group, [], instance)
+            mock_job.assert_called_once_with(j1, controlplane_instance_group, instance)
             j1.status = "successful"
             j1.save()
     with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
         tm.schedule()
-        mock_job.assert_called_once_with(j2, controlplane_instance_group, [], instance)
+        mock_job.assert_called_once_with(j2, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
@@ -337,12 +337,12 @@ def test_single_job_dependencies_project_launch(controlplane_instance_group, job_template_factory,
     pu = [x for x in p.project_updates.all()]
     assert len(pu) == 1
     TaskManager().schedule()
-    TaskManager.start_task.assert_called_once_with(pu[0], controlplane_instance_group, [j], instance)
+    TaskManager.start_task.assert_called_once_with(pu[0], controlplane_instance_group, instance)
     pu[0].status = "successful"
     pu[0].save()
     with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, [], instance)
+        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
@@ -365,12 +365,12 @@ def test_single_job_dependencies_inventory_update_launch(controlplane_instance_group,
     iu = [x for x in ii.inventory_updates.all()]
    assert len(iu) == 1
     TaskManager().schedule()
-    TaskManager.start_task.assert_called_once_with(iu[0], controlplane_instance_group, [j], instance)
+    TaskManager.start_task.assert_called_once_with(iu[0], controlplane_instance_group, instance)
     iu[0].status = "successful"
     iu[0].save()
     with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, [], instance)
+        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
@@ -412,7 +412,7 @@ def test_job_dependency_with_already_updated(controlplane_instance_group, job_template_factory,
     mock_iu.assert_not_called()
     with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, [], instance)
+        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
@@ -442,9 +442,7 @@ def test_shared_dependencies_launch(controlplane_instance_group, job_template_factory,
     TaskManager().schedule()
     pu = p.project_updates.first()
     iu = ii.inventory_updates.first()
-    TaskManager.start_task.assert_has_calls(
-        [mock.call(iu, controlplane_instance_group, [j1, j2], instance), mock.call(pu, controlplane_instance_group, [j1, j2], instance)]
-    )
+    TaskManager.start_task.assert_has_calls([mock.call(iu, controlplane_instance_group, instance), mock.call(pu, controlplane_instance_group, instance)])
     pu.status = "successful"
     pu.finished = pu.created + timedelta(seconds=1)
     pu.save()
@@ -453,9 +451,7 @@ def test_shared_dependencies_launch(controlplane_instance_group, job_template_factory,
     iu.save()
     with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
-        TaskManager.start_task.assert_has_calls(
-            [mock.call(j1, controlplane_instance_group, [], instance), mock.call(j2, controlplane_instance_group, [], instance)]
-        )
+        TaskManager.start_task.assert_has_calls([mock.call(j1, controlplane_instance_group, instance), mock.call(j2, controlplane_instance_group, instance)])
     pu = [x for x in p.project_updates.all()]
     iu = [x for x in ii.inventory_updates.all()]
     assert len(pu) == 1
@@ -479,7 +475,7 @@ def test_job_not_blocking_project_update(controlplane_instance_group, job_template_factory,
         project_update.status = "pending"
         project_update.save()
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(project_update, controlplane_instance_group, [], instance)
+        TaskManager.start_task.assert_called_once_with(project_update, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
@@ -503,7 +499,7 @@ def test_job_not_blocking_inventory_update(controlplane_instance_group, job_template_factory,
 
         DependencyManager().schedule()
         TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(inventory_update, controlplane_instance_group, [], instance)
+        TaskManager.start_task.assert_called_once_with(inventory_update, controlplane_instance_group, instance)
 
 
 @pytest.mark.django_db
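These tests pin `start_task`'s new three-argument signature. For readers unfamiliar with the `wraps=` idiom used throughout, a self-contained sketch: the method is patched so every call is recorded, yet the real implementation still executes (toy class, not AWX's TaskManager):

```python
# Sketch of mock.patch.object(..., wraps=...) as used in the tests above.
from unittest import mock

class Scheduler:
    def start_task(self, task, instance_group, instance=None):
        return f"started {task}"

    def schedule(self):
        # a real scheduling loop would decide these arguments
        return self.start_task("job-1", "ig-1", instance="node-1")

sched = Scheduler()
with mock.patch.object(Scheduler, "start_task", wraps=sched.start_task) as mock_job:
    result = sched.schedule()
    mock_job.assert_called_once_with("job-1", "ig-1", instance="node-1")
    assert result == "started job-1"  # wraps= means the real method still ran
```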
@@ -5,8 +5,8 @@ import tempfile
 import shutil
 
 from awx.main.tasks.jobs import RunJob
-from awx.main.tasks.system import execution_node_health_check, _cleanup_images_and_files
-from awx.main.models import Instance, Job
+from awx.main.tasks.system import execution_node_health_check, _cleanup_images_and_files, handle_work_error
+from awx.main.models import Instance, Job, InventoryUpdate, ProjectUpdate
 
 
 @pytest.fixture
@@ -74,3 +74,17 @@ def test_does_not_run_reaped_job(mocker, mock_me):
     job.refresh_from_db()
     assert job.status == 'failed'
     mock_run.assert_not_called()
+
+
+@pytest.mark.django_db
+def test_handle_work_error_nested(project, inventory_source):
+    pu = ProjectUpdate.objects.create(status='failed', project=project, celery_task_id='1234')
+    iu = InventoryUpdate.objects.create(status='pending', inventory_source=inventory_source, source='scm')
+    job = Job.objects.create(status='pending')
+    iu.dependent_jobs.add(pu)
+    job.dependent_jobs.add(pu, iu)
+    handle_work_error({'type': 'project_update', 'id': pu.id})
+    iu.refresh_from_db()
+    job.refresh_from_db()
+    assert iu.job_explanation == f'Previous Task Failed: {{"job_type": "project_update", "job_name": "", "job_id": "{pu.id}"}}'
+    assert job.job_explanation == f'Previous Task Failed: {{"job_type": "inventory_update", "job_name": "", "job_id": "{iu.id}"}}'
@@ -114,7 +114,7 @@ if 'sqlite3' not in DATABASES['default']['ENGINE']:  # noqa
 # this needs to stay at the bottom of this file
 try:
     if os.getenv('AWX_KUBE_DEVEL', False):
-        include(optional('minikube.py'), scope=locals())
+        include(optional('development_kube.py'), scope=locals())
     else:
         include(optional('local_*.py'), scope=locals())
 except ImportError:
@@ -1,4 +1,4 @@
 BROADCAST_WEBSOCKET_SECRET = '🤖starscream🤖'
-BROADCAST_WEBSOCKET_PORT = 8013
+BROADCAST_WEBSOCKET_PORT = 8052
 BROADCAST_WEBSOCKET_VERIFY_CERT = False
 BROADCAST_WEBSOCKET_PROTOCOL = 'http'
79  awx/ui/package-lock.json  (generated)

@@ -8,7 +8,7 @@
     "dependencies": {
       "@lingui/react": "3.14.0",
       "@patternfly/patternfly": "4.217.1",
-      "@patternfly/react-core": "^4.250.1",
+      "@patternfly/react-core": "^4.264.0",
       "@patternfly/react-icons": "4.92.10",
       "@patternfly/react-table": "4.108.0",
       "ace-builds": "^1.10.1",
@@ -22,7 +22,7 @@
       "has-ansi": "5.0.1",
       "html-entities": "2.3.2",
       "js-yaml": "4.1.0",
-      "luxon": "^3.0.3",
+      "luxon": "^3.1.1",
       "prop-types": "^15.8.1",
       "react": "17.0.2",
       "react-ace": "^10.1.0",
@@ -3752,13 +3752,13 @@
       "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "node_modules/@patternfly/react-core": {
-      "version": "4.250.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
-      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
+      "version": "4.264.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.264.0.tgz",
+      "integrity": "sha512-tK0BMWxw8nhukev40HZ6q6d02pDnjX7oyA91vHa18aakJUKBWMaerqpG4NZVMoh0tPKX3aLNj+zyCwDALFAZZw==",
       "dependencies": {
-        "@patternfly/react-icons": "^4.92.6",
-        "@patternfly/react-styles": "^4.91.6",
-        "@patternfly/react-tokens": "^4.93.6",
+        "@patternfly/react-icons": "^4.93.0",
+        "@patternfly/react-styles": "^4.92.0",
+        "@patternfly/react-tokens": "^4.94.0",
         "focus-trap": "6.9.2",
        "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
@@ -3769,6 +3769,15 @@
         "react-dom": "^16.8 || ^17 || ^18"
       }
     },
+    "node_modules/@patternfly/react-core/node_modules/@patternfly/react-icons": {
+      "version": "4.93.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.93.0.tgz",
+      "integrity": "sha512-OH0vORVioL+HLWMEog8/3u8jsiMCeJ0pFpvRKRhy5Uk4CdAe40k1SOBvXJP6opr+O8TLbz0q3bm8Jsh/bPaCuQ==",
+      "peerDependencies": {
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
+      }
+    },
     "node_modules/@patternfly/react-core/node_modules/tslib": {
       "version": "2.3.1",
       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -3784,9 +3793,9 @@
       }
     },
     "node_modules/@patternfly/react-styles": {
-      "version": "4.91.10",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
-      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
+      "version": "4.92.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.92.0.tgz",
+      "integrity": "sha512-B/f6iyu8UEN1+wRxdC4sLIhvJeyL8SqInDXZmwOIqK8uPJ8Lze7qrbVhkkVzbMF37/oDPVa6dZH8qZFq062LEA=="
     },
     "node_modules/@patternfly/react-table": {
       "version": "4.108.0",
@@ -3811,9 +3820,9 @@
       "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
     },
     "node_modules/@patternfly/react-tokens": {
-      "version": "4.93.10",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
-      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
+      "version": "4.94.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.94.0.tgz",
+      "integrity": "sha512-fYXxUJZnzpn89K2zzHF0cSncZZVGKrohdb5f5T1wzxwU2NZPVGpvr88xhm+V2Y/fSrrTPwXcP3IIdtNOOtJdZw=="
     },
     "node_modules/@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -15468,9 +15477,9 @@
       }
     },
     "node_modules/luxon": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.0.3.tgz",
-      "integrity": "sha512-+EfHWnF+UT7GgTnq5zXg3ldnTKL2zdv7QJgsU5bjjpbH17E3qi/puMhQyJVYuCq+FRkogvB5WB6iVvUr+E4a7w==",
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.1.1.tgz",
+      "integrity": "sha512-Ah6DloGmvseB/pX1cAmjbFvyU/pKuwQMQqz7d0yvuDlVYLTs2WeDHQMpC8tGjm1da+BriHROW/OEIT/KfYg6xw==",
       "engines": {
         "node": ">=12"
       }
@@ -25094,19 +25103,25 @@
       "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "@patternfly/react-core": {
-      "version": "4.250.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
-      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
+      "version": "4.264.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.264.0.tgz",
+      "integrity": "sha512-tK0BMWxw8nhukev40HZ6q6d02pDnjX7oyA91vHa18aakJUKBWMaerqpG4NZVMoh0tPKX3aLNj+zyCwDALFAZZw==",
       "requires": {
-        "@patternfly/react-icons": "^4.92.6",
-        "@patternfly/react-styles": "^4.91.6",
-        "@patternfly/react-tokens": "^4.93.6",
+        "@patternfly/react-icons": "^4.93.0",
+        "@patternfly/react-styles": "^4.92.0",
+        "@patternfly/react-tokens": "^4.94.0",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
         "tslib": "^2.0.0"
       },
       "dependencies": {
+        "@patternfly/react-icons": {
+          "version": "4.93.0",
+          "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.93.0.tgz",
+          "integrity": "sha512-OH0vORVioL+HLWMEog8/3u8jsiMCeJ0pFpvRKRhy5Uk4CdAe40k1SOBvXJP6opr+O8TLbz0q3bm8Jsh/bPaCuQ==",
+          "requires": {}
+        },
         "tslib": {
           "version": "2.3.1",
           "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -25121,9 +25136,9 @@
       "requires": {}
     },
     "@patternfly/react-styles": {
-      "version": "4.91.10",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
-      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
+      "version": "4.92.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.92.0.tgz",
+      "integrity": "sha512-B/f6iyu8UEN1+wRxdC4sLIhvJeyL8SqInDXZmwOIqK8uPJ8Lze7qrbVhkkVzbMF37/oDPVa6dZH8qZFq062LEA=="
     },
     "@patternfly/react-table": {
       "version": "4.108.0",
@@ -25146,9 +25161,9 @@
       }
     },
     "@patternfly/react-tokens": {
-      "version": "4.93.10",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
-      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
+      "version": "4.94.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.94.0.tgz",
+      "integrity": "sha512-fYXxUJZnzpn89K2zzHF0cSncZZVGKrohdb5f5T1wzxwU2NZPVGpvr88xhm+V2Y/fSrrTPwXcP3IIdtNOOtJdZw=="
     },
     "@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -34210,9 +34225,9 @@
       }
     },
     "luxon": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.0.3.tgz",
-      "integrity": "sha512-+EfHWnF+UT7GgTnq5zXg3ldnTKL2zdv7QJgsU5bjjpbH17E3qi/puMhQyJVYuCq+FRkogvB5WB6iVvUr+E4a7w=="
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.1.1.tgz",
+      "integrity": "sha512-Ah6DloGmvseB/pX1cAmjbFvyU/pKuwQMQqz7d0yvuDlVYLTs2WeDHQMpC8tGjm1da+BriHROW/OEIT/KfYg6xw=="
     },
     "lz-string": {
       "version": "1.4.4",
@@ -8,7 +8,7 @@
   "dependencies": {
     "@lingui/react": "3.14.0",
     "@patternfly/patternfly": "4.217.1",
-    "@patternfly/react-core": "^4.250.1",
+    "@patternfly/react-core": "^4.264.0",
     "@patternfly/react-icons": "4.92.10",
     "@patternfly/react-table": "4.108.0",
     "ace-builds": "^1.10.1",
@@ -22,7 +22,7 @@
     "has-ansi": "5.0.1",
     "html-entities": "2.3.2",
     "js-yaml": "4.1.0",
-    "luxon": "^3.0.3",
+    "luxon": "^3.1.1",
     "prop-types": "^15.8.1",
     "react": "17.0.2",
     "react-ace": "^10.1.0",
@@ -153,6 +153,10 @@ function CredentialsStep({
         }))}
         value={selectedType && selectedType.id}
         onChange={(e, id) => {
+          // Reset query params when the category of credentials is changed
+          history.replace({
+            search: '',
+          });
           setSelectedType(types.find((o) => o.id === parseInt(id, 10)));
         }}
       />
@@ -3,6 +3,7 @@ import { act } from 'react-dom/test-utils';
 import { Formik } from 'formik';
 import { CredentialsAPI, CredentialTypesAPI } from 'api';
 import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
+import { createMemoryHistory } from 'history';
 import CredentialsStep from './CredentialsStep';
 
 jest.mock('../../../api/models/CredentialTypes');
@@ -164,6 +165,41 @@ describe('CredentialsStep', () => {
     });
   });
 
+  test('should reset query params (credential.page) when selected credential type is changed', async () => {
+    let wrapper;
+    const history = createMemoryHistory({
+      initialEntries: ['?credential.page=2'],
+    });
+    await act(async () => {
+      wrapper = mountWithContexts(
+        <Formik>
+          <CredentialsStep allowCredentialsWithPasswords />
+        </Formik>,
+        {
+          context: { router: { history } },
+        }
+      );
+    });
+    wrapper.update();
+
+    expect(CredentialsAPI.read).toHaveBeenCalledWith({
+      credential_type: 1,
+      order_by: 'name',
+      page: 2,
+      page_size: 5,
+    });
+
+    await act(async () => {
+      wrapper.find('AnsibleSelect').invoke('onChange')({}, 3);
+    });
+    expect(CredentialsAPI.read).toHaveBeenCalledWith({
+      credential_type: 3,
+      order_by: 'name',
+      page: 1,
+      page_size: 5,
+    });
+  });
+
   test("error should be shown when a credential that prompts for passwords is selected on a step that doesn't allow it", async () => {
     let wrapper;
     await act(async () => {
@@ -173,6 +173,10 @@ function MultiCredentialsLookup({
             }))}
             value={selectedType && selectedType.id}
             onChange={(e, id) => {
+              // Reset query params when the category of credentials is changed
+              history.replace({
+                search: '',
+              });
               setSelectedType(
                 credentialTypes.find((o) => o.id === parseInt(id, 10))
               );
@@ -6,6 +6,7 @@ import {
   mountWithContexts,
   waitForElement,
 } from '../../../testUtils/enzymeHelpers';
+import { createMemoryHistory } from 'history';
 import MultiCredentialsLookup from './MultiCredentialsLookup';
 
 jest.mock('../../api');
@@ -228,6 +229,53 @@ describe('<Formik><MultiCredentialsLookup /></Formik>', () => {
     ]);
   });
 
+  test('should reset query params (credentials.page) when selected credential type is changed', async () => {
+    const history = createMemoryHistory({
+      initialEntries: ['?credentials.page=2'],
+    });
+    await act(async () => {
+      wrapper = mountWithContexts(
+        <Formik>
+          <MultiCredentialsLookup
+            value={credentials}
+            tooltip="This is credentials look up"
+            onChange={() => {}}
+            onError={() => {}}
+          />
+        </Formik>,
+        {
+          context: { router: { history } },
+        }
+      );
+    });
+    const searchButton = await waitForElement(
+      wrapper,
+      'Button[aria-label="Search"]'
+    );
+    await act(async () => {
+      searchButton.invoke('onClick')();
+    });
+    expect(CredentialsAPI.read).toHaveBeenCalledWith({
+      credential_type: 400,
+      order_by: 'name',
+      page: 2,
+      page_size: 5,
+    });
+
+    const select = await waitForElement(wrapper, 'AnsibleSelect');
+    await act(async () => {
+      select.invoke('onChange')({}, 500);
+    });
+    wrapper.update();
+
+    expect(CredentialsAPI.read).toHaveBeenCalledWith({
+      credential_type: 500,
+      order_by: 'name',
+      page: 1,
+      page_size: 5,
+    });
+  });
+
   test('should only add 1 credential per credential type except vault(see below)', async () => {
     const onChange = jest.fn();
     await act(async () => {
@@ -465,7 +465,7 @@
     },
     "created": "2020-05-18T21:53:35.370730Z",
     "modified": "2020-05-18T21:54:05.436400Z",
-    "name": "CyberArk AIM Central Credential Provider Lookup",
+    "name": "CyberArk Central Credential Provider Lookup",
     "description": "",
     "kind": "external",
     "namespace": "aim",
@@ -24,6 +24,7 @@ function WorkflowJobTemplateAdd() {
       limit,
       job_tags,
       skip_tags,
+      scm_branch,
       ...templatePayload
     } = values;
     templatePayload.inventory = inventory?.id;
@@ -32,6 +33,7 @@ function WorkflowJobTemplateAdd() {
     templatePayload.limit = limit === '' ? null : limit;
     templatePayload.job_tags = job_tags === '' ? null : job_tags;
     templatePayload.skip_tags = skip_tags === '' ? null : skip_tags;
+    templatePayload.scm_branch = scm_branch === '' ? null : scm_branch;
     const organizationId =
       organization?.id || inventory?.summary_fields?.organization.id;
     try {
@@ -119,7 +119,7 @@ describe('<WorkflowJobTemplateAdd/>', () => {
       job_tags: null,
       limit: null,
       organization: undefined,
-      scm_branch: '',
+      scm_branch: null,
       skip_tags: null,
       webhook_credential: undefined,
       webhook_service: '',
@@ -30,6 +30,7 @@ function WorkflowJobTemplateEdit({ template }) {
       limit,
       job_tags,
       skip_tags,
+      scm_branch,
       ...templatePayload
     } = values;
     templatePayload.inventory = inventory?.id || null;
@@ -38,6 +39,7 @@ function WorkflowJobTemplateEdit({ template }) {
     templatePayload.limit = limit === '' ? null : limit;
     templatePayload.job_tags = job_tags === '' ? null : job_tags;
     templatePayload.skip_tags = skip_tags === '' ? null : skip_tags;
+    templatePayload.scm_branch = scm_branch === '' ? null : scm_branch;
 
     const formOrgId =
       organization?.id || inventory?.summary_fields?.organization.id || null;
@@ -7,7 +7,6 @@ __metaclass__ = type
 
 DOCUMENTATION = '''
     name: controller
-    plugin_type: inventory
     author:
       - Matthew Jones (@matburt)
       - Yunfan Zhang (@YunfanZhang42)
@@ -5,7 +5,7 @@ from __future__ import absolute_import, division, print_function
 __metaclass__ = type
 
 DOCUMENTATION = """
-lookup: controller_api
+name: controller_api
 author: John Westcott IV (@john-westcott-iv)
 short_description: Search the API for objects
 requirements:
@@ -74,7 +74,7 @@ EXAMPLES = """
 
 - name: Load the UI settings specifying the connection info
   set_fact:
-    controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/ui' host='controller.example.com',
+    controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/ui', host='controller.example.com',
                              username='admin', password=my_pass_var, verify_ssl=False) }}"
 
 - name: Report the usernames of all users with admin privs
@@ -5,7 +5,7 @@ from __future__ import absolute_import, division, print_function
 __metaclass__ = type
 
 DOCUMENTATION = """
-lookup: schedule_rrule
+name: schedule_rrule
 author: John Westcott IV (@john-westcott-iv)
 short_description: Generate an rrule string which can be used for Schedules
 requirements:
@@ -101,39 +101,39 @@ else:
 
 
 class LookupModule(LookupBase):
-    frequencies = {
-        'none': rrule.DAILY,
-        'minute': rrule.MINUTELY,
-        'hour': rrule.HOURLY,
-        'day': rrule.DAILY,
-        'week': rrule.WEEKLY,
-        'month': rrule.MONTHLY,
-    }
-
-    weekdays = {
-        'monday': rrule.MO,
-        'tuesday': rrule.TU,
-        'wednesday': rrule.WE,
-        'thursday': rrule.TH,
-        'friday': rrule.FR,
-        'saturday': rrule.SA,
-        'sunday': rrule.SU,
-    }
-
-    set_positions = {
-        'first': 1,
-        'second': 2,
-        'third': 3,
-        'fourth': 4,
-        'last': -1,
-    }
-
     # plugin constructor
     def __init__(self, *args, **kwargs):
         if LIBRARY_IMPORT_ERROR:
             raise_from(AnsibleError('{0}'.format(LIBRARY_IMPORT_ERROR)), LIBRARY_IMPORT_ERROR)
         super().__init__(*args, **kwargs)
+
+        self.frequencies = {
+            'none': rrule.DAILY,
+            'minute': rrule.MINUTELY,
+            'hour': rrule.HOURLY,
+            'day': rrule.DAILY,
+            'week': rrule.WEEKLY,
+            'month': rrule.MONTHLY,
+        }
+
+        self.weekdays = {
+            'monday': rrule.MO,
+            'tuesday': rrule.TU,
+            'wednesday': rrule.WE,
+            'thursday': rrule.TH,
+            'friday': rrule.FR,
+            'saturday': rrule.SA,
+            'sunday': rrule.SU,
+        }
+
+        self.set_positions = {
+            'first': 1,
+            'second': 2,
+            'third': 3,
+            'fourth': 4,
+            'last': -1,
+        }
 
     @staticmethod
     def parse_date_time(date_string):
         try:
@@ -149,14 +149,13 @@ class LookupModule(LookupBase):
 
         return self.get_rrule(frequency, kwargs)
 
-    @staticmethod
-    def get_rrule(frequency, kwargs):
+    def get_rrule(self, frequency, kwargs):
 
-        if frequency not in LookupModule.frequencies:
+        if frequency not in self.frequencies:
             raise AnsibleError('Frequency of {0} is invalid'.format(frequency))
 
         rrule_kwargs = {
-            'freq': LookupModule.frequencies[frequency],
+            'freq': self.frequencies[frequency],
             'interval': kwargs.get('every', 1),
         }
 
@@ -187,9 +186,9 @@ class LookupModule(LookupBase):
             days = []
             for day in kwargs['on_days'].split(','):
                 day = day.strip()
-                if day not in LookupModule.weekdays:
-                    raise AnsibleError('Parameter on_days must only contain values {0}'.format(', '.join(LookupModule.weekdays.keys())))
-                days.append(LookupModule.weekdays[day])
+                if day not in self.weekdays:
+                    raise AnsibleError('Parameter on_days must only contain values {0}'.format(', '.join(self.weekdays.keys())))
+                days.append(self.weekdays[day])
 
             rrule_kwargs['byweekday'] = days
 
@@ -214,13 +213,13 @@ class LookupModule(LookupBase):
             except Exception as e:
                 raise_from(AnsibleError('on_the parameter must be two words separated by a space'), e)
 
-            if weekday not in LookupModule.weekdays:
+            if weekday not in self.weekdays:
                 raise AnsibleError('Weekday portion of on_the parameter is not valid')
-            if occurance not in LookupModule.set_positions:
+            if occurance not in self.set_positions:
                 raise AnsibleError('The first string of the on_the parameter is not valid')
 
-            rrule_kwargs['byweekday'] = LookupModule.weekdays[weekday]
-            rrule_kwargs['bysetpos'] = LookupModule.set_positions[occurance]
+            rrule_kwargs['byweekday'] = self.weekdays[weekday]
+            rrule_kwargs['bysetpos'] = self.set_positions[occurance]
 
             my_rule = rrule.rrule(**rrule_kwargs)
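Both lookup plugins move their rrule tables from class attributes to instance attributes assigned in `__init__`. The reason: class-level attributes evaluate `rrule.*` at import time, which would crash module loading whenever dateutil is absent; the guard defers the failure to plugin construction, where a clean error can be raised. A standalone sketch of the pattern (the error type and dict contents are illustrative):

```python
# Deferred-import-error pattern: never let a missing optional dependency
# break module import; surface it when the class is actually constructed.
try:
    from dateutil import rrule  # optional dependency, as in the plugins above
except ImportError as imp_exc:
    LIBRARY_IMPORT_ERROR = imp_exc
else:
    LIBRARY_IMPORT_ERROR = None


class LookupModule:
    def __init__(self, *args, **kwargs):
        if LIBRARY_IMPORT_ERROR:
            # Fail here, at construction, chaining the original import error
            raise RuntimeError(str(LIBRARY_IMPORT_ERROR)) from LIBRARY_IMPORT_ERROR
        super().__init__(*args, **kwargs)
        # Safe: rrule is importable if we got this far
        self.frequencies = {'day': rrule.DAILY, 'week': rrule.WEEKLY}
```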
@@ -5,7 +5,7 @@ from __future__ import absolute_import, division, print_function
 __metaclass__ = type
 
 DOCUMENTATION = """
-lookup: schedule_rruleset
+name: schedule_rruleset
 author: John Westcott IV (@john-westcott-iv)
 short_description: Generate an rruleset string
 requirements:
@@ -31,7 +31,8 @@ DOCUMENTATION = """
     rules:
       description:
         - Array of rules in the rruleset
-      type: array
+      type: list
+      elements: dict
       required: True
       suboptions:
         frequency:
@@ -136,40 +137,44 @@ try:
     import pytz
     from dateutil import rrule
 except ImportError as imp_exc:
-    raise_from(AnsibleError('{0}'.format(imp_exc)), imp_exc)
+    LIBRARY_IMPORT_ERROR = imp_exc
+else:
+    LIBRARY_IMPORT_ERROR = None
 
 
 class LookupModule(LookupBase):
-    frequencies = {
-        'none': rrule.DAILY,
-        'minute': rrule.MINUTELY,
-        'hour': rrule.HOURLY,
-        'day': rrule.DAILY,
-        'week': rrule.WEEKLY,
-        'month': rrule.MONTHLY,
-    }
-
-    weekdays = {
-        'monday': rrule.MO,
-        'tuesday': rrule.TU,
-        'wednesday': rrule.WE,
-        'thursday': rrule.TH,
-        'friday': rrule.FR,
-        'saturday': rrule.SA,
-        'sunday': rrule.SU,
-    }
-
-    set_positions = {
-        'first': 1,
-        'second': 2,
-        'third': 3,
-        'fourth': 4,
-        'last': -1,
-    }
-
     # plugin constructor
     def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+        if LIBRARY_IMPORT_ERROR:
+            raise_from(AnsibleError('{0}'.format(LIBRARY_IMPORT_ERROR)), LIBRARY_IMPORT_ERROR)
+        super().__init__(*args, **kwargs)
+
+        self.frequencies = {
+            'none': rrule.DAILY,
+            'minute': rrule.MINUTELY,
+            'hour': rrule.HOURLY,
+            'day': rrule.DAILY,
+            'week': rrule.WEEKLY,
+            'month': rrule.MONTHLY,
+        }
+
+        self.weekdays = {
+            'monday': rrule.MO,
+            'tuesday': rrule.TU,
+            'wednesday': rrule.WE,
+            'thursday': rrule.TH,
+            'friday': rrule.FR,
+            'saturday': rrule.SA,
+            'sunday': rrule.SU,
+        }
+
+        self.set_positions = {
+            'first': 1,
+            'second': 2,
+            'third': 3,
+            'fourth': 4,
+            'last': -1,
+        }
 
     @staticmethod
     def parse_date_time(date_string):
@@ -188,14 +193,14 @@ class LookupModule(LookupBase):
|
||||
# something: [1,2,3] - A list of ints
|
||||
return_values = []
|
||||
# If they give us a single int, lets make it a list of ints
|
||||
if type(rule[field_name]) == int:
|
||||
if isinstance(rule[field_name], int):
|
||||
rule[field_name] = [rule[field_name]]
|
||||
# If its not a list, we need to split it into a list
|
||||
if type(rule[field_name]) != list:
|
||||
if isinstance(rule[field_name], list):
|
||||
rule[field_name] = rule[field_name].split(',')
|
||||
for value in rule[field_name]:
|
||||
# If they have a list of strs we want to strip the str incase its space delineated
|
||||
if type(value) == str:
|
||||
if isinstance(value, str):
|
||||
value = value.strip()
|
||||
# If value happens to be an int (from a list of ints) we need to coerce it into a str for the re.match
|
||||
if not re.match(r"^\d+$", str(value)) or int(value) < min_value or int(value) > max_value:
|
||||
@@ -205,7 +210,7 @@ class LookupModule(LookupBase):
|
||||
|
||||
def process_list(self, field_name, rule, valid_list, rule_number):
|
||||
return_values = []
|
||||
if type(rule[field_name]) != list:
|
||||
if isinstance(rule[field_name], list):
|
||||
rule[field_name] = rule[field_name].split(',')
|
||||
for value in rule[field_name]:
|
||||
value = value.strip()
|
||||
@@ -260,11 +265,11 @@ class LookupModule(LookupBase):
|
||||
frequency = rule.get('frequency', None)
|
||||
if not frequency:
|
||||
raise AnsibleError("Rule {0} is missing a frequency".format(rule_number))
|
||||
if frequency not in LookupModule.frequencies:
|
||||
if frequency not in self.frequencies:
|
||||
raise AnsibleError('Frequency of rule {0} is invalid {1}'.format(rule_number, frequency))
|
||||
|
||||
rrule_kwargs = {
|
||||
'freq': LookupModule.frequencies[frequency],
|
||||
'freq': self.frequencies[frequency],
|
||||
'interval': rule.get('interval', 1),
|
||||
'dtstart': start_date,
|
||||
}
|
||||
@@ -287,7 +292,7 @@ class LookupModule(LookupBase):
|
||||
)
|
||||
|
||||
if 'bysetpos' in rule:
|
||||
rrule_kwargs['bysetpos'] = self.process_list('bysetpos', rule, LookupModule.set_positions, rule_number)
|
||||
rrule_kwargs['bysetpos'] = self.process_list('bysetpos', rule, self.set_positions, rule_number)
|
||||
|
||||
if 'bymonth' in rule:
|
||||
rrule_kwargs['bymonth'] = self.process_integer('bymonth', rule, 1, 12, rule_number)
|
||||
@@ -302,7 +307,7 @@ class LookupModule(LookupBase):
|
||||
rrule_kwargs['byweekno'] = self.process_integer('byweekno', rule, 1, 52, rule_number)
|
||||
|
||||
if 'byweekday' in rule:
|
||||
rrule_kwargs['byweekday'] = self.process_list('byweekday', rule, LookupModule.weekdays, rule_number)
|
||||
rrule_kwargs['byweekday'] = self.process_list('byweekday', rule, self.weekdays, rule_number)
|
||||
|
||||
if 'byhour' in rule:
|
||||
rrule_kwargs['byhour'] = self.process_integer('byhour', rule, 0, 23, rule_number)
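
A minimal usage sketch for the refactored `schedule_rruleset` lookup, using only suboptions that appear in the hunks above (the date and rule values are illustrative):

```yaml
- name: Build an rruleset that recurs on weekdays only
  ansible.builtin.set_fact:
    weekday_ruleset: "{{ query('awx.awx.schedule_rruleset', '2023-01-16 09:00:00', rules=[{'frequency': 'day', 'interval': 1, 'byweekday': 'monday,tuesday,wednesday,thursday,friday'}]) }}"
```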
@@ -4,6 +4,7 @@ __metaclass__ = type

from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.urls import Request, SSLValidationError, ConnectionError
from ansible.module_utils.parsing.convert_bool import boolean as strtobool
from ansible.module_utils.six import PY2
from ansible.module_utils.six import raise_from, string_types
from ansible.module_utils.six.moves import StringIO
@@ -11,14 +12,21 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible.module_utils.six.moves.http_cookiejar import CookieJar
from ansible.module_utils.six.moves.urllib.parse import urlparse, urlencode
from ansible.module_utils.six.moves.configparser import ConfigParser, NoOptionError
from distutils.version import LooseVersion as Version
from socket import getaddrinfo, IPPROTO_TCP
import time
import re
from json import loads, dumps
from os.path import isfile, expanduser, split, join, exists, isdir
from os import access, R_OK, getcwd
from distutils.util import strtobool


try:
from ansible.module_utils.compat.version import LooseVersion as Version
except ImportError:
try:
from distutils.version import LooseVersion as Version
except ImportError:
raise AssertionError('To use this plugin or module with ansible-core 2.11, you need to use Python < 3.12 with distutils.version present')

try:
import yaml

@@ -55,7 +55,6 @@ options:
description:
- The arguments to pass to the module.
type: str
default: ""
forks:
description:
- The number of forks to use for this ad hoc execution.

@@ -42,6 +42,7 @@ options:
- Maximum time in seconds to wait for a job to finish.
- Not specifying means the task will wait until the controller cancels the command.
type: int
default: 0
extends_documentation_fragment: awx.awx.auth
'''
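
A sketch of the `timeout` option documented above; the module invocation and command ID are assumptions for illustration (omitting `timeout` waits until the controller cancels the command):

```yaml
- name: Wait up to five minutes for an ad hoc command to finish
  awx.awx.ad_hoc_command_wait:
    command_id: 99
    timeout: 300
```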
@@ -52,7 +52,7 @@ options:
- The credential type being created.
- Can be a built-in credential type such as "Machine", or a custom credential type such as "My Credential Type"
- Choices include Amazon Web Services, Ansible Galaxy/Automation Hub API Token, Centrify Vault Credential Provider Lookup,
Container Registry, CyberArk AIM Central Credential Provider Lookup, CyberArk Conjur Secrets Manager Lookup, Google Compute Engine,
Container Registry, CyberArk Central Credential Provider Lookup, CyberArk Conjur Secret Lookup, Google Compute Engine,
GitHub Personal Access Token, GitLab Personal Access Token, GPG Public Key, HashiCorp Vault Secret Lookup, HashiCorp Vault Signed SSH,
Insights, Machine, Microsoft Azure Key Vault, Microsoft Azure Resource Manager, Network, OpenShift or Kubernetes API
Bearer Token, OpenStack, Red Hat Ansible Automation Platform, Red Hat Satellite 6, Red Hat Virtualization, Source Control,
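
A sketch of creating a credential with one of the renamed type choices; the inputs mirror the integration test further down and are illustrative:

```yaml
- name: Create a CyberArk CCP lookup credential
  awx.awx.credential:
    name: ccp_lookup_credential
    organization: Default
    credential_type: CyberArk Central Credential Provider Lookup
    inputs:
      url: "https://cyberark.example.com"
      app_id: "My-App-ID"
```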
@@ -80,9 +80,9 @@ def main():
name=dict(required=True),
new_name=dict(),
image=dict(required=True),
description=dict(default=''),
description=dict(),
organization=dict(),
credential=dict(default=''),
credential=dict(),
state=dict(choices=['present', 'absent'], default='present'),
pull=dict(choices=['always', 'missing', 'never'], default='missing'),
)
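
With `description` and `credential` no longer defaulting to an empty string, omitting them should leave existing values untouched. A minimal sketch (the name and image are illustrative):

```yaml
- name: Create an execution environment without clobbering its description
  awx.awx.execution_environment:
    name: my_ee
    image: quay.io/example/ee:latest
```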
@@ -86,6 +86,16 @@ options:
- workflow names to export
type: list
elements: str
applications:
description:
- OAuth2 application names to export
type: list
elements: str
schedules:
description:
- schedule names to export
type: list
elements: str
requirements:
- "awxkit >= 9.3.0"
notes:
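
A sketch of exporting the newly supported OAuth2 applications by name (the application name is illustrative):

```yaml
- name: Export selected OAuth2 applications
  awx.awx.export:
    applications:
      - my_oauth2_app
```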
@@ -266,6 +266,7 @@ options:
description:
- Maximum time in seconds to wait for a job to finish (server-side).
type: int
default: 0
job_slice_count:
description:
- The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.
@@ -287,7 +288,6 @@ options:
description:
- Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.
type: str
default: ''
labels:
description:
- The labels applied to this job template

@@ -60,12 +60,10 @@ options:
description:
- The branch to use for the SCM resource.
type: str
default: ''
scm_refspec:
description:
- The refspec to use for the SCM resource.
type: str
default: ''
credential:
description:
- Name of the credential to use with this SCM resource.

@@ -51,7 +51,6 @@ options:
- Specify C(extra_vars) for the template.
required: False
type: dict
default: {}
forks:
description:
- Forks applied as a prompt, assuming job template prompts for forks

@@ -39,6 +39,7 @@ options:
- Note This is a client side search, not an API side search
required: False
type: dict
default: {}
extends_documentation_fragment: awx.awx.auth
'''

@@ -35,7 +35,6 @@ options:
- Optional description of this access token.
required: False
type: str
default: ''
application:
description:
- The application tied to this token.
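
A sketch of the `token` module whose `description` option loses its empty-string default above (the scope value is an assumption for illustration):

```yaml
- name: Create an access token
  awx.awx.token:
    description: Token for CI jobs
    scope: write
    state: present
```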
@@ -214,7 +214,8 @@ options:
type: int
job_slice_count:
description:
- The number of jobs to slice into at runtime, if job template prompts for job slices. Will cause the Job Template to launch a workflow if value is greater than 1.
- The number of jobs to slice into at runtime, if job template prompts for job slices.
- Will cause the Job Template to launch a workflow if value is greater than 1.
type: int
default: '1'
timeout:
@@ -328,42 +329,46 @@ options:
- Nodes that will run after this node completes.
- List of node identifiers.
type: list
elements: dict
suboptions:
identifier:
description:
- Identifier of Node that will run after this node completes given this option.
elements: str
type: str
success_nodes:
description:
- Nodes that will run after this node on success.
- List of node identifiers.
type: list
elements: dict
suboptions:
identifier:
description:
- Identifier of Node that will run after this node completes given this option.
elements: str
type: str
failure_nodes:
description:
- Nodes that will run after this node on failure.
- List of node identifiers.
type: list
elements: dict
suboptions:
identifier:
description:
- Identifier of Node that will run after this node completes given this option.
elements: str
type: str
credentials:
description:
- Credentials to be applied to job as launch-time prompts.
- List of credential names.
- Uniqueness is not handled rigorously.
type: list
elements: dict
suboptions:
name:
description:
- Name Credentials to be applied to job as launch-time prompts.
elements: str
type: str
organization:
description:
- Name of key for use in model for organizational reference
@@ -379,11 +384,12 @@ options:
- List of Label names.
- Uniqueness is not handled rigorously.
type: list
elements: dict
suboptions:
name:
description:
- Name Labels to be applied to job as launch-time prompts.
elements: str
type: str
organization:
description:
- Name of key for use in model for organizational reference
@@ -399,11 +405,12 @@ options:
- List of Instance group names.
- Uniqueness is not handled rigorously.
type: list
elements: dict
suboptions:
name:
description:
- Name of Instance groups to be applied to job as launch-time prompts.
elements: str
type: str
destroy_current_nodes:
description:
- Set in order to destroy current workflow_nodes on the workflow.
@@ -789,6 +796,7 @@ def main():
allow_simultaneous=dict(type='bool'),
ask_variables_on_launch=dict(type='bool'),
ask_labels_on_launch=dict(type='bool', aliases=['ask_labels']),
ask_tags_on_launch=dict(type='bool', aliases=['ask_tags']),
ask_skip_tags_on_launch=dict(type='bool', aliases=['ask_skip_tags']),
inventory=dict(),
limit=dict(),
@@ -873,6 +881,7 @@ def main():
'ask_limit_on_launch',
'ask_variables_on_launch',
'ask_labels_on_launch',
'ask_tags_on_launch',
'ask_skip_tags_on_launch',
'webhook_service',
'job_tags',
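
A sketch exercising the newly added `ask_labels_on_launch`, `ask_tags_on_launch`, and `ask_skip_tags_on_launch` options (the workflow name and organization are illustrative):

```yaml
- name: Create a workflow job template that prompts for labels and tags
  awx.awx.workflow_job_template:
    name: my-workflow
    organization: Default
    ask_labels_on_launch: true
    ask_tags_on_launch: true
    ask_skip_tags_on_launch: true
```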
@@ -30,7 +30,6 @@ options:
- Variables to apply at launch time.
- Will only be accepted if job template prompts for vars or has a survey asking for those vars.
type: dict
default: {}
inventory:
description:
- Inventory applied as a prompt, if job template prompts for inventory

@@ -159,7 +159,7 @@ def run_module(request, collection_import):
elif getattr(resource_module, 'TowerLegacyModule', None):
resource_class = resource_module.TowerLegacyModule
else:
raise ("The module has neither a TowerLegacyModule, ControllerAWXKitModule or a ControllerAPIModule")
raise RuntimeError("The module has neither a TowerLegacyModule, ControllerAWXKitModule or a ControllerAPIModule")

with mock.patch.object(resource_class, '_load_params', new=mock_load_params):
# Call the test utility (like a mock server) instead of issuing HTTP requests

@@ -14,7 +14,7 @@ def test_create_project(run_module, admin_user, organization, silence_warning):
dict(name='foo', organization=organization.name, scm_type='git', scm_url='https://foo.invalid', wait=False, scm_update_cache_timeout=5),
admin_user,
)
silence_warning.assert_called_once_with('scm_update_cache_timeout will be ignored since scm_update_on_launch ' 'was not set to true')
silence_warning.assert_called_once_with('scm_update_cache_timeout will be ignored since scm_update_on_launch was not set to true')

assert result.pop('changed', None), result


@@ -81,7 +81,7 @@ def test_delete_same_named_schedule(run_module, project, inventory, admin_user):
],
)
def test_rrule_lookup_plugin(collection_import, freq, kwargs, expect):
LookupModule = collection_import('plugins.lookup.schedule_rrule').LookupModule
LookupModule = collection_import('plugins.lookup.schedule_rrule').LookupModule()
generated_rule = LookupModule.get_rrule(freq, kwargs)
assert generated_rule == expect
rrule_checker = SchedulePreviewSerializer()
@@ -92,7 +92,7 @@ def test_rrule_lookup_plugin(collection_import, freq, kwargs, expect):

@pytest.mark.parametrize("freq", ('none', 'minute', 'hour', 'day', 'week', 'month'))
def test_empty_schedule_rrule(collection_import, freq):
LookupModule = collection_import('plugins.lookup.schedule_rrule').LookupModule
LookupModule = collection_import('plugins.lookup.schedule_rrule').LookupModule()
if freq == 'day':
pfreq = 'DAILY'
elif freq == 'none':
@@ -136,7 +136,7 @@ def test_empty_schedule_rrule(collection_import, freq):
],
)
def test_rrule_lookup_plugin_failure(collection_import, freq, kwargs, msg):
LookupModule = collection_import('plugins.lookup.schedule_rrule').LookupModule
LookupModule = collection_import('plugins.lookup.schedule_rrule').LookupModule()
with pytest.raises(AnsibleError) as e:
assert LookupModule.get_rrule(freq, kwargs)
assert msg in str(e.value)

@@ -14,7 +14,7 @@
credential:
description: Credential for Testing Source
name: "{{ src_cred_name }}"
credential_type: CyberArk AIM Central Credential Provider Lookup
credential_type: CyberArk Central Credential Provider Lookup
inputs:
url: "https://cyberark.example.com"
app_id: "My-App-ID"
@@ -58,7 +58,7 @@
credential:
description: Credential for Testing Source Change
name: "{{ src_cred_name }}-2"
credential_type: CyberArk AIM Central Credential Provider Lookup
credential_type: CyberArk Central Credential Provider Lookup
inputs:
url: "https://cyberark-prod.example.com"
app_id: "My-App-ID"
@@ -92,7 +92,7 @@
credential:
name: "{{ src_cred_name }}"
organization: Default
credential_type: CyberArk AIM Central Credential Provider Lookup
credential_type: CyberArk Central Credential Provider Lookup
state: absent
register: result

@@ -100,7 +100,7 @@
credential:
name: "{{ src_cred_name }}-2"
organization: Default
credential_type: CyberArk AIM Central Credential Provider Lookup
credential_type: CyberArk Central Credential Provider Lookup
state: absent
register: result


@@ -245,6 +245,7 @@
ask_inventory_on_launch: true
ask_scm_branch_on_launch: true
ask_limit_on_launch: true
ask_tags_on_launch: true
ask_variables_on_launch: true
register: result

@@ -263,6 +264,7 @@
ask_inventory_on_launch: true
ask_scm_branch_on_launch: true
ask_limit_on_launch: true
ask_tags_on_launch: true
ask_variables_on_launch: true
register: bad_label_results
ignore_errors: true
@@ -278,6 +280,7 @@
ask_inventory_on_launch: false
ask_scm_branch_on_launch: false
ask_limit_on_launch: false
ask_tags_on_launch: false
ask_variables_on_launch: false
state: present

88
awx_collection/tests/sanity/ignore-2.13.txt
Normal file
@@ -0,0 +1,88 @@
plugins/module_utils/awxkit.py import-3.9
plugins/module_utils/controller_api.py import-3.9
plugins/modules/ad_hoc_command.py import-3.9
plugins/modules/ad_hoc_command_cancel.py import-3.9
plugins/modules/ad_hoc_command_wait.py import-3.9
plugins/modules/application.py import-3.9
plugins/modules/controller_meta.py import-3.9
plugins/modules/credential.py import-3.9
plugins/modules/credential_input_source.py import-3.9
plugins/modules/credential_type.py import-3.9
plugins/modules/execution_environment.py import-3.9
plugins/modules/export.py import-3.9
plugins/modules/group.py import-3.9
plugins/modules/host.py import-3.9
plugins/modules/import.py import-3.9
plugins/modules/instance.py import-3.9
plugins/modules/instance_group.py import-3.9
plugins/modules/inventory.py import-3.9
plugins/modules/inventory_source.py import-3.9
plugins/modules/inventory_source_update.py import-3.9
plugins/modules/job_cancel.py import-3.9
plugins/modules/job_launch.py import-3.9
plugins/modules/job_list.py import-3.9
plugins/modules/job_template.py import-3.9
plugins/modules/job_wait.py import-3.9
plugins/modules/label.py import-3.9
plugins/modules/license.py import-3.9
plugins/modules/notification_template.py import-3.9
plugins/modules/organization.py import-3.9
plugins/modules/project.py import-3.9
plugins/modules/project_update.py import-3.9
plugins/modules/role.py import-3.9
plugins/modules/schedule.py import-3.9
plugins/modules/settings.py import-3.9
plugins/modules/subscriptions.py import-3.9
plugins/modules/team.py import-3.9
plugins/modules/token.py import-3.9
plugins/modules/user.py import-3.9
plugins/modules/workflow_approval.py import-3.9
plugins/modules/workflow_job_template.py import-3.9
plugins/modules/workflow_job_template_node.py import-3.9
plugins/modules/workflow_launch.py import-3.9
plugins/modules/workflow_node_wait.py import-3.9
plugins/inventory/controller.py import-3.10
plugins/lookup/controller_api.py import-3.10
plugins/module_utils/awxkit.py import-3.10
plugins/module_utils/controller_api.py import-3.10
plugins/modules/ad_hoc_command.py import-3.10
plugins/modules/ad_hoc_command_cancel.py import-3.10
plugins/modules/ad_hoc_command_wait.py import-3.10
plugins/modules/application.py import-3.10
plugins/modules/controller_meta.py import-3.10
plugins/modules/credential.py import-3.10
plugins/modules/credential_input_source.py import-3.10
plugins/modules/credential_type.py import-3.10
plugins/modules/execution_environment.py import-3.10
plugins/modules/export.py import-3.10
plugins/modules/group.py import-3.10
plugins/modules/host.py import-3.10
plugins/modules/import.py import-3.10
plugins/modules/instance.py import-3.10
plugins/modules/instance_group.py import-3.10
plugins/modules/inventory.py import-3.10
plugins/modules/inventory_source.py import-3.10
plugins/modules/inventory_source_update.py import-3.10
plugins/modules/job_cancel.py import-3.10
plugins/modules/job_launch.py import-3.10
plugins/modules/job_list.py import-3.10
plugins/modules/job_template.py import-3.10
plugins/modules/job_wait.py import-3.10
plugins/modules/label.py import-3.10
plugins/modules/license.py import-3.10
plugins/modules/notification_template.py import-3.10
plugins/modules/organization.py import-3.10
plugins/modules/project.py import-3.10
plugins/modules/project_update.py import-3.10
plugins/modules/role.py import-3.10
plugins/modules/schedule.py import-3.10
plugins/modules/settings.py import-3.10
plugins/modules/subscriptions.py import-3.10
plugins/modules/team.py import-3.10
plugins/modules/token.py import-3.10
plugins/modules/user.py import-3.10
plugins/modules/workflow_approval.py import-3.10
plugins/modules/workflow_job_template.py import-3.10
plugins/modules/workflow_job_template_node.py import-3.10
plugins/modules/workflow_launch.py import-3.10
plugins/modules/workflow_node_wait.py import-3.10

@@ -1,4 +1,11 @@
---
- name: Sanity assertions that some variables have a non-blank value
assert:
that:
- collection_version
- collection_package
- collection_path

- name: Set the collection version in the controller_api.py file
replace:
path: "{{ collection_path }}/plugins/module_utils/controller_api.py"

@@ -9,7 +9,6 @@ skip_missing_interpreters = true

[testenv]
basepython = python3.9
passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH
setenv =
PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:.
deps =

22
licenses/aioredis.txt
Normal file
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2014-2017 Alexey Popravka
Copyright (c) 2021 Sean Stewart

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

29
licenses/hiredis.txt
Normal file
@@ -0,0 +1,29 @@
Copyright (c) 2009-2011, Salvatore Sanfilippo <antirez at gmail dot com>
Copyright (c) 2010-2011, Pieter Noordhuis <pcnoordhuis at gmail dot com>

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the name of Redis nor the names of its contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -148,6 +148,15 @@ in the top-level Makefile.

If modifying this library, make sure testing with the offline build is performed to confirm it is functionally working.

### channels-redis

Due to an upstream bug (linked below), we see `RuntimeError: Event loop is closed` errors with newer versions of `channels-redis`.
Upstream is aware of the bug, and according to the issue linked below a fix is likely to land in the next release.
For now, we pin to the old version, 3.4.1.

* https://github.com/django/channels_redis/issues/332
* https://github.com/ansible/awx/issues/13313

## Library Notes

### pexpect

@@ -4,7 +4,7 @@ asciichartpy
asn1
azure-keyvault==1.1.0 # see UPGRADE BLOCKERs
channels
channels-redis
channels-redis==3.4.1 # see UPGRADE BLOCKERs
cryptography
Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep
daphne
@@ -38,7 +38,6 @@ psycopg2
psutil
pygerduty
pyparsing==2.4.6 # Upgrading to v3 of pyparsing introduce errors on smart host filtering: Expected 'or' term, found 'or' (at char 15), (line:1, col:16)
python3-saml==1.13.0
python-dsv-sdk
python-tss-sdk==1.0.0
python-ldap
@@ -59,10 +58,8 @@ wheel
pip==21.2.4 # see UPGRADE BLOCKERs
setuptools # see UPGRADE BLOCKERs
setuptools_scm[toml] # see UPGRADE BLOCKERs, xmlsec build dep
xmlsec==1.3.12 # xmlsec 1.3.13 removed the ability to use lxml 4.7.0 but python3-saml requires lxml 4.7.0 so we need to pin xmlsec
lxml>=3.8 # xmlsec build dep
pkgconfig>=1.5.1 # xmlsec build dep
setuptools-rust >= 0.11.4 # cryptography build dep
pkgconfig>=1.5.1 # xmlsec build dep - needed for offline build

# Temporarily added to use ansible-runner from git branch, to be removed
# when ansible-runner moves from requirements_git.txt to here

@@ -2,6 +2,8 @@ adal==1.2.7
# via msrestazure
aiohttp==3.8.3
# via -r /awx_devel/requirements/requirements.in
aioredis==1.3.1
# via channels-redis
aiosignal==1.3.1
# via aiohttp
# via -r /awx_devel/requirements/requirements_git.txt
@@ -20,6 +22,7 @@ asn1==2.6.0
async-timeout==4.0.2
# via
# aiohttp
# aioredis
# redis
attrs==22.1.0
# via
@@ -51,11 +54,11 @@ cachetools==5.2.0
# requests
cffi==1.15.1
# via cryptography
channels==4.0.0
channels==3.0.5
# via
# -r /awx_devel/requirements/requirements.in
# channels-redis
channels-redis==4.0.0
channels-redis==3.4.1
# via -r /awx_devel/requirements/requirements.in
charset-normalizer==2.1.1
# via
@@ -76,8 +79,10 @@ cryptography==38.0.4
# social-auth-core
cython==0.29.32
# via -r /awx_devel/requirements/requirements.in
daphne==4.0.0
# via -r /awx_devel/requirements/requirements.in
daphne==3.0.2
# via
# -r /awx_devel/requirements/requirements.in
# channels
dataclasses==0.6
# via
# python-dsv-sdk
@@ -153,6 +158,8 @@ gitpython==3.1.29
# via -r /awx_devel/requirements/requirements.in
google-auth==2.14.1
# via kubernetes
hiredis==2.1.0
# via aioredis
hyperlink==21.0.0
# via
# autobahn
@@ -198,15 +205,14 @@ jinja2==3.1.2
# via -r /awx_devel/requirements/requirements.in
json-log-formatter==0.5.1
# via -r /awx_devel/requirements/requirements.in
jsonschema==4.17.1
jsonschema==4.17.3
# via -r /awx_devel/requirements/requirements.in
kubernetes==25.3.0
# via openshift
lockfile==0.12.2
# via python-daemon
lxml==4.7.0
lxml==4.9.1
# via
# -r /awx_devel/requirements/requirements.in
# python3-saml
# xmlsec
markdown==3.4.1
@@ -315,8 +321,7 @@ python-tss-sdk==1.0.0
# via -r /awx_devel/requirements/requirements.in
python3-openid==3.2.0
# via social-auth-core
python3-saml==1.13.0
# via -r /awx_devel/requirements/requirements.in
# via -r /awx_devel/requirements/requirements_git.txt
pytz==2022.6
# via
# django
@@ -336,7 +341,6 @@ receptorctl==1.2.3
redis==4.3.5
# via
# -r /awx_devel/requirements/requirements.in
# channels-redis
# django-redis
requests==2.28.1
# via
@@ -435,10 +439,8 @@ websocket-client==1.4.2
# via kubernetes
wheel==0.38.4
# via -r /awx_devel/requirements/requirements.in
xmlsec==1.3.12
# via
# -r /awx_devel/requirements/requirements.in
# python3-saml
xmlsec==1.3.13
# via python3-saml
yarl==1.8.1
# via aiohttp
zipp==3.11.0

@@ -4,3 +4,4 @@ git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
# django-radius has an aggressive pin of future==0.16.0, see https://github.com/robgolding/django-radius/pull/25
git+https://github.com/ansible/django-radius.git@develop#egg=django-radius
git+https://github.com/PythonCharmers/python-future@master#egg=future
git+https://github.com/ansible/python3-saml.git@devel#egg=python3-saml