mirror of https://github.com/ansible/awx.git
synced 2026-02-09 05:24:42 -03:30
Compare commits
49 Commits
| SHA1 |
|---|
| ea9c52aca6 |
| a7ebce1fef |
| 5de9cf748d |
| ebea78943d |
| 1e33bc4020 |
| e9ad01e806 |
| 8a4059d266 |
| 01a7076267 |
| 32b6aec66b |
| 884ab424d5 |
| 7e55305c45 |
| e9a1582b70 |
| 51ef1e808d |
| 11fbfc2063 |
| f6395c69dd |
| ca07bc85cb |
| b87dd6dc56 |
| f8d46d5e71 |
| ce0a456ecc |
| 5775ff1422 |
| 82e8bcd2bb |
| d73cc501d5 |
| 7e40a4daed |
| 47e824dd11 |
| 4643b816fe |
| 79d9329cfa |
| 6492c03965 |
| 98107301a5 |
| 4810099158 |
| 1aca9929ab |
| 2aa58bc17d |
| b99a434dee |
| 6cee99a9f9 |
| ee509aea56 |
| b5452a48f8 |
| 68e555824d |
| 0c980fa7d5 |
| e34ce8c795 |
| 58bad6cfa9 |
| 3543644e0e |
| 36c0d07b30 |
| 03b0281fde |
| 6f6f04a071 |
| 239827a9cf |
| ac9871b36f |
| f739908ccf |
| cf1ec07eab |
| d968b648de |
| d34f6af830 |
22  .github/workflows/ci.yml  (vendored)
@@ -28,9 +28,6 @@ jobs:
           - name: awx-collection
             command: /start_tests.sh test_collection_all
             label: Run Collection Tests
-          - name: awx-collection-sanity
-            command: /start_tests.sh test_collection_sanity
-            label: Run Ansible core Collection Sanity tests
           - name: api-schema
             label: Check API Schema
             command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
@@ -148,3 +145,22 @@ jobs:
         env:
           AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
+
+  collection-sanity:
+    name: awx_collection sanity
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v2
+
+      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
+      - name: Upgrade ansible-core
+        run: python3 -m pip install --upgrade ansible-core
+
+      - name: Run sanity tests
+        run: make test_collection_sanity
+        env:
+          # needed due to cgroupsv2. This is fixed, but a stable release
+          # with the fix has not been made yet.
+          ANSIBLE_TEST_PREFER_PODMAN: 1
4  .github/workflows/e2e_test.yml  (vendored)
@@ -6,7 +6,7 @@ env:
 on:
   pull_request_target:
     types: [labeled]
-jobs:
+jobs:
   e2e-test:
     if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
     runs-on: ubuntu-latest
@@ -107,5 +107,3 @@ jobs:
         with:
           name: AWX-logs-${{ matrix.job }}
          path: make-docker-compose-output.log
-
-
15  .github/workflows/promote.yml  (vendored)
@@ -38,9 +38,13 @@ jobs:
       - name: Build collection and publish to galaxy
         run: |
           COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
-          ansible-galaxy collection publish \
-            --token=${{ secrets.GALAXY_TOKEN }} \
-            awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz
+          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+            echo "Galaxy release already done"; \
+          else \
+            ansible-galaxy collection publish \
+              --token=${{ secrets.GALAXY_TOKEN }} \
+              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
+          fi

       - name: Set official pypi info
         run: echo pypi_repo=pypi >> $GITHUB_ENV
@@ -52,6 +56,7 @@ jobs:

       - name: Build awxkit and upload to pypi
         run: |
+          git reset --hard
           cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
@@ -74,4 +79,6 @@ jobs:
           docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
           docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
           docker push quay.io/${{ github.repository }}:latest
-
+          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
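The curl guard above makes the Galaxy publish step idempotent: galaxy.ansible.com answers the download URL of an already-released artifact with a 302 redirect, so a re-run of the release workflow skips the publish instead of failing. A minimal sketch of the same check in Python (the namespace, version, and URL layout mirror the workflow; availability of the requests library is assumed):

    import requests

    def already_published(namespace: str, version: str) -> bool:
        # Galaxy answers with a 302 once the tarball for this version exists
        url = f"https://galaxy.ansible.com/download/{namespace}-awx-{version}.tar.gz"
        # allow_redirects=False so the 302 itself is observed rather than followed
        return requests.head(url, allow_redirects=False).status_code == 302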
15  .github/workflows/stage.yml  (vendored)
@@ -84,6 +84,20 @@ jobs:
             -e push=yes \
             -e awx_official=yes

+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+
+      - name: tag awx-ee:latest with version input
+        run: |
+          docker pull quay.io/ansible/awx-ee:latest
+          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+
       - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
@@ -103,6 +117,7 @@ jobs:
         env:
           AWX_TEST_IMAGE: ${{ github.repository }}
           AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}

       - name: Create draft release for AWX
         working-directory: awx
20  Makefile
@@ -9,6 +9,17 @@ VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)

 # ansible-test requires semver compatable version, so we allow overrides to hack it
 COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
+# args for the ansible-test sanity command
+COLLECTION_SANITY_ARGS ?= --docker
+# collection unit testing directories
+COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+# collection integration test directories (defaults to all)
+COLLECTION_TEST_TARGET ?=
+# args for collection install
+COLLECTION_PACKAGE ?= awx
+COLLECTION_NAMESPACE ?= awx
+COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_TEMPLATE_VERSION ?= false

 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
@@ -290,13 +301,6 @@ test:
 	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 	awx-manage check_migrations --dry-run --check -n 'missing_migration_file'

-COLLECTION_TEST_DIRS ?= awx_collection/test/awx
-COLLECTION_TEST_TARGET ?=
-COLLECTION_PACKAGE ?= awx
-COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
-COLLECTION_TEMPLATE_VERSION ?= false
-
 test_collection:
 	rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
 	if [ "$(VENV_BASE)" ]; then \
@@ -339,7 +343,7 @@ test_collection_sanity:
 	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
 	ansible --version
 	COLLECTION_VERSION=1.0.0 make install_collection
-	cd $(COLLECTION_INSTALL) && ansible-test sanity --exclude=plugins/modules/export.py
+	cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)

 test_collection_integration: install_collection
 	cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
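ansible-test only accepts a plain semver triple, while scm_version.py can emit setuptools-scm style versions with dev/local segments; the `cut -d . -f 1-3` in COLLECTION_VERSION keeps just the first three dot-separated fields. The same trim, sketched in Python (the sample version string is illustrative):

    def semver_triple(scm_version: str) -> str:
        # equivalent of `cut -d . -f 1-3`: keep major.minor.patch only
        return ".".join(scm_version.split(".")[:3])

    assert semver_triple("21.8.0.dev15+g1a2b3c4") == "21.8.0"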
@@ -96,6 +96,15 @@ register(
     category=_('Authentication'),
     category_slug='authentication',
 )
+register(
+    'ALLOW_METRICS_FOR_ANONYMOUS_USERS',
+    field_class=fields.BooleanField,
+    default=False,
+    label=_('Allow anonymous users to poll metrics'),
+    help_text=_('If true, anonymous users are allowed to poll metrics.'),
+    category=_('Authentication'),
+    category_slug='authentication',
+)


 def authentication_validate(serializer, attrs):
@@ -5,9 +5,11 @@
 import logging

 # Django
+from django.conf import settings
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework
+from rest_framework.permissions import AllowAny
 from rest_framework.response import Response
 from rest_framework.exceptions import PermissionDenied
@@ -31,9 +33,14 @@ class MetricsView(APIView):

     renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]

+    def initialize_request(self, request, *args, **kwargs):
+        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS:
+            self.permission_classes = (AllowAny,)
+        return super(APIView, self).initialize_request(request, *args, **kwargs)
+
     def get(self, request):
         '''Show Metrics Details'''
-        if request.user.is_superuser or request.user.is_system_auditor:
+        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
             metrics_to_show = ''
             if not request.query_params.get('subsystemonly', "0") == "1":
                 metrics_to_show += metrics().decode('UTF-8')
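With the new setting enabled, the view swaps in DRF's AllowAny permission during initialize_request, so an unauthenticated scraper receives the Prometheus text instead of a 401. A sketch of the client-side effect, assuming a hypothetical AWX host:

    import requests

    resp = requests.get("https://awx.example.org/api/v2/metrics/", headers={"Accept": "text/plain"})
    if resp.ok:
        print(resp.text.splitlines()[0])  # first Prometheus exposition line
    else:
        # with ALLOW_METRICS_FOR_ANONYMOUS_USERS = False this is a 401/403
        print("anonymous polling disabled:", resp.status_code)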
@@ -1,6 +1,7 @@
 import copy
 import os
 import pathlib
+import time
 from urllib.parse import urljoin

 from .plugin import CredentialPlugin, CertFiles, raise_for_status
@@ -247,7 +248,15 @@ def kv_backend(**kwargs):
     request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/')
     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        response = sess.get(request_url, **request_kwargs)
+        request_retries = 0
+        while request_retries < 5:
+            response = sess.get(request_url, **request_kwargs)
+            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
+            if response.status_code == 412:
+                request_retries += 1
+                time.sleep(1)
+            else:
+                break
     raise_for_status(response)

     json = response.json()
@@ -289,8 +298,15 @@ def ssh_backend(**kwargs):

     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        resp = sess.post(request_url, **request_kwargs)
-
+        request_retries = 0
+        while request_retries < 5:
+            resp = sess.post(request_url, **request_kwargs)
+            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
+            if resp.status_code == 412:
+                request_retries += 1
+                time.sleep(1)
+            else:
+                break
     raise_for_status(resp)
     return resp.json()['data']['signed_key']
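Both backends now retry on HTTP 412, which Vault Enterprise replicas return until an eventually consistent write becomes visible (see the linked consistency document). The same loop factored into a standalone helper, as a sketch (names are illustrative):

    import time
    import requests

    def request_with_consistency_retry(sess: requests.Session, method: str, url: str, retries: int = 5, **kwargs):
        for _ in range(retries):
            response = sess.request(method, url, **kwargs)
            if response.status_code != 412:
                break
            time.sleep(1)  # give the replica a moment to catch up
        return response

Note that the loop deliberately returns the last response even after exhausting retries; the caller's raise_for_status decides whether that is fatal, matching the diff above.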
@@ -3,14 +3,12 @@ import logging
 import os
 import signal
 import time
-import traceback
 import datetime

 from django.conf import settings
 from django.utils.functional import cached_property
 from django.utils.timezone import now as tz_now
-from django.db import DatabaseError, OperationalError, transaction, connection as django_connection
-from django.db.utils import InterfaceError, InternalError
+from django.db import transaction, connection as django_connection
 from django_guid import set_guid

 import psutil
@@ -64,6 +62,7 @@ class CallbackBrokerWorker(BaseWorker):
     """

     MAX_RETRIES = 2
+    INDIVIDUAL_EVENT_RETRIES = 3
     last_stats = time.time()
     last_flush = time.time()
     total = 0
@@ -164,38 +163,48 @@ class CallbackBrokerWorker(BaseWorker):
                 else:  # only calculate the seconds if the created time already has been set
                     metrics_total_job_event_processing_seconds += e.modified - e.created
             metrics_duration_to_save = time.perf_counter()
+            saved_events = []
             try:
                 cls.objects.bulk_create(events)
                 metrics_bulk_events_saved += len(events)
+                saved_events = events
+                self.buff[cls] = []
             except Exception as exc:
-                logger.warning(f'Error in events bulk_create, will try indiviually up to 5 errors, error {str(exc)}')
+                # If the database is flaking, let ensure_connection throw a general exception
+                # will be caught by the outer loop, which goes into a proper sleep and retry loop
+                django_connection.ensure_connection()
+                logger.warning(f'Error in events bulk_create, will try indiviually, error: {str(exc)}')
                 # if an exception occurs, we should re-attempt to save the
                 # events one-by-one, because something in the list is
                 # broken/stale
-                consecutive_errors = 0
-                events_saved = 0
                 metrics_events_batch_save_errors += 1
-                for e in events:
+                for e in events.copy():
                     try:
                         e.save()
-                        events_saved += 1
-                        consecutive_errors = 0
+                        metrics_singular_events_saved += 1
+                        events.remove(e)
+                        saved_events.append(e)  # Importantly, remove successfully saved events from the buffer
                     except Exception as exc_indv:
-                        consecutive_errors += 1
-                        logger.info(f'Database Error Saving individual Job Event, error {str(exc_indv)}')
-                        if consecutive_errors >= 5:
-                            raise
-                metrics_singular_events_saved += events_saved
-                if events_saved == 0:
-                    raise
+                        retry_count = getattr(e, '_retry_count', 0) + 1
+                        e._retry_count = retry_count
+
+                        # special sanitization logic for postgres treatment of NUL 0x00 char
+                        if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
+                            e.stdout = e.stdout.replace("\x00", "")
+
+                        if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
+                            logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}')
+                            events.remove(e)
+                        else:
+                            logger.info(f'Database Error Saving individual Event uuid={e.uuid} try={retry_count}, error: {str(exc_indv)}')

             metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save
-            for e in events:
+            for e in saved_events:
                 if not getattr(e, '_skip_websocket_message', False):
                     metrics_events_broadcast += 1
                     emit_event_detail(e)
                 if getattr(e, '_notification_trigger_event', False):
                     job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e)
-        self.buff = {}
         self.last_flush = time.time()
         # only update metrics if we saved events
         if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0:
@@ -267,20 +276,16 @@ class CallbackBrokerWorker(BaseWorker):
                 try:
                     self.flush(force=flush)
                     break
-                except (OperationalError, InterfaceError, InternalError) as exc:
+                except Exception as exc:
+                    # Aside form bugs, exceptions here are assumed to be due to database flake
                     if retries >= self.MAX_RETRIES:
                         logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.')
                         self.buff = {}
                         return
                     delay = 60 * retries
                     logger.warning(f'Database Error Flushing Job Events, retry #{retries + 1} in {delay} seconds: {str(exc)}')
                     django_connection.close()
                     time.sleep(delay)
                     retries += 1
-                except DatabaseError:
-                    logger.exception('Database Error Flushing Job Events')
-                    django_connection.close()
-                    break
-        except Exception as exc:
-            tb = traceback.format_exc()
-            logger.error('Callback Task Processor Raised Exception: %r', exc)
-            logger.error('Detail: {}'.format(tb))
+        except Exception:
+            logger.exception(f'Callback Task Processor Raised Unexpected Exception processing event data:\n{body}')
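The rewritten error handling treats any flush exception as presumed database flake: retry the whole flush with a growing delay and, after MAX_RETRIES, drop the buffered events rather than wedge the worker forever. The outer loop's shape in isolation, as a sketch (the callables are stand-ins for the worker's own methods):

    import time

    MAX_RETRIES = 2

    def flush_with_retries(flush, clear_buffer, log):
        retries = 0
        while True:
            try:
                flush()
                return
            except Exception as exc:
                if retries >= MAX_RETRIES:
                    log(f'giving up on one or more events: {exc}')
                    clear_buffer()  # mirrors self.buff = {} above
                    return
                time.sleep(60 * retries)  # 0s on the first retry, then 60s
                retries += 1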
@@ -390,6 +390,7 @@ class BaseTask(object):
             logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
             raise

+        emitted_lockfile_log = False
         start_time = time.time()
         while True:
             try:
@@ -401,6 +402,9 @@ class BaseTask(object):
                     logger.error("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
                     raise
                 else:
+                    if not emitted_lockfile_log:
+                        logger.info(f"exception acquiring lock {lock_path}: {e}")
+                        emitted_lockfile_log = True
                     time.sleep(1.0)
             self.instance.refresh_from_db(fields=['cancel_flag'])
             if self.instance.cancel_flag or signal_callback():
@@ -411,9 +411,11 @@ class AWXReceptorJob:
             unit_status = receptor_ctl.simple_command(f'work status {self.unit_id}')
             detail = unit_status.get('Detail', None)
             state_name = unit_status.get('StateName', None)
+            stdout_size = unit_status.get('StdoutSize', 0)
         except Exception:
             detail = ''
             state_name = ''
+            stdout_size = 0
             logger.exception(f'An error was encountered while getting status for work unit {self.unit_id}')

         if 'exceeded quota' in detail:
@@ -424,9 +426,16 @@ class AWXReceptorJob:
             return

         try:
-            resultsock = receptor_ctl.get_work_results(self.unit_id, return_sockfile=True)
-            lines = resultsock.readlines()
-            receptor_output = b"".join(lines).decode()
+            receptor_output = ''
+            if state_name == 'Failed' and self.task.runner_callback.event_ct == 0:
+                # if receptor work unit failed and no events were emitted, work results may
+                # contain useful information about why the job failed. In case stdout is
+                # massive, only ask for last 1000 bytes
+                startpos = max(stdout_size - 1000, 0)
+                resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, startpos=startpos, return_socket=True, return_sockfile=True)
+                resultsock.setblocking(False)  # this makes resultfile reads non blocking
+                lines = resultfile.readlines()
+                receptor_output = b"".join(lines).decode()
             if receptor_output:
                 self.task.runner_callback.delay_update(result_traceback=receptor_output)
             elif detail:
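When the work unit failed without emitting any job events, the code now tails the receptor stdout stream instead of reading it whole, asking for results starting 1000 bytes from the end. The offset arithmetic as a sketch:

    def tail_startpos(stdout_size: int, tail_bytes: int = 1000) -> int:
        # never negative: short outputs are read from the beginning
        return max(stdout_size - tail_bytes, 0)

    assert tail_startpos(50_000) == 49_000
    assert tail_startpos(300) == 0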
@@ -1,7 +1,15 @@
 import pytest
+import time
+from unittest import mock
+from uuid import uuid4
+
+from django.test import TransactionTestCase
+
+from awx.main.dispatch.worker.callback import job_stats_wrapup, CallbackBrokerWorker

-from awx.main.dispatch.worker.callback import job_stats_wrapup
 from awx.main.models.jobs import Job
+from awx.main.models.inventory import InventoryUpdate, InventorySource
+from awx.main.models.events import InventoryUpdateEvent


 @pytest.mark.django_db
@@ -24,3 +32,108 @@ def test_wrapup_does_send_notifications(mocker):
     job.refresh_from_db()
     assert job.host_status_counts == {}
     mock.assert_called_once_with('succeeded')
+
+
+class FakeRedis:
+    def keys(self, *args, **kwargs):
+        return []
+
+    def set(self):
+        pass
+
+    def get(self):
+        return None
+
+    @classmethod
+    def from_url(cls, *args, **kwargs):
+        return cls()
+
+    def pipeline(self):
+        return self
+
+
+class TestCallbackBrokerWorker(TransactionTestCase):
+    @pytest.fixture(autouse=True)
+    def turn_off_websockets(self):
+        with mock.patch('awx.main.dispatch.worker.callback.emit_event_detail', lambda *a, **kw: None):
+            yield
+
+    def get_worker(self):
+        with mock.patch('redis.Redis', new=FakeRedis):  # turn off redis stuff
+            return CallbackBrokerWorker()
+
+    def event_create_kwargs(self):
+        inventory_update = InventoryUpdate.objects.create(source='file', inventory_source=InventorySource.objects.create(source='file'))
+        return dict(inventory_update=inventory_update, created=inventory_update.created)
+
+    def test_flush_with_valid_event(self):
+        worker = self.get_worker()
+        events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
+        worker.buff = {InventoryUpdateEvent: events}
+        worker.flush()
+        assert worker.buff.get(InventoryUpdateEvent, []) == []
+        assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1
+
+    def test_flush_with_invalid_event(self):
+        worker = self.get_worker()
+        kwargs = self.event_create_kwargs()
+        events = [
+            InventoryUpdateEvent(uuid=str(uuid4()), stdout='good1', **kwargs),
+            InventoryUpdateEvent(uuid=str(uuid4()), stdout='bad', counter=-2, **kwargs),
+            InventoryUpdateEvent(uuid=str(uuid4()), stdout='good2', **kwargs),
+        ]
+        worker.buff = {InventoryUpdateEvent: events.copy()}
+        worker.flush()
+        assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1
+        assert InventoryUpdateEvent.objects.filter(uuid=events[1].uuid).count() == 0
+        assert InventoryUpdateEvent.objects.filter(uuid=events[2].uuid).count() == 1
+        assert worker.buff == {InventoryUpdateEvent: [events[1]]}
+
+    def test_duplicate_key_not_saved_twice(self):
+        worker = self.get_worker()
+        events = [InventoryUpdateEvent(uuid=str(uuid4()), **self.event_create_kwargs())]
+        worker.buff = {InventoryUpdateEvent: events.copy()}
+        worker.flush()
+
+        # put current saved event in buffer (error case)
+        worker.buff = {InventoryUpdateEvent: [InventoryUpdateEvent.objects.get(uuid=events[0].uuid)]}
+        worker.last_flush = time.time() - 2.0
+        # here, the bulk_create will fail with UNIQUE constraint violation, but individual saves should resolve it
+        worker.flush()
+        assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 1
+        assert worker.buff.get(InventoryUpdateEvent, []) == []
+
+    def test_give_up_on_bad_event(self):
+        worker = self.get_worker()
+        events = [InventoryUpdateEvent(uuid=str(uuid4()), counter=-2, **self.event_create_kwargs())]
+        worker.buff = {InventoryUpdateEvent: events.copy()}
+
+        for i in range(5):
+            worker.last_flush = time.time() - 2.0
+            worker.flush()
+
+        # Could not save, should be logged, and buffer should be cleared
+        assert worker.buff.get(InventoryUpdateEvent, []) == []
+        assert InventoryUpdateEvent.objects.filter(uuid=events[0].uuid).count() == 0  # sanity
+
+    def test_postgres_invalid_NUL_char(self):
+        # In postgres, text fields reject NUL character, 0x00
+        # tests use sqlite3 which will not raise an error
+        # but we can still test that it is sanitized before saving
+        worker = self.get_worker()
+        kwargs = self.event_create_kwargs()
+        events = [InventoryUpdateEvent(uuid=str(uuid4()), stdout="\x00", **kwargs)]
+        assert "\x00" in events[0].stdout  # sanity
+        worker.buff = {InventoryUpdateEvent: events.copy()}
+
+        with mock.patch.object(InventoryUpdateEvent.objects, 'bulk_create', side_effect=ValueError):
+            with mock.patch.object(events[0], 'save', side_effect=ValueError):
+                worker.flush()
+
+        assert "\x00" not in events[0].stdout
+
+        worker.last_flush = time.time() - 2.0
+        worker.flush()
+
+        event = InventoryUpdateEvent.objects.get(uuid=events[0].uuid)
+        assert "\x00" not in event.stdout
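The test above pins down the PostgreSQL quirk driving the sanitization: text columns reject the NUL byte (0x00) even though Python strings carry it happily, and the database driver surfaces that before the row is written (psycopg2 raises a ValueError for string literals containing NUL, which is what the retry logic checks for). A sketch of the sanitizing step in isolation:

    def sanitize_stdout_for_postgres(stdout: str) -> str:
        # 0x00 is valid in a Python str but not in a PostgreSQL text value
        return stdout.replace("\x00", "")

    assert sanitize_stdout_for_postgres("output\x00tail") == "outputtail"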
@@ -103,6 +103,10 @@ ColorHandler = logging.StreamHandler
 if settings.COLOR_LOGS is True:
     try:
         from logutils.colorize import ColorizingStreamHandler
+        import colorama
+
+        colorama.deinit()
+        colorama.init(wrap=False, convert=False, strip=False)

         class ColorHandler(ColorizingStreamHandler):
             def colorize(self, line, record):
@@ -418,6 +418,9 @@ AUTH_BASIC_ENABLED = True
 # when trying to access a UI page that requries authentication.
 LOGIN_REDIRECT_OVERRIDE = ''

+# Note: This setting may be overridden by database settings.
+ALLOW_METRICS_FOR_ANONYMOUS_USERS = False
+
 DEVSERVER_DEFAULT_ADDR = '0.0.0.0'
 DEVSERVER_DEFAULT_PORT = '8013'
@@ -452,7 +452,10 @@ def on_populate_user(sender, **kwargs):
             remove = bool(team_opts.get('remove', True))
             state = _update_m2m_from_groups(ldap_user, users_opts, remove)
             if state is not None:
-                desired_team_states[team_name] = {'member_role': state}
+                organization = team_opts['organization']
+                if organization not in desired_team_states:
+                    desired_team_states[organization] = {}
+                desired_team_states[organization][team_name] = {'member_role': state}

     # Check if user.profile is available, otherwise force user.save()
     try:
@@ -473,16 +476,28 @@ def on_populate_user(sender, **kwargs):


 def reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, source):
+    #
+    # Arguments:
+    #   user - a user object
+    #   desired_org_states: { '<org_name>': { '<role>': <boolean> or None } }
+    #   desired_team_states: { '<org_name>': { '<team name>': { '<role>': <boolean> or None } } }
+    #   source - a text label indicating the "authentication adapter" for debug messages
+    #
+    # This function will load the users existing roles and then based on the deisred states modify the users roles
+    #   True indicates the user needs to be a member of the role
+    #   False indicates the user should not be a member of the role
+    #   None means this function should not change the users membership of a role
+    #
     from awx.main.models import Organization, Team

     content_types = []
     reconcile_items = []
     if desired_org_states:
         content_types.append(ContentType.objects.get_for_model(Organization))
-        reconcile_items.append(('organization', desired_org_states, Organization))
+        reconcile_items.append(('organization', desired_org_states))
     if desired_team_states:
         content_types.append(ContentType.objects.get_for_model(Team))
-        reconcile_items.append(('team', desired_team_states, Team))
+        reconcile_items.append(('team', desired_team_states))

     if not content_types:
         # If both desired states were empty we can simply return because there is nothing to reconcile
@@ -491,24 +506,39 @@ def reconcile_users_org_team_mappings(user, desired_org_states, desired_team_sta
     # users_roles is a flat set of IDs
     users_roles = set(user.roles.filter(content_type__in=content_types).values_list('pk', flat=True))

-    for object_type, desired_states, model in reconcile_items:
-        # Get all of the roles in the desired states for efficient DB extraction
+    for object_type, desired_states in reconcile_items:
         roles = []
-        for sub_dict in desired_states.values():
-            for role_name in sub_dict:
-                if sub_dict[role_name] is None:
-                    continue
-                if role_name not in roles:
-                    roles.append(role_name)
-
-        # Get a set of named tuples for the org/team name plus all of the roles we got above
-        model_roles = model.objects.filter(name__in=desired_states.keys()).values_list('name', *roles, named=True)
+        if object_type == 'organization':
+            for sub_dict in desired_states.values():
+                for role_name in sub_dict:
+                    if sub_dict[role_name] is None:
+                        continue
+                    if role_name not in roles:
+                        roles.append(role_name)
+            model_roles = Organization.objects.filter(name__in=desired_states.keys()).values_list('name', *roles, named=True)
+        else:
+            team_names = []
+            for teams_dict in desired_states.values():
+                team_names.extend(teams_dict.keys())
+                for sub_dict in teams_dict.values():
+                    for role_name in sub_dict:
+                        if sub_dict[role_name] is None:
+                            continue
+                        if role_name not in roles:
+                            roles.append(role_name)
+            model_roles = Team.objects.filter(name__in=team_names).values_list('name', 'organization__name', *roles, named=True)

         for row in model_roles:
             for role_name in roles:
-                desired_state = desired_states.get(row.name, {})
-                if desired_state[role_name] is None:
+                if object_type == 'organization':
+                    desired_state = desired_states.get(row.name, {})
+                else:
+                    desired_state = desired_states.get(row.organization__name, {}).get(row.name, {})
+
+                if desired_state.get(role_name, None) is None:
                     # The mapping was not defined for this [org/team]/role so we can just pass
-                    pass
+                    continue

                 # If somehow the auth adapter knows about an items role but that role is not defined in the DB we are going to print a pretty error
                 # This is your classic safety net that we should never hit; but here you are reading this comment... good luck and Godspeed.
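The reshaped mapping keys team state by organization first, so two teams with the same name in different organizations no longer collide. Illustrative (hypothetical) inputs matching the docstring above, where True grants the role, False revokes it, and None leaves membership untouched:

    desired_org_states = {
        'Engineering': {'member_role': True, 'admin_role': None},
    }
    desired_team_states = {
        'Engineering': {
            'Platform': {'member_role': True},
            'QA': {'member_role': False},
        },
    }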
@@ -34,8 +34,14 @@ const QS_CONFIG = getQSConfig('template', {
   order_by: 'name',
 });

-function RelatedTemplateList({ searchParams, projectName = null }) {
-  const { id: projectId } = useParams();
+const resources = {
+  projects: 'project',
+  inventories: 'inventory',
+  credentials: 'credentials',
+};
+
+function RelatedTemplateList({ searchParams, resourceName = null }) {
+  const { id } = useParams();
   const location = useLocation();
   const { addToast, Toast, toastProps } = useToast();
@@ -129,12 +135,19 @@ function RelatedTemplateList({ searchParams, resourceName = null }) {
     actions && Object.prototype.hasOwnProperty.call(actions, 'POST');

   let linkTo = '';

-  if (projectName) {
-    const qs = encodeQueryString({
-      project_id: projectId,
-      project_name: projectName,
-    });
+  if (resourceName) {
+    const queryString = {
+      resource_id: id,
+      resource_name: resourceName,
+      resource_type: resources[location.pathname.split('/')[1]],
+      resource_kind: null,
+    };
+    if (Array.isArray(resourceName)) {
+      const [name, kind] = resourceName;
+      queryString.resource_name = name;
+      queryString.resource_kind = kind;
+    }
+    const qs = encodeQueryString(queryString);
     linkTo = `/templates/job_template/add/?${qs}`;
   } else {
     linkTo = '/templates/job_template/add';
@@ -0,0 +1 @@
+/* eslint-disable import/prefer-default-export */
@@ -420,7 +420,7 @@ describe('<AdvancedSearch />', () => {
     const selectOptions = wrapper.find(
       'Select[aria-label="Related search type"] SelectOption'
     );
-    expect(selectOptions).toHaveLength(2);
+    expect(selectOptions).toHaveLength(3);
     expect(
       selectOptions.find('SelectOption[id="name-option-select"]').prop('value')
     ).toBe('name__icontains');
@@ -31,6 +31,12 @@ function RelatedLookupTypeInput({
         value="name__icontains"
         description={t`Fuzzy search on name field.`}
       />
+      <SelectOption
+        id="name-exact-option-select"
+        key="name"
+        value="name"
+        description={t`Exact search on name field.`}
+      />
       <SelectOption
         id="id-option-select"
         key="id"
@@ -22,6 +22,16 @@ import { CredentialsAPI } from 'api';
 import CredentialDetail from './CredentialDetail';
 import CredentialEdit from './CredentialEdit';

+const jobTemplateCredentialTypes = [
+  'machine',
+  'cloud',
+  'net',
+  'ssh',
+  'vault',
+  'kubernetes',
+  'cryptography',
+];
+
 function Credential({ setBreadcrumb }) {
   const { pathname } = useLocation();
@@ -75,13 +85,14 @@ function Credential({ setBreadcrumb }) {
       link: `/credentials/${id}/access`,
       id: 1,
     },
-    {
+  ];
+  if (jobTemplateCredentialTypes.includes(credential?.kind)) {
+    tabsArray.push({
       name: t`Job Templates`,
       link: `/credentials/${id}/job_templates`,
       id: 2,
-    },
-  ];
+    });
+  }

   let showCardHeader = true;

   if (pathname.endsWith('edit') || pathname.endsWith('add')) {
@@ -133,6 +144,7 @@ function Credential({ setBreadcrumb }) {
     <Route key="job_templates" path="/credentials/:id/job_templates">
       <RelatedTemplateList
         searchParams={{ credentials__id: credential.id }}
+        resourceName={[credential.name, credential.kind]}
       />
     </Route>,
     <Route key="not-found" path="*">
@@ -6,7 +6,8 @@ import {
   mountWithContexts,
   waitForElement,
 } from '../../../testUtils/enzymeHelpers';
-import mockCredential from './shared/data.scmCredential.json';
+import mockMachineCredential from './shared/data.machineCredential.json';
+import mockSCMCredential from './shared/data.scmCredential.json';
 import Credential from './Credential';

 jest.mock('../../api');
@@ -21,13 +22,10 @@ jest.mock('react-router-dom', () => ({
 describe('<Credential />', () => {
   let wrapper;

-  beforeEach(() => {
+  test('initially renders user-based machine credential successfully', async () => {
     CredentialsAPI.readDetail.mockResolvedValueOnce({
-      data: mockCredential,
+      data: mockMachineCredential,
     });
-  });
-
-  test('initially renders user-based credential successfully', async () => {
     await act(async () => {
       wrapper = mountWithContexts(<Credential setBreadcrumb={() => {}} />);
     });
@@ -36,6 +34,18 @@ describe('<Credential />', () => {
     expect(wrapper.find('RoutedTabs li').length).toBe(4);
   });

+  test('initially renders user-based SCM credential successfully', async () => {
+    CredentialsAPI.readDetail.mockResolvedValueOnce({
+      data: mockSCMCredential,
+    });
+    await act(async () => {
+      wrapper = mountWithContexts(<Credential setBreadcrumb={() => {}} />);
+    });
+    wrapper.update();
+    expect(wrapper.find('Credential').length).toBe(1);
+    expect(wrapper.find('RoutedTabs li').length).toBe(3);
+  });
+
   test('should render expected tabs', async () => {
     const expectedTabs = [
       'Back to Credentials',
@@ -81,35 +81,30 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
       const {
         data: { results },
       } = await InstanceGroupsAPI.readInstances(instanceGroup.id);
-      let instanceDetails;
       const isAssociated = results.some(
         ({ id: instId }) => instId === parseInt(instanceId, 10)
       );

       if (isAssociated) {
-        const [{ data: details }, { data: healthCheckData }] =
-          await Promise.all([
-            InstancesAPI.readDetail(instanceId),
-            InstancesAPI.readHealthCheckDetail(instanceId),
-          ]);
-
-        instanceDetails = details;
-        setHealthCheck(healthCheckData);
-      } else {
-        throw new Error(
-          `This instance is not associated with this instance group`
+        const { data: details } = await InstancesAPI.readDetail(instanceId);
+        if (details.node_type === 'execution') {
+          const { data: healthCheckData } =
+            await InstancesAPI.readHealthCheckDetail(instanceId);
+          setHealthCheck(healthCheckData);
+        }
+        setBreadcrumb(instanceGroup, details);
+        setForks(
+          computeForks(
+            details.mem_capacity,
+            details.cpu_capacity,
+            details.capacity_adjustment
+          )
         );
+        return { instance: details };
       }

-      setBreadcrumb(instanceGroup, instanceDetails);
-      setForks(
-        computeForks(
-          instanceDetails.mem_capacity,
-          instanceDetails.cpu_capacity,
-          instanceDetails.capacity_adjustment
-        )
+      throw new Error(
+        `This instance is not associated with this instance group`
       );
-      return { instance: instanceDetails };
     }, [instanceId, setBreadcrumb, instanceGroup]),
     { instance: {}, isLoading: true }
   );
@@ -181,6 +181,7 @@ function Inventory({ setBreadcrumb }) {
           >
             <RelatedTemplateList
               searchParams={{ inventory__id: inventory.id }}
+              resourceName={inventory.name}
             />
           </Route>,
           <Route path="*" key="not-found">
@@ -41,7 +41,7 @@ function JobEvent({
   if (lineNumber < 0) {
     return null;
   }
-  const canToggle = index === toggleLineIndex;
+  const canToggle = index === toggleLineIndex && !event.isTracebackOnly;
   return (
     <JobEventLine
       onClick={isClickable ? onJobEventClick : undefined}
@@ -55,7 +55,7 @@ function JobEvent({
         onToggle={onToggleCollapsed}
       />
       <JobEventLineNumber>
-        {lineNumber}
+        {!event.isTracebackOnly ? lineNumber : ''}
         <JobEventEllipsis isCollapsed={isCollapsed && canToggle} />
       </JobEventLineNumber>
       <JobEventLineText
@@ -187,7 +187,9 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
   useEffect(() => {
     const pendingRequests = Object.values(eventByUuidRequests.current || {});
-    setIsFollowModeEnabled(false);
+    setHasContentLoading(true); // prevents "no content found" screen from flashing
+    if (location.search) {
+      setIsFollowModeEnabled(false);
+    }
     Promise.allSettled(pendingRequests).then(() => {
       setRemoteRowCount(0);
       clearLoadedEvents();
@@ -251,6 +253,9 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
       });
       const updated = oldWsEvents.concat(newEvents);
       jobSocketCounter.current = updated.length;
+      if (!oldWsEvents.length && min > remoteRowCount + 1) {
+        loadJobEvents(min);
+      }
       return updated.sort((a, b) => a.counter - b.counter);
     });
     setCssMap((prevCssMap) => ({
@@ -358,7 +363,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
     }
   };

-  const loadJobEvents = async () => {
+  const loadJobEvents = async (firstWsCounter = null) => {
     const [params, loadRange] = getEventRequestParams(job, 50, [1, 50]);

     if (isMounted.current) {
@@ -371,6 +376,9 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
     if (isFlatMode) {
       params.not__stdout = '';
     }
+    if (firstWsCounter) {
+      params.counter__lt = firstWsCounter;
+    }
     const qsParams = parseQueryString(QS_CONFIG, location.search);
     const eventPromise = getJobModel(job.type).readEvents(job.id, {
       ...params,
@@ -435,7 +443,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
     if (getEvent(counter)) {
       return true;
     }
-    if (index > remoteRowCount && index < remoteRowCount + wsEvents.length) {
+    if (index >= remoteRowCount && index < remoteRowCount + wsEvents.length) {
       return true;
     }
     return currentlyLoading.includes(counter);
@@ -462,7 +470,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
     }
     if (
       !event &&
-      index > remoteRowCount &&
+      index >= remoteRowCount &&
       index < remoteRowCount + wsEvents.length
     ) {
       event = wsEvents[index - remoteRowCount];
@@ -629,10 +637,14 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
     setIsFollowModeEnabled(false);
   };

-  const scrollToEnd = () => {
+  const scrollToEnd = useCallback(() => {
     scrollToRow(-1);
-    setTimeout(() => scrollToRow(-1), 100);
-  };
+    let timeout;
+    if (isFollowModeEnabled) {
+      setTimeout(() => scrollToRow(-1), 100);
+    }
+    return () => clearTimeout(timeout);
+  }, [isFollowModeEnabled]);

   const handleScrollLast = () => {
     scrollToEnd();
@@ -29,8 +29,11 @@ export function prependTraceback(job, events) {
     start_line: 0,
   };
   const firstIndex = events.findIndex((jobEvent) => jobEvent.counter === 1);
-  if (firstIndex && events[firstIndex]?.stdout) {
-    const stdoutLines = events[firstIndex].stdout.split('\r\n');
+  if (firstIndex > -1) {
+    if (!events[firstIndex].stdout) {
+      events[firstIndex].isTracebackOnly = true;
+    }
+    const stdoutLines = events[firstIndex].stdout?.split('\r\n') || [];
     stdoutLines[0] = tracebackEvent.stdout;
     events[firstIndex].stdout = stdoutLines.join('\r\n');
   } else {
@@ -179,7 +179,7 @@ function Project({ setBreadcrumb }) {
               searchParams={{
                 project__id: project.id,
               }}
-              projectName={project.name}
+              resourceName={project.name}
             />
           </Route>
           {project?.scm_type && project.scm_type !== '' && (
@@ -141,14 +141,14 @@ function JobsEdit() {
           <FormColumnLayout>
             <InputField
               name="AWX_ISOLATION_BASE_PATH"
-              config={jobs.AWX_ISOLATION_BASE_PATH}
-              isRequired
+              config={jobs.AWX_ISOLATION_BASE_PATH ?? null}
+              isRequired={Boolean(options?.AWX_ISOLATION_BASE_PATH)}
             />
             <InputField
               name="SCHEDULE_MAX_JOBS"
-              config={jobs.SCHEDULE_MAX_JOBS}
-              type="number"
-              isRequired
+              config={jobs.SCHEDULE_MAX_JOBS ?? null}
+              type={options?.SCHEDULE_MAX_JOBS ? 'number' : undefined}
+              isRequired={Boolean(options?.SCHEDULE_MAX_JOBS)}
             />
             <InputField
               name="DEFAULT_JOB_TIMEOUT"
@@ -122,4 +122,22 @@ describe('<JobsEdit />', () => {
     await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0);
     expect(wrapper.find('ContentError').length).toBe(1);
   });
+
+  test('Form input fields that are invisible (due to being set manually via a settings file) should not prevent submitting the form', async () => {
+    const mockOptions = Object.assign({}, mockAllOptions);
+    // If AWX_ISOLATION_BASE_PATH has been set in a settings file it will be absent in the PUT options
+    delete mockOptions['actions']['PUT']['AWX_ISOLATION_BASE_PATH'];
+    await act(async () => {
+      wrapper = mountWithContexts(
+        <SettingsProvider value={mockOptions.actions}>
+          <JobsEdit />
+        </SettingsProvider>
+      );
+    });
+    await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0);
+    await act(async () => {
+      wrapper.find('Form').invoke('onSubmit')();
+    });
+    expect(SettingsAPI.updateAll).toHaveBeenCalledTimes(1);
+  });
 });
@@ -397,7 +397,10 @@ const InputField = ({ name, config, type = 'text', isRequired = false }) => {
 };
 InputField.propTypes = {
   name: string.isRequired,
-  config: shape({}).isRequired,
+  config: shape({}),
 };
+InputField.defaultProps = {
+  config: null,
+};

 const TextAreaField = ({ name, config, isRequired = false }) => {
@@ -9,29 +9,31 @@ function JobTemplateAdd() {
   const [formSubmitError, setFormSubmitError] = useState(null);
   const history = useHistory();

-  const projectParams = {
-    project_id: null,
-    project_name: null,
+  const resourceParams = {
+    resource_id: null,
+    resource_name: null,
+    resource_type: null,
+    resource_kind: null,
   };
   history.location.search
     .replace(/^\?/, '')
     .split('&')
     .map((s) => s.split('='))
     .forEach(([key, val]) => {
-      if (!(key in projectParams)) {
+      if (!(key in resourceParams)) {
         return;
       }
-      projectParams[key] = decodeURIComponent(val);
+      resourceParams[key] = decodeURIComponent(val);
     });

-  let projectValues = null;
+  let resourceValues = null;

-  if (
-    Object.values(projectParams).filter((item) => item !== null).length === 2
-  ) {
-    projectValues = {
-      id: projectParams.project_id,
-      name: projectParams.project_name,
+  if (history.location.search.includes('resource_id' && 'resource_name')) {
+    resourceValues = {
+      id: resourceParams.resource_id,
+      name: resourceParams.resource_name,
+      type: resourceParams.resource_type,
+      kind: resourceParams.resource_kind, // refers to credential kind
     };
   }
@@ -122,7 +124,7 @@ function JobTemplateAdd() {
           handleCancel={handleCancel}
           handleSubmit={handleSubmit}
           submitError={formSubmitError}
-          projectValues={projectValues}
+          resourceValues={resourceValues}
           isOverrideDisabledLookup
         />
       </CardBody>
@@ -274,9 +274,14 @@ describe('<JobTemplateAdd />', () => {
   test('should parse and pre-fill project field from query params', async () => {
     const history = createMemoryHistory({
       initialEntries: [
-        '/templates/job_template/add/add?project_id=6&project_name=Demo%20Project',
+        '/templates/job_template/add?resource_id=6&resource_name=Demo%20Project&resource_type=project',
       ],
     });
+    ProjectsAPI.read.mockResolvedValueOnce({
+      count: 1,
+      results: [{ name: 'foo', id: 1, allow_override: true, organization: 1 }],
+    });
+    ProjectsAPI.readOptions.mockResolvedValueOnce({});
     let wrapper;
     await act(async () => {
       wrapper = mountWithContexts(<JobTemplateAdd />, {
@@ -284,8 +289,9 @@ describe('<JobTemplateAdd />', () => {
       });
     });
     await waitForElement(wrapper, 'EmptyStateBody', (el) => el.length === 0);

-    expect(ProjectsAPI.readPlaybooks).toBeCalledWith('6');
+    expect(wrapper.find('input#project').prop('value')).toEqual('Demo Project');
+    expect(ProjectsAPI.readPlaybooks).toBeCalledWith(6);
   });

   test('should not call ProjectsAPI.readPlaybooks if there is no project', async () => {
@@ -690,7 +690,7 @@ JobTemplateForm.defaultProps = {
 };

 const FormikApp = withFormik({
-  mapPropsToValues({ projectValues = {}, template = {} }) {
+  mapPropsToValues({ resourceValues = null, template = {} }) {
     const {
       summary_fields = {
         labels: { results: [] },
@@ -698,7 +698,7 @@ const FormikApp = withFormik({
       },
     } = template;

-    return {
+    const initialValues = {
       allow_callbacks: template.allow_callbacks || false,
       allow_simultaneous: template.allow_simultaneous || false,
       ask_credential_on_launch: template.ask_credential_on_launch || false,
@@ -739,7 +739,7 @@ const FormikApp = withFormik({
       playbook: template.playbook || '',
       prevent_instance_group_fallback:
        template.prevent_instance_group_fallback || false,
-      project: summary_fields?.project || projectValues || null,
+      project: summary_fields?.project || null,
      scm_branch: template.scm_branch || '',
      skip_tags: template.skip_tags || '',
      timeout: template.timeout || 0,
@@ -756,6 +756,24 @@ const FormikApp = withFormik({
       execution_environment:
         template.summary_fields?.execution_environment || null,
     };
+    if (resourceValues !== null) {
+      if (resourceValues.type === 'credentials') {
+        initialValues[resourceValues.type] = [
+          {
+            id: parseInt(resourceValues.id, 10),
+            name: resourceValues.name,
+            kind: resourceValues.kind,
+          },
+        ];
+      } else {
+        initialValues[resourceValues.type] = {
+          id: parseInt(resourceValues.id, 10),
+          name: resourceValues.name,
+        };
+      }
+    }
+
+    return initialValues;
   },
   handleSubmit: async (values, { props, setErrors }) => {
     try {
@@ -46,90 +46,216 @@ action_groups:
|
||||
plugin_routing:
|
||||
inventory:
|
||||
tower:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* plugins have been deprecated, use awx.awx.controller instead.
|
||||
redirect: awx.awx.controller
|
||||
lookup:
|
||||
tower_api:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* plugins have been deprecated, use awx.awx.controller_api instead.
|
||||
redirect: awx.awx.controller_api
|
||||
tower_schedule_rrule:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* plugins have been deprecated, use awx.awx.schedule_rrule instead.
|
||||
redirect: awx.awx.schedule_rrule
|
||||
modules:
|
||||
tower_ad_hoc_command_cancel:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.ad_hoc_command_cancel instead.
|
||||
redirect: awx.awx.ad_hoc_command_cancel
|
||||
tower_ad_hoc_command_wait:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.ad_hoc_command_wait instead.
|
||||
redirect: awx.awx.ad_hoc_command_wait
|
||||
tower_ad_hoc_command:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.ad_hoc_command instead.
|
||||
redirect: awx.awx.ad_hoc_command
|
||||
tower_application:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.application instead.
|
||||
redirect: awx.awx.application
|
||||
tower_meta:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.controller_meta instead.
|
||||
redirect: awx.awx.controller_meta
|
||||
tower_credential_input_source:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.credential_input_source instead.
|
||||
redirect: awx.awx.credential_input_source
|
||||
tower_credential_type:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.credential_type instead.
|
||||
redirect: awx.awx.credential_type
|
||||
tower_credential:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.credential instead.
|
||||
redirect: awx.awx.credential
|
||||
tower_execution_environment:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.execution_environment instead.
|
||||
redirect: awx.awx.execution_environment
|
||||
tower_export:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.export instead.
|
||||
redirect: awx.awx.export
|
||||
tower_group:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.group instead.
|
||||
redirect: awx.awx.group
|
||||
tower_host:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.host instead.
|
||||
redirect: awx.awx.host
|
||||
tower_import:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.import instead.
|
||||
redirect: awx.awx.import
|
||||
tower_instance_group:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.instance_group instead.
|
||||
redirect: awx.awx.instance_group
|
||||
tower_inventory_source_update:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.inventory_source_update instead.
|
||||
redirect: awx.awx.inventory_source_update
|
||||
tower_inventory_source:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.inventory_source instead.
|
||||
redirect: awx.awx.inventory_source
|
||||
tower_inventory:
|
||||
deprecation:
|
||||
removal_date: '2022-01-23'
|
||||
warning_text: The tower_* modules have been deprecated, use awx.awx.inventory instead.
|
||||
redirect: awx.awx.inventory
|
||||
tower_job_cancel:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.job_cancel instead.
  redirect: awx.awx.job_cancel
tower_job_launch:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.job_launch instead.
  redirect: awx.awx.job_launch
tower_job_list:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.job_list instead.
  redirect: awx.awx.job_list
tower_job_template:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.job_template instead.
  redirect: awx.awx.job_template
tower_job_wait:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.job_wait instead.
  redirect: awx.awx.job_wait
tower_label:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.label instead.
  redirect: awx.awx.label
tower_license:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.license instead.
  redirect: awx.awx.license
tower_notification_template:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.notification_template instead.
  redirect: awx.awx.notification_template
tower_notification:
  redirect: awx.awx.notification_template
tower_organization:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.organization instead.
  redirect: awx.awx.organization
tower_project_update:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.project_update instead.
  redirect: awx.awx.project_update
tower_project:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.project instead.
  redirect: awx.awx.project
tower_role:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.role instead.
  redirect: awx.awx.role
tower_schedule:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.schedule instead.
  redirect: awx.awx.schedule
tower_settings:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.settings instead.
  redirect: awx.awx.settings
tower_team:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.team instead.
  redirect: awx.awx.team
tower_token:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.token instead.
  redirect: awx.awx.token
tower_user:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.user instead.
  redirect: awx.awx.user
tower_workflow_approval:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_approval instead.
  redirect: awx.awx.workflow_approval
tower_workflow_job_template_node:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_job_template_node instead.
  redirect: awx.awx.workflow_job_template_node
tower_workflow_job_template:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_job_template instead.
  redirect: awx.awx.workflow_job_template
tower_workflow_launch:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_launch instead.
  redirect: awx.awx.workflow_launch
tower_workflow_node_wait:
  deprecation:
    removal_date: '2022-01-23'
    warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_node_wait instead.
  redirect: awx.awx.workflow_node_wait
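Taken together, these routing entries keep existing playbooks working: a task that still uses a tower_* name is redirected to the new module and emits a deprecation warning until the removal date. A minimal sketch of both spellings (the job template name is illustrative):

- name: Launch a job via the deprecated name (redirected to awx.awx.job_launch)
  awx.awx.tower_job_launch:
    job_template: "Demo Job Template"

- name: Launch the same job via the current name
  awx.awx.job_launch:
    job_template: "Demo Job Template"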
@@ -128,7 +128,7 @@ def main():
     description = module.params.get('description')
     state = module.params.pop('state')
     preserve_existing_hosts = module.params.get('preserve_existing_hosts')
-    preserve_existing_children = module.params.get('preserve_existing_groups')
+    preserve_existing_children = module.params.get('preserve_existing_children')
     variables = module.params.get('variables')

     # Attempt to look up the related items the user specified (these will fail the module if not found)
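The one-line change above fixes a lookup bug: preserve_existing_children was read from the old preserve_existing_groups key, so the renamed option was silently ignored. Assuming this hunk belongs to the collection's group module, a task relying on the corrected option might look like this sketch (group and inventory names are illustrative):

- name: Update a group while keeping its current hosts and child groups
  awx.awx.group:
    name: webservers
    inventory: Demo Inventory
    state: present
    preserve_existing_hosts: true
    preserve_existing_children: true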
@@ -1 +0,0 @@
-ad_hoc_command.py
@@ -1 +0,0 @@
-ad_hoc_command_cancel.py
@@ -1 +0,0 @@
-ad_hoc_command_wait.py
@@ -1 +0,0 @@
-application.py
@@ -1 +0,0 @@
-controller_meta.py
@@ -1 +0,0 @@
-credential.py
@@ -1 +0,0 @@
-credential_input_source.py
@@ -1 +0,0 @@
-credential_type.py
@@ -1 +0,0 @@
-execution_environment.py
@@ -1 +0,0 @@
-export.py
@@ -1 +0,0 @@
-group.py
@@ -1 +0,0 @@
-host.py
@@ -1 +0,0 @@
-import.py
@@ -1 +0,0 @@
-instance_group.py
@@ -1 +0,0 @@
-inventory.py
@@ -1 +0,0 @@
-inventory_source.py
@@ -1 +0,0 @@
-inventory_source_update.py
@@ -1 +0,0 @@
-job_cancel.py
@@ -1 +0,0 @@
-job_launch.py
@@ -1 +0,0 @@
-job_list.py
@@ -1 +0,0 @@
-job_template.py
@@ -1 +0,0 @@
-job_wait.py
@@ -1 +0,0 @@
-label.py
@@ -1 +0,0 @@
-license.py
@@ -1 +0,0 @@
-notification_template.py
@@ -1 +0,0 @@
-organization.py
@@ -1 +0,0 @@
-project.py
@@ -1 +0,0 @@
-project_update.py
@@ -1 +0,0 @@
-role.py
@@ -1 +0,0 @@
-schedule.py
@@ -1 +0,0 @@
-settings.py
@@ -1 +0,0 @@
-team.py
@@ -1 +0,0 @@
-token.py
@@ -1 +0,0 @@
-user.py
@@ -1 +0,0 @@
-workflow_approval.py
@@ -1 +0,0 @@
-workflow_job_template.py
@@ -1 +0,0 @@
-workflow_job_template_node.py
@@ -1 +0,0 @@
-workflow_launch.py
@@ -1 +0,0 @@
-workflow_node_wait.py
@@ -19,7 +19,6 @@ author: "John Westcott IV (@john-westcott-iv)"
 short_description: create, update, or destroy Automation Platform Controller workflow job templates.
 description:
     - Create, update, or destroy Automation Platform Controller workflow job templates.
-    - Replaces the deprecated tower_workflow_template module.
     - Use workflow_job_template_node after this, or use the workflow_nodes parameter to build the workflow's graph
 options:
     name:
@@ -614,6 +613,10 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
                 if workflow_node['unified_job_template']['type'] != 'workflow_approval':
                     module.fail_json(msg="Unable to Find unified_job_template: {0}".format(search_fields))

+        inventory = workflow_node.get('inventory')
+        if inventory:
+            workflow_node_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)
+
         # Lookup Values for other fields

         for field_name in (
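The added lines resolve an optional per-node inventory name to its id, so nodes defined inline through the workflow_nodes parameter can override the inventory they run against. A sketch of what this enables, assuming the node-level inventory is given as a plain name (all names illustrative):

- name: Create a workflow whose node runs against a prompted inventory
  awx.awx.workflow_job_template:
    name: Deploy Pipeline
    organization: Default
    workflow_nodes:
      - identifier: deploy-staging
        unified_job_template:
          name: Deploy App
          type: job_template
        inventory: Staging Inventory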
@@ -20,7 +20,6 @@ short_description: create, update, or destroy Automation Platform Controller wor
 description:
     - Create, update, or destroy Automation Platform Controller workflow job template nodes.
     - Use this to build a graph for a workflow, which dictates what the workflow runs.
-    - Replaces the deprecated tower_workflow_template module schema command.
     - You can create nodes first, and link them afterwards, and not worry about ordering.
      For failsafe referencing of a node, specify identifier, WFJT, and organization.
      With those specified, you can choose to modify or not modify any other parameter.
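As the documentation above describes, nodes can be created first and linked afterwards, with identifier, workflow job template, and organization serving as the stable reference. A minimal sketch of that two-phase pattern (all names illustrative):

- name: Create two unlinked nodes
  awx.awx.workflow_job_template_node:
    identifier: "{{ item }}"
    workflow_job_template: Deploy Pipeline
    organization: Default
    unified_job_template: Deploy App
  loop:
    - node-a
    - node-b

- name: Link node-a so that node-b runs on success
  awx.awx.workflow_job_template_node:
    identifier: node-a
    workflow_job_template: Deploy Pipeline
    organization: Default
    success_nodes:
      - node-b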
3 awx_collection/tests/config.yml Normal file
@@ -0,0 +1,3 @@
+---
+modules:
+  python_requires: '>3'
@@ -1,88 +0,0 @@
-plugins/module_utils/awxkit.py import-3.9
-plugins/module_utils/controller_api.py import-3.9
-plugins/modules/ad_hoc_command.py import-3.9
-plugins/modules/ad_hoc_command_cancel.py import-3.9
-plugins/modules/ad_hoc_command_wait.py import-3.9
-plugins/modules/application.py import-3.9
-plugins/modules/controller_meta.py import-3.9
-plugins/modules/credential.py import-3.9
-plugins/modules/credential_input_source.py import-3.9
-plugins/modules/credential_type.py import-3.9
-plugins/modules/execution_environment.py import-3.9
-plugins/modules/export.py import-3.9
-plugins/modules/group.py import-3.9
-plugins/modules/host.py import-3.9
-plugins/modules/import.py import-3.9
-plugins/modules/instance.py import-3.9
-plugins/modules/instance_group.py import-3.9
-plugins/modules/inventory.py import-3.9
-plugins/modules/inventory_source.py import-3.9
-plugins/modules/inventory_source_update.py import-3.9
-plugins/modules/job_cancel.py import-3.9
-plugins/modules/job_launch.py import-3.9
-plugins/modules/job_list.py import-3.9
-plugins/modules/job_template.py import-3.9
-plugins/modules/job_wait.py import-3.9
-plugins/modules/label.py import-3.9
-plugins/modules/license.py import-3.9
-plugins/modules/notification_template.py import-3.9
-plugins/modules/organization.py import-3.9
-plugins/modules/project.py import-3.9
-plugins/modules/project_update.py import-3.9
-plugins/modules/role.py import-3.9
-plugins/modules/schedule.py import-3.9
-plugins/modules/settings.py import-3.9
-plugins/modules/subscriptions.py import-3.9
-plugins/modules/team.py import-3.9
-plugins/modules/token.py import-3.9
-plugins/modules/user.py import-3.9
-plugins/modules/workflow_approval.py import-3.9
-plugins/modules/workflow_job_template.py import-3.9
-plugins/modules/workflow_job_template_node.py import-3.9
-plugins/modules/workflow_launch.py import-3.9
-plugins/modules/workflow_node_wait.py import-3.9
-plugins/inventory/controller.py import-3.10
-plugins/lookup/controller_api.py import-3.10
-plugins/module_utils/awxkit.py import-3.10
-plugins/module_utils/controller_api.py import-3.10
-plugins/modules/ad_hoc_command.py import-3.10
-plugins/modules/ad_hoc_command_cancel.py import-3.10
-plugins/modules/ad_hoc_command_wait.py import-3.10
-plugins/modules/application.py import-3.10
-plugins/modules/controller_meta.py import-3.10
-plugins/modules/credential.py import-3.10
-plugins/modules/credential_input_source.py import-3.10
-plugins/modules/credential_type.py import-3.10
-plugins/modules/execution_environment.py import-3.10
-plugins/modules/export.py import-3.10
-plugins/modules/group.py import-3.10
-plugins/modules/host.py import-3.10
-plugins/modules/import.py import-3.10
-plugins/modules/instance.py import-3.10
-plugins/modules/instance_group.py import-3.10
-plugins/modules/inventory.py import-3.10
-plugins/modules/inventory_source.py import-3.10
-plugins/modules/inventory_source_update.py import-3.10
-plugins/modules/job_cancel.py import-3.10
-plugins/modules/job_launch.py import-3.10
-plugins/modules/job_list.py import-3.10
-plugins/modules/job_template.py import-3.10
-plugins/modules/job_wait.py import-3.10
-plugins/modules/label.py import-3.10
-plugins/modules/license.py import-3.10
-plugins/modules/notification_template.py import-3.10
-plugins/modules/organization.py import-3.10
-plugins/modules/project.py import-3.10
-plugins/modules/project_update.py import-3.10
-plugins/modules/role.py import-3.10
-plugins/modules/schedule.py import-3.10
-plugins/modules/settings.py import-3.10
-plugins/modules/subscriptions.py import-3.10
-plugins/modules/team.py import-3.10
-plugins/modules/token.py import-3.10
-plugins/modules/user.py import-3.10
-plugins/modules/workflow_approval.py import-3.10
-plugins/modules/workflow_job_template.py import-3.10
-plugins/modules/workflow_job_template_node.py import-3.10
-plugins/modules/workflow_launch.py import-3.10
-plugins/modules/workflow_node_wait.py import-3.10
1 awx_collection/tests/sanity/ignore-2.14.txt Normal file
@@ -0,0 +1 @@
+plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec

1 awx_collection/tests/sanity/ignore-2.15.txt Normal file
@@ -0,0 +1 @@
+plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec
@@ -74,6 +74,7 @@ Notable releases of the `{{ collection_namespace }}.{{ collection_package }}` co
 - 7.0.0 is intended to be identical to the content prior to the migration, aside from changes necessary to function as a collection.
 - 11.0.0 has no non-deprecated modules that depend on the deprecated `tower-cli` [PyPI](https://pypi.org/project/ansible-tower-cli/).
 - 19.2.1 large renaming purged "tower" names (like options and module names), adding redirects for old names
+- 21.11.0 "tower" modules deprecated and symlinks removed.
 - 0.0.1-devel is the version you should see if installing from source, which is intended for development and expected to be unstable.
 {% else %}
 - 3.7.0 initial release
@@ -1,6 +1,10 @@
 awxkit
 ======

-Python library that backs the provided `awx` command line client.
+A Python library that backs the provided `awx` command line client.
+
+It can be installed by running `pip install awxkit`.
+
+The PyPI repository can be found [here](https://pypi.org/project/awxkit/).

 For more information on installing the CLI and building the docs on how to use it, look [here](./awxkit/cli/docs).
@@ -157,6 +157,15 @@ For now, we pin to the old version, 3.4.1
 * https://github.com/django/channels_redis/issues/332
 * https://github.com/ansible/awx/issues/13313

+### hiredis
+
+The hiredis 2.1.0 release doesn't provide a source distribution on PyPI, which prevents users from building that
+Python package from source.
+We are downgrading to 2.0.0 (which does provide a source distribution) until the channels-redis issue is fixed or a
+newer hiredis version with a source distribution is available on PyPI.
+
+* https://github.com/redis/hiredis-py/issues/138
+
 ## Library Notes

 ### pexpect
@@ -26,6 +26,7 @@ djangorestframework==3.13.1
 djangorestframework-yaml
 filelock
 GitPython
+hiredis==2.0.0 # see UPGRADE BLOCKERs
 irc
 jinja2
 JSON-log-formatter
@@ -42,7 +43,7 @@ python-dsv-sdk
 python-tss-sdk==1.0.0
 python-ldap
 pyyaml
-receptorctl==1.2.3
+receptorctl==1.3.0
 schedule==0.6.0
 social-auth-core[openidconnect]==4.3.0 # see UPGRADE BLOCKERs
 social-auth-app-django==5.0.0 # see UPGRADE BLOCKERs
@@ -158,8 +158,10 @@ gitpython==3.1.29
     # via -r /awx_devel/requirements/requirements.in
 google-auth==2.14.1
     # via kubernetes
-hiredis==2.1.0
-    # via aioredis
+hiredis==2.0.0
+    # via
+    #   -r /awx_devel/requirements/requirements.in
+    #   aioredis
 hyperlink==21.0.0
     # via
     #   autobahn
@@ -336,7 +338,7 @@ pyyaml==6.0
     #   djangorestframework-yaml
     #   kubernetes
     #   receptorctl
-receptorctl==1.2.3
+receptorctl==1.3.0
     # via -r /awx_devel/requirements/requirements.in
 redis==4.3.5
     # via
@@ -116,7 +116,7 @@ RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
     python3-psycopg2 \
     python3-setuptools \
     rsync \
-    "rsyslog >= 8.1911.0" \
+    rsyslog-8.2102.0-106.el9 \
     subversion \
     sudo \
     vim-minimal \