Compare commits
78 Commits
| SHA1 |
|---|
| b44bb98c7e |
| 8cafdf0400 |
| 3f566c8737 |
| c8021a25bf |
| 934646a0f6 |
| 9bb97dd658 |
| 7150f5edc6 |
| 93da15c0ee |
| ab593bda45 |
| 065bd3ae2a |
| 8ff7260bc6 |
| a635445082 |
| 949e7efab1 |
| 615f09226f |
| d903c524f5 |
| 393d9c39c6 |
| dfab342bb4 |
| 12843eccf7 |
| dd9160135d |
| ad96a92fa7 |
| ca8085fe7e |
| b076cb00a9 |
| ee9eac15dc |
| 3f2f7b75a6 |
| b71645f3b1 |
| eb300252b8 |
| 2e2cd7f2de |
| 727278aaa3 |
| 81825ab755 |
| 7f2a1b6b03 |
| 1b56d94d30 |
| e1e32c971c |
| a4a2fabc01 |
| b7b7bfa520 |
| 887604317e |
| d35d8b6ed7 |
| ec28eff7f7 |
| a5d17539c6 |
| a49d894cf1 |
| b3466d4449 |
| 237adc6150 |
| 09b028ee3c |
| fb83bfbc31 |
| 88e406e121 |
| 59d0bcc63f |
| 3fb3125bc3 |
| d70c6b9474 |
| 5549516a37 |
| 14ac91a8a2 |
| d5753818a0 |
| 33010a2e02 |
| 14454cc670 |
| 7ab2bca16e |
| f0f655f2c3 |
| 4286d411a7 |
| 06ad32ed8e |
| 1ebff23232 |
| 700de14c76 |
| 8605e339df |
| e50954ce40 |
| 7caca60308 |
| f4e13af056 |
| decdb56288 |
| bcd4c2e8ef |
| d663066ac5 |
| 1ceebb275c |
| f78ba282a6 |
| 81d88df757 |
| 0bdb01a9e9 |
| cd91fbf59f |
| f240e640e5 |
| 46f489185e |
| dbb80fb7e3 |
| cb3d357ce1 |
| dfa4db9266 |
| 6906a88dc9 |
| 1f7be9258c |
| dcce024424 |
.github/actions/awx_devel_image/action.yml (10 changes)

```diff
@@ -11,6 +11,12 @@ runs:
       shell: bash
       run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

+    - name: Set lower case owner name
+      shell: bash
+      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
+      env:
+        OWNER: '${{ github.repository_owner }}'
+
     - name: Log in to registry
       shell: bash
       run: |
@@ -18,11 +24,11 @@ runs:

     - name: Pre-pull latest devel image to warm cache
       shell: bash
-      run: docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+      run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}

     - name: Build image for current source checkout
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
         COMPOSE_TAG=${{ github.base_ref }} \
         make docker-compose-build
```
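The `${OWNER,,}` expansion introduced above is plain bash parameter expansion (bash 4+), used here because ghcr.io requires lowercase image paths while `github.repository_owner` may contain uppercase characters. A minimal sketch; the value of OWNER is a placeholder:

```bash
OWNER="MyOrg"
# ${VAR,,} lowercases the entire value (${VAR^^} would uppercase it).
echo "${OWNER,,}"   # prints: myorg
```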
.github/actions/run_awx_devel/action.yml (2 changes)

```diff
@@ -35,7 +35,7 @@ runs:
     - name: Start AWX
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
         COMPOSE_TAG=${{ github.base_ref }} \
         COMPOSE_UP_OPTS="-d" \
         make docker-compose
```
.github/pr_labeler.yml (3 changes)

```diff
@@ -15,5 +15,4 @@

 "dependencies":
   - any: ["awx/ui/package.json"]
-  - any: ["requirements/*.txt"]
-  - any: ["requirements/requirements.in"]
+  - any: ["requirements/*"]
```
.github/workflows/ci.yml (6 changes)

```diff
@@ -107,7 +107,7 @@ jobs:
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)
           make kustomize
-          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
         env:
           AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
@@ -127,10 +127,6 @@ jobs:

       - name: Run sanity tests
         run: make test_collection_sanity
-        env:
-          # needed due to cgroupsv2. This is fixed, but a stable release
-          # with the fix has not been made yet.
-          ANSIBLE_TEST_PREFER_PODMAN: 1

   collection-integration:
     name: awx_collection integration
```
.github/workflows/devel_images.yml (64 changes)

```diff
@@ -3,28 +3,50 @@ name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
       - feature_*
 jobs:
-  push:
-    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
+  push-development-images:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
+    timeout-minutes: 120
     permissions:
       packages: write
       contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        build-targets:
+          - image-name: awx_devel
+            make-target: docker-compose-buildx
+          - image-name: awx_kube_devel
+            make-target: awx-kube-dev-buildx
+          - image-name: awx
+            make-target: awx-kube-buildx
     steps:
+      - name: Skipping build of awx image for non-awx repository
+        run: |
+          echo "Skipping build of awx image for non-awx repository"
+          exit 0
+        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')

       - uses: actions/checkout@v3

-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3

-      - name: Set lower case owner name
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Set GITHUB_ENV variables
         run: |
           echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
+          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
+          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
         env:
           OWNER: '${{ github.repository_owner }}'
@@ -37,23 +59,19 @@ jobs:
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
+      - name: Setup node and npm
+        uses: actions/setup-node@v2
+        with:
+          node-version: '16.13.1'
+        if: matrix.build-targets.image-name == 'awx'

-      - name: Build images
+      - name: Prebuild UI for awx image (to speed up build process)
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
+          sudo apt-get install gettext
+          make ui-release
+          make ui-next
+        if: matrix.build-targets.image-name == 'awx'

-      - name: Push development images
+      - name: Build and push AWX devel images
         run: |
-          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
-          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
-
-      - name: Push AWX k8s image, only for upstream and feature branches
-        run: docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
-        if: endsWith(github.repository, '/awx')
+          make ${{ matrix.build-targets.make-target }}
```
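The `${GITHUB_REF##*/}` expansions that feed COMPOSE_TAG strip the longest prefix ending in `/`, reducing a fully qualified ref to a bare branch name. A quick illustration with a placeholder ref:

```bash
GITHUB_REF="refs/heads/release_24.0"
# ##*/ removes everything through the final slash.
echo "${GITHUB_REF##*/}"   # prints: release_24.0
```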
.github/workflows/promote.yml (20 changes)

```diff
@@ -83,11 +83,15 @@ jobs:

       - name: Re-tag and promote awx image
         run: |
-          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
-          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository }}:latest
-          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
+            --tag quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
+            --tag quay.io/${{ github.repository }}:latest
+
+      - name: Re-tag and promote awx-ee image
+        run: |
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} \
+            --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
```
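Unlike the old pull/tag/push sequence, `docker buildx imagetools create` copies a manifest (including a multi-arch manifest list) between registries without pulling any image layers locally. A hedged standalone sketch; the image names and tags are placeholders:

```bash
# Re-point a new tag at an existing (possibly multi-arch) image in another registry.
docker buildx imagetools create \
  ghcr.io/example/awx:24.0.0 \
  --tag quay.io/example/awx:24.0.0

# Inspect the result to confirm every platform made it across.
docker buildx imagetools inspect quay.io/example/awx:24.0.0
```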
.github/workflows/stage.yml (6 changes)

```diff
@@ -102,9 +102,9 @@ jobs:

       - name: tag awx-ee:latest with version input
         run: |
-          docker pull quay.io/ansible/awx-ee:latest
-          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
-          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker buildx imagetools create \
+            quay.io/ansible/awx-ee:latest \
+            --tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}

       - name: Stage awx-operator image
         working-directory: awx-operator
```
.gitignore (5 changes)

```diff
@@ -46,6 +46,11 @@ tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
 tools/docker-compose/keycloak.awx.realm.json

+!tools/docker-compose/editable_dependencies
+tools/docker-compose/editable_dependencies/*
+!tools/docker-compose/editable_dependencies/README.md
+!tools/docker-compose/editable_dependencies/install.sh
+
 # Tower setup playbook testing
 setup/test/roles/postgresql
 **/provision_docker
```
.vscode/launch.json (113 changes, new file)

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "run_ws_heartbeat",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_ws_heartbeat"],
            "django": true,
            "preLaunchTask": "stop awx-ws-heartbeat",
            "postDebugTask": "start awx-ws-heartbeat"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_callback_receiver",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_callback_receiver"],
            "django": true,
            "preLaunchTask": "stop awx-receiver",
            "postDebugTask": "start awx-receiver"
        },
        {
            "name": "run_dispatcher",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_dispatcher"],
            "django": true,
            "preLaunchTask": "stop awx-dispatcher",
            "postDebugTask": "start awx-dispatcher"
        },
        {
            "name": "run_rsyslog_configurer",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_rsyslog_configurer"],
            "django": true,
            "preLaunchTask": "stop awx-rsyslog-configurer",
            "postDebugTask": "start awx-rsyslog-configurer"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_wsrelay",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_wsrelay"],
            "django": true,
            "preLaunchTask": "stop awx-wsrelay",
            "postDebugTask": "start awx-wsrelay"
        },
        {
            "name": "daphne",
            "type": "debugpy",
            "request": "launch",
            "program": "/var/lib/awx/venv/awx/bin/daphne",
            "args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
            "django": true,
            "preLaunchTask": "stop awx-daphne",
            "postDebugTask": "start awx-daphne"
        },
        {
            "name": "runserver(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "runserver_plus(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver_plus", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi and install Werkzeug",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "shell_plus",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["shell_plus"],
            "django": true,
        },
    ]
}
```
.vscode/tasks.json (100 changes, new file)

```json
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "start awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-cache-clear"
        },
        {
            "label": "stop awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-cache-clear"
        },
        {
            "label": "start awx-daphne",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-daphne"
        },
        {
            "label": "stop awx-daphne",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-daphne"
        },
        {
            "label": "start awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-dispatcher"
        },
        {
            "label": "stop awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-dispatcher"
        },
        {
            "label": "start awx-receiver",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-receiver"
        },
        {
            "label": "stop awx-receiver",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-receiver"
        },
        {
            "label": "start awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "stop awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "start awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslogd"
        },
        {
            "label": "stop awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslogd"
        },
        {
            "label": "start awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi and install Werkzeug",
            "type": "shell",
            "command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "start awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "stop awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "start awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-wsrelay"
        },
        {
            "label": "stop awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-wsrelay"
        }
    ]
}
```
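All of these tasks are thin wrappers around supervisorctl, which supervises the AWX services inside the development container: the pre-launch task frees a process slot for the debugger, and the post-debug task hands it back. Roughly equivalent shell usage, assuming you are inside the dev container:

```bash
# List the supervised AWX processes.
supervisorctl status 'tower-processes:*'

# Free uwsgi for a debug session, then restore it afterwards.
supervisorctl stop tower-processes:awx-uwsgi
# ... debug with the "runserver(uwsgi alternative)" launch config ...
supervisorctl start tower-processes:awx-uwsgi
```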
Makefile (90 changes)

```diff
@@ -1,6 +1,6 @@
 -include awx/ui_next/Makefile

-PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
+PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
 SHELL := bash
 DOCKER_COMPOSE ?= docker-compose
 OFFICIAL ?= no
@@ -47,6 +47,8 @@ VAULT ?= false
 VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
+# If set to true docker-compose will install editable dependencies
+EDITABLE_DEPENDENCIES ?= false

 VENV_BASE ?= /var/lib/awx/venv

@@ -63,7 +65,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0

 NAME ?= awx

@@ -75,6 +77,9 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz

 I18N_FLAG_FILE = .i18n_built

+## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
+PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
+
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 	develop refresh adduser migrate dbchange \
 	receiver test test_unit test_coverage coverage_html \
@@ -213,8 +218,6 @@ collectstatic:
 	fi; \
 	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

-DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
-
 uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
@@ -222,7 +225,7 @@ uwsgi: collectstatic
 	uwsgi /etc/tower/uwsgi.ini

 awx-autoreload:
-	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
+	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx

 daphne:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -302,7 +305,7 @@ swagger: reports
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
+	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)

 check: black

@@ -532,16 +535,23 @@ docker-compose-sources: .git/hooks/pre-commit
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
 		-e enable_tacacs=$(TACACS) \
-		$(EXTRA_SOURCES_ANSIBLE_OPTS)
+		-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+		$(EXTRA_SOURCES_ANSIBLE_OPTS)

 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
-		-e enable_ldap=$(LDAP);
+		-e enable_ldap=$(LDAP); \
+	$(MAKE) docker-compose-up
+
+docker-compose-up:
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
+
+docker-compose-down:
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans

 docker-compose-credential-plugins: awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
@@ -586,35 +596,27 @@ docker-compose-build: Dockerfile.dev
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
 		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .

-# ## Build awx_devel image for docker compose development environment for multiple architectures
-# docker-compose-buildx: Dockerfile.dev
-# 	DOCKER_BUILDKIT=1 docker build \
-# 	  -f Dockerfile.dev \
-# 	  -t $(DEVEL_IMAGE_NAME) \
-# 	  --build-arg BUILDKIT_INLINE_CACHE=1 \
-# 	  --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
-
-## Build awx_devel image for docker compose development environment for multiple architectures
-# PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
-# architectures. (i.e. make docker-buildx IMG=myregistry/mypoperator:0.0.1). To use this option you need to:
-# - able to use docker buildx . More info: https://docs.docker.com/build/buildx/
-# - have enable BuildKit, More info: https://docs.docker.com/develop/develop-images/build_enhancements/
-# - be able to push the image for your registry (i.e. if you do not inform a valid value via IMG=<myregistry/image:<tag>> than the export will fail)
-# To properly provided solutions that supports more than one platform you should use this option.
-PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
-.PHONY: docker-compose-buildx
-docker-compose-buildx: Dockerfile.dev ## Build and push docker image for the manager for cross-platform support
-	- docker buildx create --name project-v3-builder
-	docker buildx use project-v3-builder
-	- docker buildx build --push $(BUILD_ARGS) --platform=$(PLATFORMS) --tag $(DEVEL_IMAGE_NAME) -f Dockerfile.dev .
-	- docker buildx rm project-v3-builder
+## Build awx_devel image for docker compose development environment for multiple architectures
+docker-compose-buildx: Dockerfile.dev
+	- docker buildx create --name docker-compose-buildx
+	docker buildx use docker-compose-buildx
+	- docker buildx build \
+	  --push \
+	  --build-arg BUILDKIT_INLINE_CACHE=1 \
+	  --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) \
+	  --platform=$(PLATFORMS) \
+	  --tag $(DEVEL_IMAGE_NAME) \
+	  -f Dockerfile.dev .
+	- docker buildx rm docker-compose-buildx

 docker-clean:
 	-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
 	-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)

 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-	docker volume rm -f tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
+	docker volume rm -f tools_var_lib_awx tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)

 docker-refresh: docker-clean docker-compose

@@ -636,9 +638,6 @@ clean-elk:
 	docker rm tools_elasticsearch_1
 	docker rm tools_kibana_1

-psql-container:
-	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
-
 VERSION:
 	@echo "awx: $(VERSION)"

@@ -671,6 +670,21 @@ awx-kube-build: Dockerfile
 	--build-arg HEADLESS=$(HEADLESS) \
 	-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .

+## Build multi-arch awx image for deployment on Kubernetes environment.
+awx-kube-buildx: Dockerfile
+	- docker buildx create --name awx-kube-buildx
+	docker buildx use awx-kube-buildx
+	- docker buildx build \
+	  --push \
+	  --build-arg VERSION=$(VERSION) \
+	  --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+	  --build-arg HEADLESS=$(HEADLESS) \
+	  --platform=$(PLATFORMS) \
+	  --tag $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) \
+	  -f Dockerfile .
+	- docker buildx rm awx-kube-buildx
+
+
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
@@ -687,6 +701,18 @@ awx-kube-dev-build: Dockerfile.kube-dev
 	--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
 	-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .

+## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
+awx-kube-dev-buildx: Dockerfile.kube-dev
+	- docker buildx create --name awx-kube-dev-buildx
+	docker buildx use awx-kube-dev-buildx
+	- docker buildx build \
+	  --push \
+	  --build-arg BUILDKIT_INLINE_CACHE=1 \
+	  --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+	  --platform=$(PLATFORMS) \
+	  --tag $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+	  -f Dockerfile.kube-dev .
+	- docker buildx rm awx-kube-dev-buildx

 kind-dev-load: awx-kube-dev-build
 	$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
```
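Each new buildx target follows the same pattern: create a throwaway builder, build and push a multi-arch manifest in a single step, then remove the builder (the leading `-` lets make continue when the builder already exists or cleanup fails). A hypothetical invocation; the registry, tag, and platform list are placeholders:

```bash
# Build and push a two-architecture awx_devel image.
# Requires docker buildx and push access to the target registry.
DEV_DOCKER_TAG_BASE=ghcr.io/example \
COMPOSE_TAG=devel \
PLATFORMS=linux/amd64,linux/arm64 \
make docker-compose-buildx
```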
```diff
@@ -154,10 +154,12 @@ def manage():
     from django.conf import settings
     from django.core.management import execute_from_command_line

-    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
+    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
+    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
+    # The return of connection.pg_version is something like 12013
     if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
         if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("Postgres version 12 is required\n")
+            sys.stderr.write("At a minimum, postgres version 12 is required\n")
             sys.exit(1)

     if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
```
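The check works because PostgreSQL 10+ reports its version as `major * 10000 + minor` (12.13 is 120013, which the comment above appears to abbreviate as 12013), and Django's `connection.pg_version` exposes that integer, so integer division by 10000 recovers the major version. You can inspect the raw value directly; the connection parameters below are placeholders:

```bash
# Prints e.g. 120013 for PostgreSQL 12.13; 120013 // 10000 == 12.
psql -h postgres -U postgres -tAc 'SHOW server_version_num'
```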
```diff
@@ -93,6 +93,7 @@ register(
     default='',
     label=_('Login redirect override URL'),
     help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
+    warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
     category=_('Authentication'),
     category_slug='authentication',
 )
```
```diff
@@ -36,11 +36,13 @@ class Metadata(metadata.SimpleMetadata):
         field_info = OrderedDict()
         field_info['type'] = self.label_lookup[field]
         field_info['required'] = getattr(field, 'required', False)
+        field_info['hidden'] = getattr(field, 'hidden', False)

         text_attrs = [
             'read_only',
             'label',
             'help_text',
+            'warning_text',
             'min_length',
             'max_length',
             'min_value',
```
```diff
@@ -5594,7 +5594,7 @@ class InstanceSerializer(BaseSerializer):
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
         res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
         res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
-        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
+        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP] and not obj.managed:
             res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
         if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
             if obj.node_type == 'execution':
```
```diff
@@ -24,6 +24,10 @@ def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **e
     else:
         url = _reverse(viewname, args, kwargs, request, format, **extra)

+    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
+        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
+            url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
+
     return url
```
```diff
@@ -272,16 +272,24 @@ class DashboardJobsGraphView(APIView):

         success_query = user_unified_jobs.filter(status='successful')
         failed_query = user_unified_jobs.filter(status='failed')
+        canceled_query = user_unified_jobs.filter(status='canceled')
+        error_query = user_unified_jobs.filter(status='error')

         if job_type == 'inv_sync':
             success_query = success_query.filter(instance_of=models.InventoryUpdate)
             failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
+            canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
+            error_query = error_query.filter(instance_of=models.InventoryUpdate)
         elif job_type == 'playbook_run':
             success_query = success_query.filter(instance_of=models.Job)
             failed_query = failed_query.filter(instance_of=models.Job)
+            canceled_query = canceled_query.filter(instance_of=models.Job)
+            error_query = error_query.filter(instance_of=models.Job)
         elif job_type == 'scm_update':
             success_query = success_query.filter(instance_of=models.ProjectUpdate)
             failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
+            canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
+            error_query = error_query.filter(instance_of=models.ProjectUpdate)

         end = now()
         interval = 'day'
@@ -297,10 +305,12 @@ class DashboardJobsGraphView(APIView):
         else:
             return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)

-        dashboard_data = {"jobs": {"successful": [], "failed": []}}
+        dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}}

         succ_list = dashboard_data['jobs']['successful']
         fail_list = dashboard_data['jobs']['failed']
+        canceled_list = dashboard_data['jobs']['canceled']
+        error_list = dashboard_data['jobs']['error']

         qs_s = (
             success_query.filter(finished__range=(start, end))
@@ -318,6 +328,22 @@ class DashboardJobsGraphView(APIView):
             .annotate(agg=Count('id', distinct=True))
         )
         data_f = {item['d']: item['agg'] for item in qs_f}
+        qs_c = (
+            canceled_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_c = {item['d']: item['agg'] for item in qs_c}
+        qs_e = (
+            error_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_e = {item['d']: item['agg'] for item in qs_e}

         start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
         for d in itertools.count():
@@ -326,6 +352,8 @@ class DashboardJobsGraphView(APIView):
                 break
             succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
             fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
+            canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
+            error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])

         return Response(dashboard_data)
```
```diff
@@ -55,6 +55,7 @@ register(
     # Optional; category_slug will be slugified version of category if not
     # explicitly provided.
     category_slug='cows',
+    hidden=True,
 )
```
```diff
@@ -127,6 +127,8 @@ class SettingsRegistry(object):
         encrypted = bool(field_kwargs.pop('encrypted', False))
         defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
         unit = field_kwargs.pop('unit', None)
+        hidden = field_kwargs.pop('hidden', False)
+        warning_text = field_kwargs.pop('warning_text', None)
         if getattr(field_kwargs.get('child', None), 'source', None) is not None:
             field_kwargs['child'].source = None
         field_instance = field_class(**field_kwargs)
@@ -134,12 +136,14 @@ class SettingsRegistry(object):
         field_instance.category = category
         field_instance.depends_on = depends_on
         field_instance.unit = unit
+        field_instance.hidden = hidden
         if placeholder is not empty:
             field_instance.placeholder = placeholder
         field_instance.defined_in_file = defined_in_file
         if field_instance.defined_in_file:
             field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
         field_instance.encrypted = encrypted
+        field_instance.warning_text = warning_text
         original_field_instance = field_instance
         if field_class != original_field_class:
             original_field_instance = original_field_class(**field_kwargs)
```
```diff
@@ -1,6 +1,7 @@
 # Python
 import contextlib
 import logging
+import psycopg
 import threading
 import time
 import os
@@ -13,7 +14,7 @@ from django.conf import settings, UserSettingsHolder
 from django.core.cache import cache as django_cache
 from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
 from django.db import transaction, connection
-from django.db.utils import Error as DBError, ProgrammingError
+from django.db.utils import DatabaseError, ProgrammingError
 from django.utils.functional import cached_property

 # Django REST Framework
@@ -80,18 +81,26 @@ def _ctit_db_wrapper(trans_safe=False):
             logger.debug('Obtaining database settings in spite of broken transaction.')
             transaction.set_rollback(False)
         yield
-    except DBError as exc:
+    except ProgrammingError as e:
+        # Exception raised for programming errors
+        # Examples may be table not found or already exists,
+        # this generally means we can't fetch Tower configuration
+        # because the database hasn't actually finished migrating yet;
+        # this is usually a sign that a service in a container (such as ws_broadcast)
+        # has come up *before* the database has finished migrating, and
+        # especially that the conf.settings table doesn't exist yet
+        # syntax error in the SQL statement, wrong number of parameters specified, etc.
         if trans_safe:
-            level = logger.warning
-            if isinstance(exc, ProgrammingError):
-                if 'relation' in str(exc) and 'does not exist' in str(exc):
-                    level = logger.debug
-            level(f'Database settings are not available, using defaults. error: {str(exc)}')
+            logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
         else:
             logger.exception('Error modifying something related to database settings.')
+    except DatabaseError as e:
+        if trans_safe:
+            cause = e.__cause__
+            if cause and hasattr(cause, 'sqlstate'):
+                sqlstate = cause.sqlstate
+                sqlstate_str = psycopg.errors.lookup(sqlstate)
+                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
+        else:
+            logger.exception('Error modifying something related to database settings.')
     finally:
```
```diff
@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
                 resolved_action,
                 resolved_role,
                 -- '-' operator listed here:
-                -- https://www.postgresql.org/docs/12/functions-json.html
+                -- https://www.postgresql.org/docs/15/functions-json.html
                 -- note that operator is only supported by jsonb objects
                 -- https://www.postgresql.org/docs/current/datatype-json.html
                 (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
```
```diff
@@ -92,6 +92,7 @@ register(
     ),
     category=_('System'),
     category_slug='system',
+    required=False,
 )

 register(
@@ -774,6 +775,7 @@ register(
     allow_null=True,
     category=_('System'),
     category_slug='system',
+    required=False,
 )
 register(
     'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -815,6 +817,7 @@ register(
     help_text=_('Max jobs to allow bulk jobs to launch'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -825,6 +828,7 @@ register(
     help_text=_('Max number of hosts to allow to be created in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -835,6 +839,7 @@ register(
     help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -845,6 +850,7 @@ register(
     help_text=_('Enable preview of new user interface.'),
     category=_('System'),
     category_slug='system',
+    hidden=True,
 )

 register(
```
```diff
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
```
```diff
@@ -1,9 +1,10 @@
+from azure.keyvault.secrets import SecretClient
+from azure.identity import ClientSecretCredential
+from msrestazure import azure_cloud
+
 from .plugin import CredentialPlugin

 from django.utils.translation import gettext_lazy as _
-from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
-from azure.common.credentials import ServicePrincipalCredentials
-from msrestazure import azure_cloud


 # https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
@@ -54,22 +55,9 @@ azure_keyvault_inputs = {


 def azure_keyvault_backend(**kwargs):
-    url = kwargs['url']
     [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]

-    def auth_callback(server, resource, scope):
-        credentials = ServicePrincipalCredentials(
-            url=url,
-            client_id=kwargs['client'],
-            secret=kwargs['secret'],
-            tenant=kwargs['tenant'],
-            resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
-        )
-        token = credentials.token
-        return token['token_type'], token['access_token']
-
-    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
-    return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
+    csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
+    kv = SecretClient(credential=csc, vault_url=kwargs['url'])
+    return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value


 azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
```
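The rewrite moves off the long-deprecated `azure.keyvault`/`azure.common` SDKs to the current split packages: `ClientSecretCredential` ships in azure-identity and `SecretClient` in azure-keyvault-secrets, so environments building this code presumably need those distributions (a hedged note; pin versions to match your requirements files):

```bash
pip install azure-identity azure-keyvault-secrets
```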
```diff
@@ -259,6 +259,12 @@ class AWXConsumerPG(AWXConsumerBase):
                     current_downtime = time.time() - self.pg_down_time
                     if current_downtime > self.pg_max_wait:
                         logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
+                        # Sending QUIT to multiprocess queue to signal workers to exit
+                        for worker in self.pool.workers:
+                            try:
+                                worker.quit()
+                            except Exception:
+                                logger.exception(f"Error sending QUIT to worker {worker}")
                         raise
                 # Wait for a second before next attempt, but still listen for any shutdown signals
                 for i in range(10):
@@ -270,6 +276,12 @@ class AWXConsumerPG(AWXConsumerBase):
         except Exception:
             # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
             logger.exception('Encountered unhandled error in dispatcher main loop')
+            # Sending QUIT to multiprocess queue to signal workers to exit
+            for worker in self.pool.workers:
+                try:
+                    worker.quit()
+                except Exception:
+                    logger.exception(f"Error sending QUIT to worker {worker}")
             raise
```
awx/main/management/commands/dump_auth_config.py (179 changes, new file)

```python
import json
import os
import sys
import re

from typing import Any
from django.core.management.base import BaseCommand
from django.conf import settings
from awx.conf import settings_registry


class Command(BaseCommand):
    help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'

    DAB_SAML_AUTHENTICATOR_KEYS = {
        "SP_ENTITY_ID": True,
        "SP_PUBLIC_CERT": True,
        "SP_PRIVATE_KEY": True,
        "ORG_INFO": True,
        "TECHNICAL_CONTACT": True,
        "SUPPORT_CONTACT": True,
        "SP_EXTRA": False,
        "SECURITY_CONFIG": False,
        "EXTRA_DATA": False,
        "ENABLED_IDPS": True,
        "CALLBACK_URL": False,
    }

    DAB_LDAP_AUTHENTICATOR_KEYS = {
        "SERVER_URI": True,
        "BIND_DN": False,
        "BIND_PASSWORD": False,
        "CONNECTION_OPTIONS": False,
        "GROUP_TYPE": True,
        "GROUP_TYPE_PARAMS": True,
        "GROUP_SEARCH": False,
        "START_TLS": False,
        "USER_DN_TEMPLATE": True,
        "USER_ATTR_MAP": True,
        "USER_SEARCH": False,
    }

    def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
        awx_ldap_settings = {}

        for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
            key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
            value = getattr(settings, awx_ldap_setting, None)
            awx_ldap_settings[key] = value

        grouped_settings = {}

        for key, value in awx_ldap_settings.items():
            match = re.search(r'(\d+)', key)
            index = int(match.group()) if match else 0
            new_key = re.sub(r'\d+_', '', key)

            if index not in grouped_settings:
                grouped_settings[index] = {}

            grouped_settings[index][new_key] = value
            if new_key == "GROUP_TYPE" and value:
                grouped_settings[index][new_key] = type(value).__name__

            if new_key == "SERVER_URI" and value:
                value = value.split(", ")

        return grouped_settings

    def is_enabled(self, settings, keys):
        for key, required in keys.items():
            if required and not settings.get(key):
                return False
        return True

    def get_awx_saml_settings(self) -> dict[str, Any]:
        awx_saml_settings = {}
        for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
            awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)

        return awx_saml_settings

    def format_config_data(self, enabled, awx_settings, type, keys, name):
        config = {
            "type": f"awx.authentication.authenticator_plugins.{type}",
            "name": name,
            "enabled": enabled,
            "create_objects": True,
            "users_unique": False,
            "remove_users": True,
            "configuration": {},
        }
        for k in keys:
            v = awx_settings.get(k)
            config["configuration"].update({k: v})

        if type == "saml":
            idp_to_key_mapping = {
                "url": "IDP_URL",
                "x509cert": "IDP_X509_CERT",
                "entity_id": "IDP_ENTITY_ID",
                "attr_email": "IDP_ATTR_EMAIL",
                "attr_groups": "IDP_GROUPS",
                "attr_username": "IDP_ATTR_USERNAME",
                "attr_last_name": "IDP_ATTR_LAST_NAME",
                "attr_first_name": "IDP_ATTR_FIRST_NAME",
                "attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
            }
            for idp_name in awx_settings.get("ENABLED_IDPS", {}):
                for key in idp_to_key_mapping:
                    value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
                    if value is not None:
                        config["name"] = idp_name
                        config["configuration"].update({idp_to_key_mapping[key]: value})

        return config

    def add_arguments(self, parser):
        parser.add_argument(
            "output_file",
            nargs="?",
            type=str,
            default=None,
            help="Output JSON file path",
        )

    def handle(self, *args, **options):
        try:
            data = []

            # dump SAML settings
            awx_saml_settings = self.get_awx_saml_settings()
            awx_saml_enabled = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
            if awx_saml_enabled:
                awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
                data.append(
                    self.format_config_data(
                        awx_saml_enabled,
                        awx_saml_settings,
                        "saml",
                        self.DAB_SAML_AUTHENTICATOR_KEYS,
                        awx_saml_name,
                    )
                )

            # dump LDAP settings
            awx_ldap_group_settings = self.get_awx_ldap_settings()
            for awx_ldap_name, awx_ldap_settings in enumerate(awx_ldap_group_settings.values()):
                enabled = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
                if enabled:
                    data.append(
                        self.format_config_data(
                            enabled,
                            awx_ldap_settings,
                            "ldap",
                            self.DAB_LDAP_AUTHENTICATOR_KEYS,
                            str(awx_ldap_name),
                        )
                    )

            # write to file if requested
            if options["output_file"]:
                # Define the path for the output JSON file
                output_file = options["output_file"]

                # Ensure the directory exists
                os.makedirs(os.path.dirname(output_file), exist_ok=True)

                # Write data to the JSON file
                with open(output_file, "w") as f:
                    json.dump(data, f, indent=4)

                self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
            else:
                self.stdout.write(json.dumps(data, indent=4))

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
            sys.exit(1)
```
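A sketch of how the new command might be invoked on a control node; `awx-manage` is AWX's standard wrapper for Django management commands, and the output path is a placeholder:

```bash
# Print the django_ansible_base.authenticator-format config to stdout:
awx-manage dump_auth_config

# Or write it to a JSON file:
awx-manage dump_auth_config /tmp/dab_auth_config.json
```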
```diff
@@ -92,8 +92,6 @@ class Command(BaseCommand):
         return host_stats

     def handle(self, *arg, **options):
-        WebsocketsMetricsServer().start()
-
         # it's necessary to delay this import in case
         # database migrations are still running
         from awx.main.models.ha import Instance
@@ -166,8 +164,15 @@ class Command(BaseCommand):

             return

-        try:
-            websocket_relay_manager = WebSocketRelayManager()
-            asyncio.run(websocket_relay_manager.run())
-        except KeyboardInterrupt:
-            logger.info('Terminating Websocket Relayer')
+        WebsocketsMetricsServer().start()
+        websocket_relay_manager = WebSocketRelayManager()
+
+        while True:
+            try:
+                asyncio.run(websocket_relay_manager.run())
+            except KeyboardInterrupt:
+                logger.info('Shutting down Websocket Relayer')
+                break
+            except Exception as e:
+                logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
+                time.sleep(10)
```
```diff
@@ -5,11 +5,12 @@ import logging
 import threading
 import time
 import urllib.parse
+from pathlib import Path

 from django.conf import settings
 from django.contrib.auth import logout
 from django.contrib.auth.models import User
-from django.db.migrations.executor import MigrationExecutor
+from django.db.migrations.recorder import MigrationRecorder
 from django.db import connection
 from django.shortcuts import redirect
 from django.apps import apps
@@ -17,9 +18,11 @@ from django.utils.deprecation import MiddlewareMixin
 from django.utils.translation import gettext_lazy as _
 from django.urls import reverse, resolve

+from awx.main import migrations
 from awx.main.utils.named_url_graph import generate_graph, GraphNode
 from awx.conf import fields, register
 from awx.main.utils.profiling import AWXProfiler
+from awx.main.utils.common import memoize


 logger = logging.getLogger('awx.main.middleware')
@@ -198,9 +201,22 @@ class URLModificationMiddleware(MiddlewareMixin):
         request.path_info = new_path


+@memoize(ttl=20)
+def is_migrating():
+    latest_number = 0
+    latest_name = ''
+    for migration_path in Path(migrations.__path__[0]).glob('[0-9]*.py'):
+        try:
+            migration_number = int(migration_path.name.split('_', 1)[0])
+        except ValueError:
+            continue
+        if migration_number > latest_number:
+            latest_number = migration_number
+            latest_name = migration_path.name[: -len('.py')]
+    return not MigrationRecorder(connection).migration_qs.filter(app='main', name=latest_name).exists()
+
+
 class MigrationRanCheckMiddleware(MiddlewareMixin):
     def process_request(self, request):
-        executor = MigrationExecutor(connection)
-        plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
-        if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
+        if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
            return redirect(reverse("ui:migrations_notran"))
```
```diff
@@ -0,0 +1,59 @@
+# Generated by Django 4.2.6 on 2024-02-15 20:51
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0189_inbound_hop_nodes'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+    ]
```
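Because this migration only rewrites the `choices` metadata on existing `source` fields, it should emit essentially no SQL. A hedged way to confirm that from a control node, assuming the file lands as migration 0190 of the main app (stock Django commands exposed through awx-manage):

```bash
awx-manage showmigrations main | tail -n 3
# Expect an effectively empty statement body for a choices-only AlterField.
awx-manage sqlmigrate main 0190
```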
```diff
@@ -925,6 +925,7 @@ class InventorySourceOptions(BaseModel):
         ('rhv', _('Red Hat Virtualization')),
         ('controller', _('Red Hat Ansible Automation Platform')),
         ('insights', _('Red Hat Insights')),
+        ('terraform', _('Terraform State')),
     ]

     # From the options of the Django management base command
@@ -1630,6 +1631,20 @@ class satellite6(PluginFileInjector):
         return ret


+class terraform(PluginFileInjector):
+    plugin_name = 'terraform_state'
+    base_injector = 'managed'
+    namespace = 'cloud'
+    collection = 'terraform'
+    use_fqcn = True
+
+    def inventory_as_dict(self, inventory_update, private_data_dir):
+        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, None)
+        ret = super().inventory_as_dict(inventory_update, private_data_dir)
+        ret['backend_config_files'] = env["TF_BACKEND_CONFIG_FILE"]
+        return ret
+
+
 class controller(PluginFileInjector):
     plugin_name = 'tower'  # TODO: relying on routing for now, update after EEs pick up revised collection
     base_injector = 'template'
```
```diff
@@ -1,5 +1,6 @@
 # Copyright (c) 2019 Ansible, Inc.
 # All Rights Reserved.
+# -*-coding:utf-8-*-


 class CustomNotificationBase(object):
```
@@ -12,6 +12,7 @@ from . import consumers
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.routing')
|
||||
_application = None
|
||||
|
||||
|
||||
class AWXProtocolTypeRouter(ProtocolTypeRouter):
|
||||
@@ -66,11 +67,52 @@ websocket_relay_urlpatterns = [
|
||||
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
||||
]
|
||||
|
||||
application = AWXProtocolTypeRouter(
|
||||
{
|
||||
'websocket': MultipleURLRouterAdapter(
|
||||
URLRouter(websocket_relay_urlpatterns),
|
||||
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
def application_func(cls=AWXProtocolTypeRouter) -> ProtocolTypeRouter:
|
||||
return cls(
|
||||
{
|
||||
'websocket': MultipleURLRouterAdapter(
|
||||
URLRouter(websocket_relay_urlpatterns),
|
||||
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def __getattr__(name: str) -> ProtocolTypeRouter:
|
||||
"""
|
||||
Defer instantiating application.
|
||||
For testing, we just need it to NOT run on import.
|
||||
|
||||
https://peps.python.org/pep-0562/#specification
|
||||
|
||||
Normally, someone would get application from this module via:
|
||||
from awx.main.routing import application
|
||||
|
||||
and do something with the application:
|
||||
application.do_something()
|
||||
|
||||
What does the callstack look like when the import runs?
|
||||
...
|
||||
awx.main.routing.__getattribute__(...) # <-- we don't define this so NOOP as far as we are concerned
|
||||
if '__getattr__' in awx.main.routing.__dict__: # <-- this triggers the function we are in
|
||||
return awx.main.routing.__dict__.__getattr__("application")
|
||||
|
||||
Why isn't this function simply implemented as:
|
||||
def __getattr__(name):
|
||||
if not _application:
|
||||
_application = application_func()
|
||||
return _application
|
||||
|
||||
It could. I manually tested it and it passes test_routing.py.
|
||||
|
||||
But my understanding after reading the PEP-0562 specification link above is that
|
||||
performance would be a bit worse due to the extra __getattribute__ calls when
|
||||
we reference non-global variables.
|
||||
"""
|
||||
if name == "application":
|
||||
globs = globals()
|
||||
if not globs['_application']:
|
||||
globs['_application'] = application_func()
|
||||
return globs['_application']
|
||||
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
||||
|
||||
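The module-level __getattr__ above is the PEP 562 pattern. A minimal standalone sketch of the same idea, with hypothetical names, for anyone unfamiliar with it:

# lazy_module.py -- demonstrates PEP 562 lazy module attributes
_application = None

def _build_application():
    return object()  # stand-in for the real ProtocolTypeRouter

def __getattr__(name):
    # only invoked when normal attribute lookup on the module fails
    if name == 'application':
        global _application
        if _application is None:
            _application = _build_application()
        return _application
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

With this in place, `from lazy_module import application` builds the object on first access rather than at import time, which is exactly what the tests below need.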
@@ -6,6 +6,7 @@ import itertools
import json
import logging
import os
import psycopg
from io import StringIO
from contextlib import redirect_stdout
import shutil
@@ -416,7 +417,7 @@ def handle_removed_image(remove_images=None):

@task(queue=get_task_queuename)
def cleanup_images_and_files():
    _cleanup_images_and_files()
    _cleanup_images_and_files(image_prune=True)


@task(queue=get_task_queuename)
@@ -630,10 +631,18 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
                logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))

        except DatabaseError as e:
            if 'did not affect any rows' in str(e):
                logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
            cause = e.__cause__
            if cause and hasattr(cause, 'sqlstate'):
                sqlstate = cause.sqlstate
                sqlstate_str = psycopg.errors.lookup(sqlstate)
                logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))

                if sqlstate == psycopg.errors.NoData:
                    logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
                else:
                    logger.exception("Error marking {} as lost.".format(other_inst.hostname))
            else:
                logger.exception('Error marking {} as lost'.format(other_inst.hostname))
                logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))

    # Run local reaper
    if worker_tasks is not None:
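The pattern above relies on exception chaining: when the psycopg driver raises, Django wraps it in its own DatabaseError while the original driver exception stays reachable as __cause__, carrying the five-character SQLSTATE code on psycopg 3. A condensed sketch of that introspection, assuming a psycopg-backed connection:

from django.db import DatabaseError

def sqlstate_of(exc: DatabaseError):
    # returns e.g. '02000' (no data), or None when Django itself raised
    # the error without a driver exception chained underneath
    cause = exc.__cause__
    return getattr(cause, 'sqlstate', None)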
@@ -788,10 +797,19 @@ def update_inventory_computed_fields(inventory_id):
    try:
        i.update_computed_fields()
    except DatabaseError as e:
        if 'did not affect any rows' in str(e):
            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
            return
        raise
        # https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
        # django raises DatabaseError("Forced update did not affect any rows.")

        # if sqlstate is set then there was a database error and otherwise will re-raise that error
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_str = psycopg.errors.lookup(sqlstate)
            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
            raise

        # otherwise
        logger.debug('Exiting duplicate update_inventory_computed_fields task.')


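The distinction being drawn above: Django raises DatabaseError("Forced update did not affect any rows.") itself, with no driver exception chained in, so a missing sqlstate is what identifies the harmless duplicate-update case, while a present sqlstate means a genuine database failure that must be re-raised. Roughly, as a sketch (not AWX source):

def is_duplicate_forced_update(exc):
    # no chained driver error with a sqlstate -> Django's own
    # "forced update" error, safe to treat as a duplicate task
    cause = exc.__cause__
    return not (cause and getattr(cause, 'sqlstate', None))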
def update_smart_memberships_for_inventory(smart_inventory):

3 awx/main/tests/data/inventory/plugins/terraform/env.json Normal file
@@ -0,0 +1,3 @@
{
    "TF_BACKEND_CONFIG_FILE": "{{ file_reference }}"
}
@@ -1,13 +1,8 @@
from awx.main.tests.functional.conftest import *  # noqa
import os
import pytest


def pytest_addoption(parser):
    parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")


def pytest_generate_tests(metafunc):
    # This is called for every test. Only get/set command line arguments
    # if the argument is specified in the list of test "fixturenames".
    option_value = metafunc.config.option.release
    if 'release' in metafunc.fixturenames and option_value is not None:
        metafunc.parametrize("release", [option_value])
@pytest.fixture()
def release():
    return os.environ.get('VERSION_TARGET', '')

@@ -3,15 +3,19 @@ import pytest
from unittest import mock
import urllib.parse
from unittest.mock import PropertyMock
import importlib

# Django
from django.urls import resolve
from django.http import Http404
from django.apps import apps
from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from django.db.backends.sqlite3.base import SQLiteCursorWrapper

from django.db.models.signals import post_migrate

# AWX
from awx.main.models.projects import Project
from awx.main.models.ha import Instance
@@ -41,10 +45,19 @@ from awx.main.models.workflow import WorkflowJobTemplate
from awx.main.models.ad_hoc_commands import AdHocCommand
from awx.main.models.oauth import OAuth2Application as Application
from awx.main.models.execution_environments import ExecutionEnvironment
from awx.main.utils import is_testing

__SWAGGER_REQUESTS__ = {}


# HACK: the dab_resource_registry app required ServiceID in migrations which checks do not run
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')


if is_testing():
    post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))


@pytest.fixture(scope="session")
def swagger_autogen(requests=__SWAGGER_REQUESTS__):
    return requests

@@ -193,6 +193,7 @@ class TestInventorySourceInjectors:
            ('satellite6', 'theforeman.foreman.foreman'),
            ('insights', 'redhatinsights.insights.insights'),
            ('controller', 'awx.awx.tower'),
            ('terraform', 'cloud.terraform.terraform_state'),
        ],
    )
    def test_plugin_proper_names(self, source, proper_name):

@@ -107,6 +107,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
        for filename in os.listdir(os.path.join(private_data_dir, subdir)):
            filename_list.append(os.path.join(subdir, filename))
    filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
    inventory_content = ""
    for filename in filename_list:
        if filename in ('args', 'project'):
            continue  # Ansible runner
@@ -130,6 +131,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
            dir_contents[abs_file_path] = f.read()
        # Declare a reference to inventory plugin file if it exists
        if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]:
            inventory_content = dir_contents[abs_file_path]
            referenced_paths.add(abs_file_path)  # used as inventory file
        elif cache_file_regex.match(abs_file_path):
            file_aliases[abs_file_path] = 'cache_file'
@@ -157,7 +159,11 @@ def read_content(private_data_dir, raw_env, inventory_update):
    content = {}
    for abs_file_path, file_content in dir_contents.items():
        # assert that all files laid down are used
        if abs_file_path not in referenced_paths and abs_file_path not in ignore_files:
        if (
            abs_file_path not in referenced_paths
            and to_container_path(abs_file_path, private_data_dir) not in inventory_content
            and abs_file_path not in ignore_files
        ):
            raise AssertionError(
                "File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4))
            )

@@ -411,14 +411,14 @@ def test_project_delete(delete, organization, admin_user):


@pytest.mark.parametrize(
    'order_by, expected_names, expected_ids',
    'order_by, expected_names',
    [
        ('name', ['alice project', 'bob project', 'shared project'], [1, 2, 3]),
        ('-name', ['shared project', 'bob project', 'alice project'], [3, 2, 1]),
        ('name', ['alice project', 'bob project', 'shared project']),
        ('-name', ['shared project', 'bob project', 'alice project']),
    ],
)
@pytest.mark.django_db
def test_project_list_ordering_by_name(get, order_by, expected_names, expected_ids, organization_factory):
def test_project_list_ordering_by_name(get, order_by, expected_names, organization_factory):
    'ensure sorted order of project list is maintained correctly when the requested order is invalid or not applicable'
    objects = organization_factory(
        'org1',
@@ -426,13 +426,11 @@ def test_project_list_ordering_by_name(get, order_by, expected_names, expected_i
        superusers=['admin'],
    )
    project_names = []
    project_ids = []
    # TODO: ask for an order by here that doesn't apply
    results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
    for x in range(len(results)):
        project_names.append(results[x]['name'])
        project_ids.append(results[x]['id'])
    assert project_names == expected_names and project_ids == expected_ids
    assert project_names == expected_names


@pytest.mark.parametrize('order_by', ('name', '-name'))
@@ -450,7 +448,8 @@ def test_project_list_ordering_with_duplicate_names(get, order_by, organization_
    for x in range(3):
        results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
        project_ids[x] = [proj['id'] for proj in results]
    assert project_ids[0] == project_ids[1] == project_ids[2] == [1, 2, 3, 4, 5]
    assert project_ids[0] == project_ids[1] == project_ids[2]
    assert project_ids[0] == sorted(project_ids[0])


@pytest.mark.django_db

90 awx/main/tests/functional/test_routing.py Normal file
@@ -0,0 +1,90 @@
import pytest

from django.contrib.auth.models import AnonymousUser

from channels.routing import ProtocolTypeRouter
from channels.testing.websocket import WebsocketCommunicator


from awx.main.consumers import WebsocketSecretAuthHelper


@pytest.fixture
def application():
    # code in routing hits the db on import because .. settings cache
    from awx.main.routing import application_func

    yield application_func(ProtocolTypeRouter)


@pytest.fixture
def websocket_server_generator(application):
    def fn(endpoint):
        return WebsocketCommunicator(application, endpoint)

    return fn


@pytest.mark.asyncio
@pytest.mark.django_db
class TestWebsocketRelay:
    @pytest.fixture
    def websocket_relay_secret_generator(self, settings):
        def fn(secret, set_broadcast_websocket_secret=False):
            secret_backup = settings.BROADCAST_WEBSOCKET_SECRET
            settings.BROADCAST_WEBSOCKET_SECRET = 'foobar'
            res = ('secret'.encode('utf-8'), WebsocketSecretAuthHelper.construct_secret().encode('utf-8'))
            if set_broadcast_websocket_secret is False:
                settings.BROADCAST_WEBSOCKET_SECRET = secret_backup
            return res

        return fn

    @pytest.fixture
    def websocket_relay_secret(self, settings, websocket_relay_secret_generator):
        return websocket_relay_secret_generator('foobar', set_broadcast_websocket_secret=True)

    async def test_authorized(self, websocket_server_generator, websocket_relay_secret):
        server = websocket_server_generator('/websocket/relay/')

        server.scope['headers'] = (websocket_relay_secret,)
        connected, _ = await server.connect()
        assert connected is True

    async def test_not_authorized(self, websocket_server_generator):
        server = websocket_server_generator('/websocket/relay/')
        connected, _ = await server.connect()
        assert connected is False, "Connection to the relay websocket without auth. We expected the client to be denied."

    async def test_wrong_secret(self, websocket_server_generator, websocket_relay_secret_generator):
        server = websocket_server_generator('/websocket/relay/')

        server.scope['headers'] = (websocket_relay_secret_generator('foobar', set_broadcast_websocket_secret=False),)
        connected, _ = await server.connect()
        assert connected is False


@pytest.mark.asyncio
@pytest.mark.django_db
class TestWebsocketEventConsumer:
    async def test_unauthorized_anonymous(self, websocket_server_generator):
        server = websocket_server_generator('/websocket/')

        server.scope['user'] = AnonymousUser()
        connected, _ = await server.connect()
        assert connected is False, "Anonymous user should NOT be allowed to login."

    @pytest.mark.skip(reason="Ran out of coding time.")
    async def test_authorized(self, websocket_server_generator, application, admin):
        server = websocket_server_generator('/websocket/')

        """
        I ran out of time. Here is what I was thinking ...
        Inject a valid session into the cookies in the header

        server.scope['headers'] = (
            (b'cookie', ...),
        )
        """
        connected, _ = await server.connect()
        assert connected is True, "User should be allowed in via cookies auth via a session key in the cookies"
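For reference, the channels testing pattern these tests build on is small; a condensed sketch, assuming an ASGI application as returned by application_func():

import pytest
from channels.testing.websocket import WebsocketCommunicator

@pytest.mark.asyncio
async def example_connect(application):
    communicator = WebsocketCommunicator(application, '/websocket/relay/')
    # extra request headers can be injected through the ASGI scope
    communicator.scope['headers'] = ((b'secret', b'<shared secret value>'),)
    connected, _ = await communicator.connect()
    await communicator.disconnect()
    return connected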
@@ -1,11 +1,6 @@
# Python
from unittest import mock
import uuid

# patch python-ldap
with mock.patch('__main__.__builtins__.dir', return_value=[]):
    import ldap  # NOQA

# Load development settings for base variables.
from awx.settings.development import *  # NOQA


122 awx/main/tests/unit/commands/test_dump_auth_config.py Normal file
@@ -0,0 +1,122 @@
from io import StringIO
import json
from django.core.management import call_command
from django.test import TestCase, override_settings


settings_dict = {
    "SOCIAL_AUTH_SAML_SP_ENTITY_ID": "SP_ENTITY_ID",
    "SOCIAL_AUTH_SAML_SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
    "SOCIAL_AUTH_SAML_SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
    "SOCIAL_AUTH_SAML_ORG_INFO": "ORG_INFO",
    "SOCIAL_AUTH_SAML_TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
    "SOCIAL_AUTH_SAML_SUPPORT_CONTACT": "SUPPORT_CONTACT",
    "SOCIAL_AUTH_SAML_SP_EXTRA": "SP_EXTRA",
    "SOCIAL_AUTH_SAML_SECURITY_CONFIG": "SECURITY_CONFIG",
    "SOCIAL_AUTH_SAML_EXTRA_DATA": "EXTRA_DATA",
    "SOCIAL_AUTH_SAML_ENABLED_IDPS": {
        "Keycloak": {
            "attr_last_name": "last_name",
            "attr_groups": "groups",
            "attr_email": "email",
            "attr_user_permanent_id": "name_id",
            "attr_username": "username",
            "entity_id": "https://example.com/auth/realms/awx",
            "url": "https://example.com/auth/realms/awx/protocol/saml",
"x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
            "attr_first_name": "first_name",
        }
    },
    "SOCIAL_AUTH_SAML_CALLBACK_URL": "CALLBACK_URL",
    "AUTH_LDAP_1_SERVER_URI": "SERVER_URI",
    "AUTH_LDAP_1_BIND_DN": "BIND_DN",
    "AUTH_LDAP_1_BIND_PASSWORD": "BIND_PASSWORD",
    "AUTH_LDAP_1_GROUP_SEARCH": ["GROUP_SEARCH"],
    "AUTH_LDAP_1_GROUP_TYPE": "string object",
    "AUTH_LDAP_1_GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
    "AUTH_LDAP_1_USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
    "AUTH_LDAP_1_USER_SEARCH": ["USER_SEARCH"],
    "AUTH_LDAP_1_USER_ATTR_MAP": {
        "email": "email",
        "last_name": "last_name",
        "first_name": "first_name",
    },
    "AUTH_LDAP_1_CONNECTION_OPTIONS": {},
    "AUTH_LDAP_1_START_TLS": None,
}


@override_settings(**settings_dict)
class TestDumpAuthConfigCommand(TestCase):
    def setUp(self):
        super().setUp()
        self.expected_config = [
            {
                "type": "awx.authentication.authenticator_plugins.saml",
                "name": "Keycloak",
                "enabled": True,
                "create_objects": True,
                "users_unique": False,
                "remove_users": True,
                "configuration": {
                    "SP_ENTITY_ID": "SP_ENTITY_ID",
                    "SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
                    "SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
                    "ORG_INFO": "ORG_INFO",
                    "TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
                    "SUPPORT_CONTACT": "SUPPORT_CONTACT",
                    "SP_EXTRA": "SP_EXTRA",
                    "SECURITY_CONFIG": "SECURITY_CONFIG",
                    "EXTRA_DATA": "EXTRA_DATA",
                    "ENABLED_IDPS": {
                        "Keycloak": {
                            "attr_last_name": "last_name",
                            "attr_groups": "groups",
                            "attr_email": "email",
                            "attr_user_permanent_id": "name_id",
                            "attr_username": "username",
                            "entity_id": "https://example.com/auth/realms/awx",
                            "url": "https://example.com/auth/realms/awx/protocol/saml",
"x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
                            "attr_first_name": "first_name",
                        }
                    },
                    "CALLBACK_URL": "CALLBACK_URL",
                    "IDP_URL": "https://example.com/auth/realms/awx/protocol/saml",
"IDP_X509_CERT": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
                    "IDP_ENTITY_ID": "https://example.com/auth/realms/awx",
                    "IDP_ATTR_EMAIL": "email",
                    "IDP_GROUPS": "groups",
                    "IDP_ATTR_USERNAME": "username",
                    "IDP_ATTR_LAST_NAME": "last_name",
                    "IDP_ATTR_FIRST_NAME": "first_name",
                    "IDP_ATTR_USER_PERMANENT_ID": "name_id",
                },
            },
            {
                "type": "awx.authentication.authenticator_plugins.ldap",
                "name": "1",
                "enabled": True,
                "create_objects": True,
                "users_unique": False,
                "remove_users": True,
                "configuration": {
                    "SERVER_URI": "SERVER_URI",
                    "BIND_DN": "BIND_DN",
                    "BIND_PASSWORD": "BIND_PASSWORD",
                    "CONNECTION_OPTIONS": {},
                    "GROUP_TYPE": "str",
                    "GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
                    "GROUP_SEARCH": ["GROUP_SEARCH"],
                    "START_TLS": None,
                    "USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
                    "USER_ATTR_MAP": {"email": "email", "last_name": "last_name", "first_name": "first_name"},
                    "USER_SEARCH": ["USER_SEARCH"],
                },
            },
        ]

    def test_json_returned_from_cmd(self):
        output = StringIO()
        call_command("dump_auth_config", stdout=output)
        assert json.loads(output.getvalue()) == self.expected_config
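The test above follows the standard Django recipe for asserting on a management command's output: point stdout at a StringIO and parse what was written. Reduced to its essentials:

from io import StringIO
import json
from django.core.management import call_command

def dumped_auth_config():
    out = StringIO()
    # capture the command's stdout instead of letting it print
    call_command('dump_auth_config', stdout=out)
    return json.loads(out.getvalue())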
64 awx/main/tests/unit/tasks/test_system.py Normal file
@@ -0,0 +1,64 @@
import pytest
from unittest.mock import MagicMock, patch
from awx.main.tasks.system import update_inventory_computed_fields
from awx.main.models import Inventory
from django.db import DatabaseError


@pytest.fixture
def mock_logger():
    with patch("awx.main.tasks.system.logger") as logger:
        yield logger


@pytest.fixture
def mock_inventory():
    return MagicMock(spec=Inventory)


def test_update_inventory_computed_fields_existing_inventory(mock_logger, mock_inventory):
    # Mocking the Inventory.objects.filter method to return a non-empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = True
        mock_filter.return_value.__getitem__.return_value = mock_inventory

        # Mocking the update_computed_fields method
        with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
            update_inventory_computed_fields(1)

            # Assertions
            mock_filter.assert_called_once_with(id=1)
            mock_update_computed_fields.assert_called_once()

    # You can add more assertions based on your specific requirements


def test_update_inventory_computed_fields_missing_inventory(mock_logger):
    # Mocking the Inventory.objects.filter method to return an empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = False

        update_inventory_computed_fields(1)

        # Assertions
        mock_filter.assert_called_once_with(id=1)
        mock_logger.error.assert_called_once_with("Update Inventory Computed Fields failed due to missing inventory: 1")


def test_update_inventory_computed_fields_database_error_nosqlstate(mock_logger, mock_inventory):
    # Mocking the Inventory.objects.filter method to return a non-empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = True
        mock_filter.return_value.__getitem__.return_value = mock_inventory

        # Mocking the update_computed_fields method
        with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
            # Simulating the update_computed_fields method to explicitly raise a DatabaseError
            mock_update_computed_fields.side_effect = DatabaseError("Some error")

            update_inventory_computed_fields(1)

            # Assertions
            mock_filter.assert_called_once_with(id=1)
            mock_update_computed_fields.assert_called_once()
            mock_inventory.update_computed_fields.assert_called_once()
@@ -121,6 +121,10 @@ def test_get_model_for_valid_type(model_type, model_class):
    assert common.get_model_for_type(model_type) == model_class


def test_is_testing():
    assert common.is_testing() is True


@pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS])
def test_get_capacity_type(model_type, model_class):
    if model_type in ('job', 'ad_hoc_command', 'inventory_update', 'job_template'):

@@ -7,6 +7,7 @@ import json
import yaml
import logging
import time
import psycopg
import os
import subprocess
import re
@@ -23,7 +24,7 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
from django.utils.dateparse import parse_datetime
from django.utils.translation import gettext_lazy as _
from django.utils.functional import cached_property
from django.db import connection, transaction, ProgrammingError, IntegrityError
from django.db import connection, DatabaseError, transaction, ProgrammingError, IntegrityError
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
from django.db.models.query import QuerySet
@@ -136,7 +137,7 @@ def underscore_to_camelcase(s):
@functools.cache
def is_testing(argv=None):
    '''Return True if running django or py.test unit tests.'''
    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
    if os.environ.get('DJANGO_SETTINGS_MODULE') == 'awx.main.tests.settings_for_test':
        return True
    argv = sys.argv if argv is None else argv
    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
@@ -1155,11 +1156,25 @@ def create_partition(tblname, start=None):
                f'ALTER TABLE {tblname} ATTACH PARTITION {tblname}_{partition_label} '
                f'FOR VALUES FROM (\'{start_timestamp}\') TO (\'{end_timestamp}\');'
            )

    except (ProgrammingError, IntegrityError) as e:
        if 'already exists' in str(e):
            logger.info(f'Caught known error due to partition creation race: {e}')
        else:
            raise
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_cls = psycopg.errors.lookup(sqlstate)

            if psycopg.errors.DuplicateTable == sqlstate_cls or psycopg.errors.UniqueViolation == sqlstate_cls:
                logger.info(f'Caught known error due to partition creation race: {e}')
            else:
                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_cls))
                raise
    except DatabaseError as e:
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_str = psycopg.errors.lookup(sqlstate)
            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
        raise


def cleanup_new_process(func):

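psycopg.errors.lookup(), used in both branches above, maps a five-character SQLSTATE code to the matching psycopg exception class, which lets the partition-creation race be detected precisely instead of by substring-matching the error message:

import psycopg

# '42P07' is duplicate_table, '23505' is unique_violation
assert psycopg.errors.lookup('42P07') is psycopg.errors.DuplicateTable
assert psycopg.errors.lookup('23505') is psycopg.errors.UniqueViolation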
@@ -302,20 +302,35 @@ class WebSocketRelayManager(object):
        self.stats_mgr.start()

        # Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
        database_conf = settings.DATABASES['default']
        async_conn = await psycopg.AsyncConnection.connect(
            dbname=database_conf['NAME'],
            host=database_conf['HOST'],
            user=database_conf['USER'],
            password=database_conf['PASSWORD'],
            port=database_conf['PORT'],
            **database_conf.get("OPTIONS", {}),
        )
        await async_conn.set_autocommit(True)
        event_loop.create_task(self.on_ws_heartbeat(async_conn))
        database_conf = settings.DATABASES['default'].copy()
        database_conf['OPTIONS'] = database_conf.get('OPTIONS', {}).copy()

        for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
            database_conf[k] = v
        for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
            database_conf['OPTIONS'][k] = v

        task = None

        # Establishes a websocket connection to /websocket/relay on all API servers
        while True:
            if not task or task.done():
                try:
                    async_conn = await psycopg.AsyncConnection.connect(
                        dbname=database_conf['NAME'],
                        host=database_conf['HOST'],
                        user=database_conf['USER'],
                        password=database_conf['PASSWORD'],
                        port=database_conf['PORT'],
                        **database_conf.get("OPTIONS", {}),
                    )

                    task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
                    logger.info("Creating `on_ws_heartbeat` task in event loop.")

                except Exception as e:
                    logger.warning(f"Failed to connect to database for pg_notify: {e}")

            future_remote_hosts = self.known_hosts.keys()
            current_remote_hosts = self.relay_connections.keys()
            deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)

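The connection-settings change above reduces to a two-level override: copy the default database config, then let LISTENER_DATABASES replace individual top-level keys and individual OPTIONS entries. A self-contained sketch of the merge (slightly reordered here so an OPTIONS override cannot clobber the copied dict wholesale):

def merged_listener_conf(databases, listener_databases):
    # copy so the global settings dicts are never mutated
    conf = databases['default'].copy()
    conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
    overrides = listener_databases.get('default', {})
    for key, value in overrides.items():
        if key != 'OPTIONS':
            conf[key] = value
    # OPTIONS entries are merged key by key rather than replaced as a block
    for key, value in overrides.get('OPTIONS', {}).items():
        conf['OPTIONS'][key] = value
    return conf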
@@ -216,42 +216,59 @@
- block:
    - name: Fetch galaxy roles from roles/requirements.(yml/yaml)
      ansible.builtin.command:
        cmd: "ansible-galaxy role install -r {{ item }} {{ verbosity }}"
        cmd: "ansible-galaxy role install -r {{ req_file }} {{ verbosity }}"
      register: galaxy_result
      with_fileglob:
        - "{{ project_path | quote }}/roles/requirements.yaml"
        - "{{ project_path | quote }}/roles/requirements.yml"
      vars:
        req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
        req_candidates:
          files:
            - "{{ project_path | quote }}/roles/requirements.yml"
            - "{{ project_path | quote }}/roles/requirements.yaml"
          skip: True
      changed_when: "'was installed successfully' in galaxy_result.stdout"
      when: roles_enabled | bool
      when:
        - roles_enabled | bool
        - req_file
      tags:
        - install_roles

    - name: Fetch galaxy collections from collections/requirements.(yml/yaml)
      ansible.builtin.command:
        cmd: "ansible-galaxy collection install -r {{ item }} {{ verbosity }}"
        cmd: "ansible-galaxy collection install -r {{ req_file }} {{ verbosity }}"
      register: galaxy_collection_result
      with_fileglob:
        - "{{ project_path | quote }}/collections/requirements.yaml"
        - "{{ project_path | quote }}/collections/requirements.yml"
      vars:
        req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
        req_candidates:
          files:
            - "{{ project_path | quote }}/collections/requirements.yml"
            - "{{ project_path | quote }}/collections/requirements.yaml"
          skip: True
      changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout"
      when:
        - "ansible_version.full is version_compare('2.9', '>=')"
        - collections_enabled | bool
        - req_file
      tags:
        - install_collections

    # requirements.yml in project root can be either "old" (roles only) or "new" (collections+roles) format
    - name: Fetch galaxy roles and collections from requirements.(yml/yaml)
      ansible.builtin.command:
        cmd: "ansible-galaxy install -r {{ item }} {{ verbosity }}"
        cmd: "ansible-galaxy install -r {{ req_file }} {{ verbosity }}"
      register: galaxy_combined_result
      with_fileglob:
        - "{{ project_path | quote }}/requirements.yaml"
        - "{{ project_path | quote }}/requirements.yml"
      vars:
        req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
        req_candidates:
          files:
            - "{{ project_path | quote }}/requirements.yaml"
            - "{{ project_path | quote }}/requirements.yml"
          skip: True
      changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout"
      when:
        - "ansible_version.full is version_compare('2.10', '>=')"
        - collections_enabled | bool
        - roles_enabled | bool
        - req_file
      tags:
        - install_collections
        - install_roles

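The first_found lookup introduced above returns the first candidate path that actually exists (preferring requirements.yml over requirements.yaml), and the added `- req_file` condition skips the task entirely when neither file is present. The same selection logic, spelled out as a Python sketch with a hypothetical project path:

import os

def first_found(candidates):
    # mirrors ansible.builtin.first_found with skip=True
    for path in candidates:
        if os.path.exists(path):
            return path
    return ''  # falsy, like the skipped lookup result

req_file = first_found([
    '/srv/project/roles/requirements.yml',   # hypothetical project path
    '/srv/project/roles/requirements.yaml',
])
if req_file:
    print(f'ansible-galaxy role install -r {req_file}')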
22 awx/resource_api.py Normal file
@@ -0,0 +1,22 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

from awx.main import models


class APIConfig(ServiceAPIConfig):
    service_type = "awx"


RESOURCE_LIST = (
    ResourceConfig(
        models.Organization,
        shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
    ),
    ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
    ResourceConfig(
        models.Team,
        shared_resource=SharedResource(serializer=TeamType, is_provider=False),
        parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
    ),
)
@@ -353,8 +353,11 @@ INSTALLED_APPS = [
    'awx.sso',
    'solo',
    'ansible_base.rest_filters',
    'ansible_base.jwt_consumer',
    'ansible_base.resource_registry',
]


INTERNAL_IPS = ('127.0.0.1',)

MAX_PAGE_SIZE = 200
@@ -362,6 +365,7 @@ REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination',
    'PAGE_SIZE': 25,
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'ansible_base.jwt_consumer.awx.auth.AwxJWTAuthentication',
        'awx.api.authentication.LoggedOAuth2Authentication',
        'awx.api.authentication.SessionAuthentication',
        'awx.api.authentication.LoggedBasicAuthentication',
@@ -755,6 +759,14 @@ SATELLITE6_INSTANCE_ID_VAR = 'foreman_id,foreman.id'
INSIGHTS_INSTANCE_ID_VAR = 'insights_id'
INSIGHTS_EXCLUDE_EMPTY_GROUPS = False

# ----------------
# -- Terraform State --
# ----------------
# TERRAFORM_ENABLED_VAR =
# TERRAFORM_ENABLED_VALUE =
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True

# ---------------------
# ----- Custom -----
# ---------------------
@@ -1108,8 +1120,14 @@ METRICS_SUBSYSTEM_CONFIG = {
# django-ansible-base
ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'

from ansible_base.lib import dynamic_config  # noqa: E402

settings_file = os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py')
include(settings_file)

# Add a postfix to the API URL patterns
# example if set to '' API pattern will be /api
# example if set to 'controller' API pattern will be /api AND /api/controller
OPTIONAL_API_URLPATTERN_PREFIX = ''

@@ -72,6 +72,8 @@ AWX_CALLBACK_PROFILE = True
# Allows user to trigger task managers directly for debugging and profiling purposes.
# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
AWX_DISABLE_TASK_MANAGERS = False

# Needed for launching runserver in debug mode
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================

# Store a snapshot of default settings at this point before loading any

@@ -59,6 +59,7 @@ register(
    help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
    category=_('UI'),
    category_slug='ui',
    hidden=True,
)

register(
@@ -68,4 +69,5 @@ register(
    help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
    category=_('UI'),
    category_slug='ui',
    hidden=True,
)

46 awx/ui/package-lock.json generated
@@ -13,7 +13,7 @@
  "@patternfly/react-table": "4.113.0",
  "ace-builds": "^1.10.1",
  "ansi-to-html": "0.7.2",
  "axios": "0.27.2",
  "axios": "^1.6.7",
  "d3": "7.6.1",
  "dagre": "^0.8.4",
  "dompurify": "2.4.0",
@@ -5940,12 +5940,13 @@
    }
  },
  "node_modules/axios": {
    "version": "0.27.2",
    "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
    "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
    "version": "1.6.7",
    "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
    "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
    "dependencies": {
      "follow-redirects": "^1.14.9",
      "form-data": "^4.0.0"
      "follow-redirects": "^1.15.4",
      "form-data": "^4.0.0",
      "proxy-from-env": "^1.1.0"
    }
  },
  "node_modules/axios/node_modules/form-data": {
@@ -10387,9 +10388,9 @@
    }
  },
  "node_modules/follow-redirects": {
    "version": "1.15.1",
    "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
    "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
    "version": "1.15.5",
    "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
    "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
    "funding": [
      {
        "type": "individual",
@@ -18349,6 +18350,11 @@
      "node": ">= 0.10"
    }
  },
  "node_modules/proxy-from-env": {
    "version": "1.1.0",
    "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
    "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
  },
  "node_modules/pseudolocale": {
    "version": "1.2.0",
    "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",
@@ -26915,12 +26921,13 @@
      "dev": true
    },
    "axios": {
      "version": "0.27.2",
      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
      "version": "1.6.7",
      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
      "requires": {
        "follow-redirects": "^1.14.9",
        "form-data": "^4.0.0"
        "follow-redirects": "^1.15.4",
        "form-data": "^4.0.0",
        "proxy-from-env": "^1.1.0"
      },
      "dependencies": {
        "form-data": {
@@ -30371,9 +30378,9 @@
      }
    },
    "follow-redirects": {
      "version": "1.15.1",
      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA=="
      "version": "1.15.5",
      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw=="
    },
    "fork-ts-checker-webpack-plugin": {
      "version": "6.5.2",
@@ -36325,6 +36332,11 @@
        }
      }
    },
    "proxy-from-env": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
    },
    "pseudolocale": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",

@@ -13,7 +13,7 @@
  "@patternfly/react-table": "4.113.0",
  "ace-builds": "^1.10.1",
  "ansi-to-html": "0.7.2",
  "axios": "0.27.2",
  "axios": "^1.6.7",
  "d3": "7.6.1",
  "dagre": "^0.8.4",
  "dompurify": "2.4.0",

@@ -67,27 +67,18 @@ function getInitialValues(launchConfig, surveyConfig, resource) {
  const values = {};
  if (surveyConfig?.spec) {
    surveyConfig.spec.forEach((question) => {
      if (question.type === 'multiselect') {
      if (resource?.extra_data && resource?.extra_data[question.variable]) {
        values[`survey_${question.variable}`] =
          resource.extra_data[question.variable];
      } else if (question.type === 'multiselect') {
        values[`survey_${question.variable}`] = question.default
          ? question.default.split('\n')
          : [];
      } else {
        values[`survey_${question.variable}`] = question.default ?? '';
      }
      if (resource?.extra_data) {
        Object.entries(resource.extra_data).forEach(([key, value]) => {
          if (key === question.variable) {
            if (question.type === 'multiselect') {
              values[`survey_${question.variable}`] = value;
            } else {
              values[`survey_${question.variable}`] = value;
            }
          }
        });
      }
    });
  }

  return values;
}

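The rewritten getInitialValues() boils down to a per-question precedence rule: a value already saved on the schedule's extra_data wins, otherwise a multiselect default is split into a list, otherwise the plain default (or an empty string) is used. The same rule expressed as a Python sketch for clarity:

def initial_survey_value(question, extra_data):
    # a saved answer on the schedule takes precedence over the survey default
    if extra_data and question['variable'] in extra_data:
        return extra_data[question['variable']]
    default = question.get('default')
    if question['type'] == 'multiselect':
        # multiselect defaults are newline-delimited strings
        return default.split('\n') if default else []
    return default if default is not None else ''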
@@ -257,12 +257,17 @@ function PromptDetail({
  numChips={5}
  ouiaId="prompt-job-tag-chips"
  totalChips={
    !overrides.job_tags || overrides.job_tags === ''
    overrides.job_tags === undefined ||
    overrides.job_tags === null ||
    overrides.job_tags === ''
      ? 0
      : overrides.job_tags.split(',').length
  }
>
  {overrides.job_tags.length > 0 &&
  {overrides.job_tags !== undefined &&
    overrides.job_tags !== null &&
    overrides.job_tags !== '' &&
    overrides.job_tags.length > 0 &&
    overrides.job_tags.split(',').map((jobTag) => (
      <Chip
        key={jobTag}
@@ -284,13 +289,18 @@ function PromptDetail({
<ChipGroup
  numChips={5}
  totalChips={
    !overrides.skip_tags || overrides.skip_tags === ''
    overrides.skip_tags === undefined ||
    overrides.skip_tags === null ||
    overrides.skip_tags === ''
      ? 0
      : overrides.skip_tags.split(',').length
  }
  ouiaId="prompt-skip-tag-chips"
>
  {overrides.skip_tags.length > 0 &&
  {overrides.skip_tags !== undefined &&
    overrides.skip_tags !== null &&
    overrides.skip_tags !== '' &&
    overrides.skip_tags.length > 0 &&
    overrides.skip_tags.split(',').map((skipTag) => (
      <Chip
        key={skipTag}

@@ -13,6 +13,18 @@ import ScheduleForm from '../shared/ScheduleForm';
import buildRuleSet from '../shared/buildRuleSet';
import { CardBody } from '../../Card';

function generateExtraData(extra_vars, surveyValues, surveyConfiguration) {
  const extraVars = parseVariableField(
    yaml.dump(mergeExtraVars(extra_vars, surveyValues))
  );
  surveyConfiguration.spec.forEach((q) => {
    if (!surveyValues[q.variable]) {
      delete extraVars[q.variable];
    }
  });
  return extraVars;
}

function ScheduleEdit({
  hasDaysToKeepField,
  schedule,
@@ -33,10 +45,12 @@ function ScheduleEdit({
    surveyConfiguration,
    originalInstanceGroups,
    originalLabels,
    scheduleCredentials = []
    scheduleCredentials = [],
    isPromptTouched = false
  ) => {
    const {
      execution_environment,
      extra_vars = null,
      instance_groups,
      inventory,
      credentials = [],
@@ -48,45 +62,54 @@ function ScheduleEdit({
      labels,
      ...submitValues
    } = values;
    let extraVars;

    const surveyValues = getSurveyValues(values);

    if (
      !Object.values(surveyValues).length &&
      surveyConfiguration?.spec?.length
      isPromptTouched &&
      surveyConfiguration?.spec &&
      launchConfiguration?.ask_variables_on_launch
    ) {
      surveyConfiguration.spec.forEach((q) => {
        surveyValues[q.variable] = q.default;
      });
      submitValues.extra_data = generateExtraData(
        extra_vars,
        surveyValues,
        surveyConfiguration
      );
    } else if (
      isPromptTouched &&
      surveyConfiguration?.spec &&
      !launchConfiguration?.ask_variables_on_launch
    ) {
      submitValues.extra_data = generateExtraData(
        schedule.extra_data,
        surveyValues,
        surveyConfiguration
      );
    } else if (
      isPromptTouched &&
      launchConfiguration?.ask_variables_on_launch
    ) {
      submitValues.extra_data = parseVariableField(extra_vars);
    }

    const initialExtraVars =
      launchConfiguration?.ask_variables_on_launch &&
      (values.extra_vars || '---');
    if (surveyConfiguration?.spec) {
      extraVars = yaml.dump(mergeExtraVars(initialExtraVars, surveyValues));
    } else {
      extraVars = yaml.dump(mergeExtraVars(initialExtraVars, {}));
    }
    submitValues.extra_data = extraVars && parseVariableField(extraVars);

    if (
      Object.keys(submitValues.extra_data).length === 0 &&
      Object.keys(schedule.extra_data).length > 0
      isPromptTouched &&
      launchConfiguration?.ask_inventory_on_launch &&
      inventory
    ) {
      submitValues.extra_data = schedule.extra_data;
    }
    delete values.extra_vars;
    if (inventory) {
      submitValues.inventory = inventory.id;
    }

    if (execution_environment) {
    if (
      isPromptTouched &&
      launchConfiguration?.ask_execution_environment_on_launch &&
      execution_environment
    ) {
      submitValues.execution_environment = execution_environment.id;
    }

    try {
      if (launchConfiguration?.ask_labels_on_launch) {
      if (isPromptTouched && launchConfiguration?.ask_labels_on_launch) {
        const { labelIds, error } = createNewLabels(
          values.labels,
          resource.organization
@@ -120,9 +143,16 @@ function ScheduleEdit({
      }
    }

    const cleanedRequestData = Object.keys(requestData)
      .filter((key) => !key.startsWith('survey_'))
      .reduce((acc, key) => {
        acc[key] = requestData[key];
        return acc;
      }, {});

    const {
      data: { id: scheduleId },
    } = await SchedulesAPI.update(schedule.id, requestData);
    } = await SchedulesAPI.update(schedule.id, cleanedRequestData);

    const { added: addedCredentials, removed: removedCredentials } =
      getAddedAndRemoved(

@@ -6,6 +6,7 @@ import {
  InventoriesAPI,
  CredentialsAPI,
  CredentialTypesAPI,
  JobTemplatesAPI,
} from 'api';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import ScheduleEdit from './ScheduleEdit';
@@ -125,6 +126,7 @@ describe('<ScheduleEdit />', () => {
      id: 27,
    },
  });

  await act(async () => {
    wrapper = mountWithContexts(
      <ScheduleEdit
@@ -206,7 +208,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run once schedule',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
@@ -233,7 +234,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run every 10 minutes 10 times',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T103000 RRULE:INTERVAL=10;FREQ=MINUTELY;COUNT=10',
    });
@@ -262,7 +262,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run every hour until date',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=HOURLY;UNTIL=20200326T144500Z',
    });
@@ -288,7 +287,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run daily',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=DAILY',
    });
@@ -316,7 +314,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run weekly on mon/wed/fri',
      extra_data: {},
      rrule: `DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=WEEKLY;BYDAY=${RRule.MO},${RRule.WE},${RRule.FR}`,
    });
  });
@@ -344,7 +341,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run on the first day of the month',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200401T104500 RRULE:INTERVAL=1;FREQ=MONTHLY;BYMONTHDAY=1',
    });
@@ -376,7 +372,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run monthly on the last Tuesday',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200331T110000 RRULE:INTERVAL=1;FREQ=MONTHLY;BYSETPOS=-1;BYDAY=TU',
    });
@@ -406,7 +401,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Yearly on the first day of March',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200301T000000 RRULE:INTERVAL=1;FREQ=YEARLY;BYMONTH=3;BYMONTHDAY=1',
    });
@@ -437,7 +431,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Yearly on the second Friday in April',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=2;BYDAY=FR;BYMONTH=4',
    });
@@ -468,7 +461,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Yearly on the first weekday in October',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=1;BYDAY=MO,TU,WE,TH,FR;BYMONTH=10',
    });
@@ -562,7 +554,6 @@ describe('<ScheduleEdit />', () => {
    wrapper.update();

    expect(SchedulesAPI.update).toBeCalledWith(27, {
      extra_data: {},
      name: 'mock schedule',
      rrule:
        'DTSTART;TZID=America/New_York:20210128T141500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
@@ -633,15 +624,13 @@ describe('<ScheduleEdit />', () => {
      endDateTime: undefined,
      startDateTime: undefined,
      description: '',
      extra_data: {},
      name: 'foo',
      inventory: 702,
      rrule:
        'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
  });

  test('should submit survey with default values properly, without opening prompt wizard', async () => {
  test('should submit update values properly when prompt is not opened', async () => {
    let scheduleSurveyWrapper;
    await act(async () => {
      scheduleSurveyWrapper = mountWithContexts(
@@ -746,9 +735,195 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run once schedule',
      extra_data: { mc: 'first', text: 'text variable' },
      rrule:
        'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
  });
  test('should submit update values properly when survey values change', async () => {
    JobTemplatesAPI.readSurvey.mockResolvedValue({
      data: {
        spec: [
          {
            question_name: 'text',
            question_description: '',
            required: true,
            type: 'text',
            variable: 'text',
            min: 0,
            max: 1024,
            default: 'text variable',
            choices: '',
            new_question: true,
          },
        ],
      },
    });

    JobTemplatesAPI.readLaunch.mockResolvedValue({
      data: {
        can_start_without_user_input: false,
        passwords_needed_to_start: [],
        ask_scm_branch_on_launch: false,
        ask_variables_on_launch: false,
        ask_tags_on_launch: false,
        ask_diff_mode_on_launch: false,
        ask_skip_tags_on_launch: false,
        ask_job_type_on_launch: false,
        ask_limit_on_launch: false,
        ask_verbosity_on_launch: false,
        ask_inventory_on_launch: true,
        ask_credential_on_launch: true,
        survey_enabled: true,
        variables_needed_to_start: [],
        credential_needed_to_start: true,
        inventory_needed_to_start: true,
        job_template_data: {
          name: 'Demo Job Template',
          id: 7,
          description: '',
        },
        defaults: {
          extra_vars: '---',
          diff_mode: false,
          limit: '',
          job_tags: '',
          skip_tags: '',
          job_type: 'run',
          verbosity: 0,
          inventory: {
            name: null,
            id: null,
          },
          scm_branch: '',
          credentials: [],
        },
      },
    });

    let scheduleSurveyWrapper;
    await act(async () => {
      scheduleSurveyWrapper = mountWithContexts(
        <ScheduleEdit
          schedule={mockSchedule}
          resource={{
            id: 700,
            type: 'job_template',
            inventory: 1,
            summary_fields: {
              credentials: [
                { name: 'job template credential', id: 75, kind: 'ssh' },
              ],
            },
            name: 'Foo Job Template',
            description: '',
          }}
          resourceDefaultCredentials={[]}
          launchConfig={{
            can_start_without_user_input: false,
            passwords_needed_to_start: [],
            ask_scm_branch_on_launch: false,
            ask_variables_on_launch: false,
            ask_tags_on_launch: false,
            ask_diff_mode_on_launch: false,
            ask_skip_tags_on_launch: false,
            ask_job_type_on_launch: false,
            ask_limit_on_launch: false,
            ask_verbosity_on_launch: false,
            ask_inventory_on_launch: true,
            ask_credential_on_launch: true,
            survey_enabled: true,
            variables_needed_to_start: [],
            credential_needed_to_start: true,
            inventory_needed_to_start: true,
            job_template_data: {
              name: 'Demo Job Template',
              id: 7,
              description: '',
            },
            defaults: {
              extra_vars: '---',
              diff_mode: false,
              limit: '',
              job_tags: '',
              skip_tags: '',
              job_type: 'run',
              verbosity: 0,
              inventory: {
                name: null,
                id: null,
              },
              scm_branch: '',
              credentials: [],
            },
          }}
          surveyConfig={{
            spec: [
              {
                question_name: 'text',
                question_description: '',
                required: true,
                type: 'text',
                variable: 'text',
                min: 0,
                max: 1024,
                default: 'text variable',
                choices: '',
                new_question: true,
              },
            ],
          }}
        />
      );
    });
    scheduleSurveyWrapper.update();

    await act(async () =>
      scheduleSurveyWrapper
        .find('Button[aria-label="Prompt"]')
        .prop('onClick')()
    );
    scheduleSurveyWrapper.update();
    expect(scheduleSurveyWrapper.find('WizardNavItem').length).toBe(4);
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper
        .find('input#survey-question-text')
        .simulate('change', {
          target: { value: 'foo', name: 'survey_text' },
        })
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();

    expect(scheduleSurveyWrapper.find('Wizard').length).toBe(0);

    await act(async () =>
      scheduleSurveyWrapper.find('Button[aria-label="Save"]').prop('onClick')()
    );

    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: '',
      name: 'mock schedule',
      inventory: 702,
      extra_data: {
        text: 'foo',
      },
      rrule:
        'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
  });
});

@@ -40,6 +40,7 @@ function ScheduleForm({
resourceDefaultCredentials,
}) {
const [isWizardOpen, setIsWizardOpen] = useState(false);
const [isPromptTouched, setIsPromptTouched] = useState(false);
const [isSaveDisabled, setIsSaveDisabled] = useState(false);
const originalLabels = useRef([]);
const originalInstanceGroups = useRef([]);
@@ -492,7 +493,8 @@ function ScheduleForm({
surveyConfig,
originalInstanceGroups.current,
originalLabels.current,
credentials
credentials,
isPromptTouched
);
}}
validate={validate}
@@ -518,6 +520,7 @@ function ScheduleForm({
onSave={() => {
setIsWizardOpen(false);
setIsSaveDisabled(false);
setIsPromptTouched(true);
}}
resourceDefaultCredentials={resourceDefaultCredentials}
labels={originalLabels.current}
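The three hunks above thread a simple "dirty" flag through the form: isPromptTouched starts out false, flips to true when the prompt wizard is saved, and rides along into the submit call so the handler can tell whether prompt overrides were deliberately set. The same pattern as a tiny standalone hook (illustrative only; the hook name and onSubmit callback are hypothetical, not AWX APIs):

import { useState } from 'react';

// Illustrative sketch: only treat prompt values as intentional
// overrides after the user has saved the prompt wizard once.
function usePromptTouched(onSubmit) {
  const [isPromptTouched, setIsPromptTouched] = useState(false);

  // Wire this to the wizard's onSave.
  const handleWizardSave = () => setIsPromptTouched(true);

  // Forward the flag alongside the form values.
  const handleSubmit = (values) => onSubmit(values, isPromptTouched);

  return { handleWizardSave, handleSubmit };
}

export default usePromptTouched;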

@@ -115,8 +115,11 @@ function SessionProvider({ children }) {
}, [setSessionTimeout, setSessionCountdown]);

useEffect(() => {
const isRedirectCondition = (location, histLength) =>
location.pathname === '/login' && histLength === 2;

const unlisten = history.listen((location, action) => {
if (action === 'POP') {
if (action === 'POP' || isRedirectCondition(location, history.length)) {
setIsRedirectLinkReceived(true);
}
});

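The redirect check above keys off two signals: the user landed on /login and the browser history holds only the initial entry plus the redirect. The extracted helper can be exercised standalone (illustrative values, not an AWX API):

const isRedirectCondition = (location, histLength) =>
  location.pathname === '/login' && histLength === 2;

console.log(isRedirectCondition({ pathname: '/login' }, 2)); // true
console.log(isRedirectCondition({ pathname: '/home' }, 2)); // false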
@@ -784,7 +784,7 @@ msgstr "Branche à utiliser dans l’exécution de la tâche. Projet par défaut

#: screens/Inventory/shared/Inventory.helptext.js:155
msgid "Branch to use on inventory sync. Project default used if blank. Only allowed if project allow_override field is set to true."
msgstr ""
msgstr "Branche à utiliser pour la synchronisation de l'inventaire. La valeur par défaut du projet est utilisée si elle est vide. Cette option n'est autorisée que si le champ allow_override du projet est défini sur vrai."

#: components/About/About.js:45
msgid "Brand Image"
@@ -2832,7 +2832,7 @@ msgstr "Entrez les variables avec la syntaxe JSON ou YAML. Consultez la documen

#: screens/Inventory/shared/SmartInventoryForm.js:94
msgid "Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Controller documentation for example syntax."
msgstr ""
msgstr "Entrez les variables d'inventaire en utilisant la syntaxe JSON ou YAML. Utilisez le bouton d'option pour basculer entre les deux. Référez-vous à la documentation du contrôleur Ansible pour les exemples de syntaxe."

#: screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.js:87
msgid "Environment variables or extra variables that specify the values a credential type can inject."
@@ -3015,7 +3015,7 @@ msgstr "Recherche exacte sur le champ d'identification."

#: components/Search/RelatedLookupTypeInput.js:38
msgid "Exact search on name field."
msgstr ""
msgstr "Recherche exacte sur le champ nom."

#: screens/Project/shared/Project.helptext.js:23
msgid "Example URLs for GIT Source Control include:"
@@ -3242,7 +3242,7 @@ msgstr "Jobs ayant échoué"

#: screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js:262
msgid "Failed to approve one or more workflow approval."
msgstr ""
msgstr "Échec de l'approbation d'une ou plusieurs validations de flux de travail."

#: screens/WorkflowApproval/shared/WorkflowApprovalButton.js:56
msgid "Failed to approve {0}."
@@ -3474,7 +3474,7 @@ msgstr "N'a pas réussi à supprimer {name}."

#: screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js:263
msgid "Failed to deny one or more workflow approval."
msgstr ""
msgstr "Échec du refus d'une ou plusieurs validations de flux de travail."

#: screens/WorkflowApproval/shared/WorkflowDenyButton.js:51
msgid "Failed to deny {0}."
@@ -3520,7 +3520,7 @@ msgstr "Echec du lancement du Job."

#: screens/Inventory/InventoryHosts/InventoryHostItem.js:121
msgid "Failed to load related groups."
msgstr ""
msgstr "Impossible de charger les groupes associés."

#: screens/Instances/InstanceDetail/InstanceDetail.js:388
#: screens/Instances/InstanceList/InstanceList.js:266
@@ -3972,12 +3972,12 @@ msgstr "Demande(s) de bilan de santé soumise(s). Veuillez patienter et recharge
#: screens/Instances/InstanceDetail/InstanceDetail.js:234
#: screens/Instances/InstanceList/InstanceListItem.js:242
msgid "Health checks are asynchronous tasks. See the"
msgstr ""
msgstr "Les bilans de santé sont des tâches asynchrones. Veuillez consulter la documentation pour plus d'informations."

#: screens/InstanceGroup/Instances/InstanceList.js:286
#: screens/Instances/InstanceList/InstanceList.js:219
msgid "Health checks can only be run on execution nodes."
msgstr ""
msgstr "Les bilans de santé ne peuvent être exécutées que sur les nœuds d'exécution."

#: components/StatusLabel/StatusLabel.js:42
msgid "Healthy"
@@ -5048,7 +5048,7 @@ msgstr "Lancer"

#: components/TemplateList/TemplateListItem.js:214
msgid "Launch Template"
msgstr "Lacer le modèle."
msgstr "Lancer le modèle."

#: screens/ManagementJob/ManagementJobList/LaunchManagementPrompt.js:32
#: screens/ManagementJob/ManagementJobList/LaunchManagementPrompt.js:34
@@ -9637,7 +9637,7 @@ msgstr "Utilisateur"

#: components/AppContainer/PageHeaderToolbar.js:160
msgid "User Details"
msgstr "Détails de l'erreur"
msgstr "Détails de l'utilisateur"

#: screens/Setting/SettingList.js:121
#: screens/Setting/Settings.js:118

@@ -80,7 +80,7 @@ function Dashboard() {
<Trans>
<p>
<InfoCircleIcon /> A tech preview of the new {brandName} user
interface can be found <a href="/ui_next/dashboard">here</a>.
interface can be found <a href="/ui_next">here</a>.
</p>
</Trans>
</Banner>

@@ -191,7 +191,7 @@ function InstancePeerList({ setBreadcrumb }) {
fetchPeers();
addToast({
id: instancesPeerToAssociate,
title: t`Peers update on ${instance.hostname}. Please be sure to run the install bundle for ${instance.hostname} again in order to see changes take effect.`,
title: t`Please be sure to run the install bundle for the selected instance(s) again in order to see changes take effect.`,
variant: AlertVariant.success,
hasTimeout: true,
});

@@ -21,6 +21,8 @@ const ansibleDocUrls = {
'https://docs.ansible.com/ansible/latest/collections/community/vmware/vmware_vm_inventory_inventory.html',
constructed:
'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
terraform:
'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
};

const getInventoryHelpTextStrings = () => ({
@@ -119,10 +121,10 @@ const getInventoryHelpTextStrings = () => ({
<br />
{value && (
<div>
{t`If you want the Inventory Source to update on
launch and on project update, click on Update on launch, and also go to`}
{t`If you want the Inventory Source to update on launch , click on Update on Launch,
and also go to `}
<Link to={`/projects/${value.id}/details`}> {value.name} </Link>
{t`and click on Update Revision on Launch`}
{t`and click on Update Revision on Launch.`}
</div>
)}
</>
@@ -138,8 +140,8 @@ const getInventoryHelpTextStrings = () => ({
<br />
{value && (
<div>
{t`If you want the Inventory Source to update on
launch and on project update, click on Update on launch, and also go to`}
{t`If you want the Inventory Source to update on launch , click on Update on Launch,
and also go to `}
<Link to={`/projects/${value.id}/details`}> {value.name} </Link>
{t`and click on Update Revision on Launch`}
</div>

@@ -23,6 +23,7 @@ import {
SCMSubForm,
SatelliteSubForm,
ControllerSubForm,
TerraformSubForm,
VMwareSubForm,
VirtualizationSubForm,
} from './InventorySourceSubForms';
@@ -214,6 +215,14 @@ const InventorySourceFormFields = ({
}
/>
),
terraform: (
<TerraformSubForm
autoPopulateCredential={
!source?.id || source?.source !== 'terraform'
}
sourceOptions={sourceOptions}
/>
),
vmware: (
<VMwareSubForm
autoPopulateCredential={

@@ -38,6 +38,7 @@ describe('<InventorySourceForm />', () => {
['openstack', 'OpenStack'],
['rhv', 'Red Hat Virtualization'],
['controller', 'Red Hat Ansible Automation Platform'],
['terraform', 'Terraform State'],
],
},
},

@@ -0,0 +1,59 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import { useConfig } from 'contexts/Config';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
OptionsField,
VerbosityField,
EnabledVarField,
EnabledValueField,
HostFilterField,
SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const TerraformSubForm = ({ autoPopulateCredential }) => {
const helpText = getHelpText();
const { setFieldValue, setFieldTouched } = useFormikContext();
const [credentialField, credentialMeta, credentialHelpers] =
useField('credential');
const config = useConfig();
const handleCredentialUpdate = useCallback(
(value) => {
setFieldValue('credential', value);
setFieldTouched('credential', true, false);
},
[setFieldValue, setFieldTouched]
);
const docsBaseUrl = getDocsBaseUrl(config);

return (
<>
<CredentialLookup
credentialTypeNamespace="terraform"
label={t`Credential`}
helperTextInvalid={credentialMeta.error}
isValid={!credentialMeta.touched || !credentialMeta.error}
onBlur={() => credentialHelpers.setTouched()}
onChange={handleCredentialUpdate}
value={credentialField.value}
required
autoPopulate={autoPopulateCredential}
validate={required(t`Select a value for this field`)}
/>
<VerbosityField />
<HostFilterField />
<EnabledVarField />
<EnabledValueField />
<OptionsField />
<SourceVarsField
popoverContent={helpText.sourceVars(docsBaseUrl, 'terraform')}
/>
</>
);
};

export default TerraformSubForm;
@@ -0,0 +1,70 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import TerraformSubForm from './TerraformSubForm';

jest.mock('../../../../api');

const initialValues = {
credential: null,
overwrite: false,
overwrite_vars: false,
source_path: '',
source_project: null,
source_script: null,
source_vars: '---\n',
update_cache_timeout: 0,
update_on_launch: true,
verbosity: 1,
};

const mockSourceOptions = {
actions: {
POST: {},
},
};

describe('<TerraformSubForm />', () => {
let wrapper;

beforeEach(async () => {
CredentialsAPI.read.mockResolvedValue({
data: { count: 0, results: [] },
});
await act(async () => {
wrapper = mountWithContexts(
<Formik initialValues={initialValues}>
<TerraformSubForm sourceOptions={mockSourceOptions} />
</Formik>
);
});
});

afterAll(() => {
jest.clearAllMocks();
});

test('should render subform fields', () => {
expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
expect(
wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
).toHaveLength(1);
expect(
wrapper.find('VariablesField[label="Source variables"]')
).toHaveLength(1);
});

test('should make expected api calls', () => {
expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
expect(CredentialsAPI.read).toHaveBeenCalledWith({
credential_type__namespace: 'terraform',
order_by: 'name',
page: 1,
page_size: 5,
});
});
});
@@ -6,5 +6,6 @@ export { default as OpenStackSubForm } from './OpenStackSubForm';
export { default as SCMSubForm } from './SCMSubForm';
export { default as SatelliteSubForm } from './SatelliteSubForm';
export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm';

@@ -3,6 +3,7 @@ import { Modal, Tab, Tabs, TabTitleText } from '@patternfly/react-core';
import PropTypes from 'prop-types';
import { t } from '@lingui/macro';
import { encode } from 'html-entities';
import { jsonToYaml } from 'util/yaml';
import StatusLabel from '../../../components/StatusLabel';
import { DetailList, Detail } from '../../../components/DetailList';
import ContentEmpty from '../../../components/ContentEmpty';
@@ -144,9 +145,28 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
<ContentEmpty title={t`No JSON Available`} />
)}
</Tab>
<Tab
eventKey={2}
title={<TabTitleText>{t`YAML`}</TabTitleText>}
aria-label={t`YAML tab`}
ouiaId="yaml-tab"
>
{activeTabKey === 2 && jsonObj ? (
<CodeEditor
mode="javascript"
readOnly
value={jsonToYaml(JSON.stringify(jsonObj))}
onChange={() => {}}
rows={20}
hasErrors={false}
/>
) : (
<ContentEmpty title={t`No YAML Available`} />
)}
</Tab>
{stdOut?.length ? (
<Tab
eventKey={2}
eventKey={3}
title={<TabTitleText>{t`Output`}</TabTitleText>}
aria-label={t`Output tab`}
ouiaId="standard-out-tab"
@@ -163,7 +183,7 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
) : null}
{stdErr?.length ? (
<Tab
eventKey={3}
eventKey={4}
title={<TabTitleText>{t`Standard Error`}</TabTitleText>}
aria-label={t`Standard error tab`}
ouiaId="standard-error-tab"

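The new YAML tab derives its content from the same parsed event object as the JSON tab, converting on the fly via jsonToYaml(JSON.stringify(jsonObj)); the Output and Standard Error tabs shift to eventKey 3 and 4 to make room. A sketch of that JSON-to-YAML round-trip, assuming AWX's util/yaml helper wraps js-yaml's dump (an assumption, not confirmed by the hunk):

import yaml from 'js-yaml';

// Hypothetical stand-in for util/yaml's jsonToYaml: parse the JSON
// string, then serialize the result as YAML.
const jsonToYaml = (jsonString) => yaml.dump(JSON.parse(jsonString));

console.log(jsonToYaml('{"changed": true, "stdout": "foo"}'));
// changed: true
// stdout: foo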
@@ -2,6 +2,7 @@ import React from 'react';
import { shallow } from 'enzyme';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import HostEventModal from './HostEventModal';
import { jsonToYaml } from 'util/yaml';

const hostEvent = {
changed: true,
@@ -167,6 +168,8 @@ const jsonValue = `{
]
}`;

const yamlValue = jsonToYaml(jsonValue);

describe('HostEventModal', () => {
test('initially renders successfully', () => {
const wrapper = shallow(
@@ -187,7 +190,7 @@ describe('HostEventModal', () => {
<HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
);

expect(wrapper.find('Tabs Tab').length).toEqual(4);
expect(wrapper.find('Tabs Tab').length).toEqual(5);
});

test('should initially show details tab', () => {
@@ -287,7 +290,7 @@ describe('HostEventModal', () => {
expect(codeEditor.prop('value')).toEqual(jsonValue);
});

test('should display Standard Out tab content on tab click', () => {
test('should display YAML tab content on tab click', () => {
const wrapper = shallow(
<HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
);
@@ -299,6 +302,21 @@ describe('HostEventModal', () => {
const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
expect(codeEditor.prop('mode')).toBe('javascript');
expect(codeEditor.prop('readOnly')).toBe(true);
expect(codeEditor.prop('value')).toEqual(yamlValue);
});

test('should display Standard Out tab content on tab click', () => {
const wrapper = shallow(
<HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
);

const handleTabClick = wrapper.find('Tabs').prop('onSelect');
handleTabClick(null, 3);
wrapper.update();

const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
expect(codeEditor.prop('mode')).toBe('javascript');
expect(codeEditor.prop('readOnly')).toBe(true);
expect(codeEditor.prop('value')).toEqual(hostEvent.event_data.res.stdout);
});

@@ -316,10 +334,10 @@ describe('HostEventModal', () => {
);

const handleTabClick = wrapper.find('Tabs').prop('onSelect');
handleTabClick(null, 3);
handleTabClick(null, 4);
wrapper.update();

const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
const codeEditor = wrapper.find('Tab[eventKey=4] CodeEditor');
expect(codeEditor.prop('mode')).toBe('javascript');
expect(codeEditor.prop('readOnly')).toBe(true);
expect(codeEditor.prop('value')).toEqual('error content');
@@ -351,10 +369,10 @@ describe('HostEventModal', () => {
);

const handleTabClick = wrapper.find('Tabs').prop('onSelect');
handleTabClick(null, 2);
handleTabClick(null, 3);
wrapper.update();

const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
expect(codeEditor.prop('mode')).toBe('javascript');
expect(codeEditor.prop('readOnly')).toBe(true);
expect(codeEditor.prop('value')).toEqual('foo bar');
@@ -375,10 +393,10 @@ describe('HostEventModal', () => {
);

const handleTabClick = wrapper.find('Tabs').prop('onSelect');
handleTabClick(null, 2);
handleTabClick(null, 3);
wrapper.update();

const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
expect(codeEditor.prop('mode')).toBe('javascript');
expect(codeEditor.prop('readOnly')).toBe(true);
expect(codeEditor.prop('value')).toEqual(
@@ -394,10 +412,10 @@ describe('HostEventModal', () => {
);

const handleTabClick = wrapper.find('Tabs').prop('onSelect');
handleTabClick(null, 2);
handleTabClick(null, 3);
wrapper.update();

const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
expect(codeEditor.prop('mode')).toBe('javascript');
expect(codeEditor.prop('readOnly')).toBe(true);
expect(codeEditor.prop('value')).toEqual(

@@ -30,7 +30,7 @@ function SubscriptionUsage() {
<Trans>
<p>
<InfoCircleIcon /> A tech preview of the new {brandName} user
interface can be found <a href="/ui_next/dashboard">here</a>.
interface can be found <a href="/ui_next">here</a>.
</p>
</Trans>
</Banner>

@@ -201,7 +201,11 @@ function NodeViewModal({ readOnly }) {
overrides.limit = originalNodeObject.limit;
}
if (launchConfig.ask_verbosity_on_launch) {
overrides.verbosity = originalNodeObject.verbosity.toString();
overrides.verbosity =
originalNodeObject.verbosity !== undefined &&
originalNodeObject.verbosity !== null
? originalNodeObject.verbosity.toString()
: '0';
}
if (launchConfig.ask_credential_on_launch) {
overrides.credentials = originalNodeCredentials || [];

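The guarded assignment above avoids calling toString() on undefined or null. A more compact equivalent, assuming the same fall-back-to-'0' semantics, is nullish coalescing:

// Same behavior as the guard above: null/undefined verbosity falls
// back to '0', any number is stringified.
const toVerbosityString = (verbosity) => (verbosity ?? 0).toString();

console.log(toVerbosityString(undefined)); // '0'
console.log(toVerbosityString(null)); // '0'
console.log(toVerbosityString(3)); // '3'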
@@ -1,7 +1,7 @@
export default function getSurveyValues(values) {
const surveyValues = {};
Object.keys(values).forEach((key) => {
if (key.startsWith('survey_') && values[key] !== []) {
if (key.startsWith('survey_')) {
if (Array.isArray(values[key]) && values[key].length === 0) {
return;
}

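The dropped guard values[key] !== [] was always true: the literal [] allocates a fresh array, and !== compares object references rather than contents, so empty survey arrays were never filtered out. The replacement tests emptiness explicitly:

// Why the old guard never fired: two distinct arrays are never
// reference-equal, whatever their contents.
console.log([] !== []); // true

// The explicit emptiness test the fix uses.
const isEmptyArray = (v) => Array.isArray(v) && v.length === 0;
console.log(isEmptyArray([])); // true
console.log(isEmptyArray(['a'])); // false
console.log(isEmptyArray('')); // false - non-arrays are never skipped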
@@ -1,7 +1,12 @@
import yaml from 'js-yaml';

export default function mergeExtraVars(extraVars = '', survey = {}) {
const vars = yaml.load(extraVars) || {};
let vars = {};
if (typeof extraVars === 'string') {
vars = yaml.load(extraVars);
} else if (typeof extraVars === 'object') {
vars = extraVars;
}
return {
...vars,
...survey,

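With this change mergeExtraVars accepts either a YAML/JSON string or an already-parsed object, and survey answers still win on key collisions because they are spread last. A usage sketch (the function body is restated from the hunk; js-yaml is the dependency imported above):

import yaml from 'js-yaml';

// Restated from the hunk above for illustration.
function mergeExtraVars(extraVars = '', survey = {}) {
  let vars = {};
  if (typeof extraVars === 'string') {
    vars = yaml.load(extraVars);
  } else if (typeof extraVars === 'object') {
    vars = extraVars;
  }
  return { ...vars, ...survey };
}

// String input is parsed as YAML; survey values override collisions.
console.log(mergeExtraVars('foo: 1\nbar: 2', { bar: 3 })); // { foo: 1, bar: 3 }
// Already-parsed objects are used as-is.
console.log(mergeExtraVars({ baz: true }, {})); // { baz: true }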
@@ -35,7 +35,7 @@ ui-next/src/build: $(UI_NEXT_DIR)/src/build/awx
## True target for ui-next/src/build. Build ui_next from source.
$(UI_NEXT_DIR)/src/build/awx: $(UI_NEXT_DIR)/src $(UI_NEXT_DIR)/src/node_modules/webpack
@echo "=== Building ui_next ==="
@cd $(UI_NEXT_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ npm run build:awx
@cd $(UI_NEXT_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ ROUTE_PREFIX=/ui_next npm run build:awx
@mv $(UI_NEXT_DIR)/src/build/awx/index.html $(UI_NEXT_DIR)/src/build/awx/index_awx.html

.PHONY: ui-next/src

15
awx/urls.py
@@ -2,7 +2,9 @@
# All Rights Reserved.

from django.conf import settings
from django.urls import re_path, include
from django.urls import path, re_path, include

from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls

from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect

@@ -10,7 +12,16 @@ from awx.main.views import handle_400, handle_403, handle_404, handle_500, handl
urlpatterns = [
re_path(r'', include('awx.ui.urls', namespace='ui')),
re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')),
re_path(r'^api/', include('awx.api.urls', namespace='api')),
path('api/', include('awx.api.urls', namespace='api')),
]

if settings.OPTIONAL_API_URLPATTERN_PREFIX:
urlpatterns += [
path(f'api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/', include('awx.api.urls')),
]

urlpatterns += [
re_path(r'^api/v2/', include(resource_api_urls)),
re_path(r'^sso/', include('awx.sso.urls', namespace='sso')),
re_path(r'^sso/', include('social_django.urls', namespace='social')),
re_path(r'^(?:api/)?400.html$', handle_400),

@@ -18,7 +18,7 @@ documentation: https://github.com/ansible/awx/blob/devel/awx_collection/README.m
homepage: https://www.ansible.com/
issues: https://github.com/ansible/awx/issues?q=is%3Aissue+label%3Acomponent%3Aawx_collection
license:
- GPL-3.0-only
- GPL-3.0-or-later
name: awx
namespace: awx
readme: README.md

@@ -1,119 +0,0 @@
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Wayne Witzel III <wayne@riotousliving.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from __future__ import absolute_import, division, print_function

__metaclass__ = type

import os
import traceback

TOWER_CLI_IMP_ERR = None
try:
import tower_cli.utils.exceptions as exc
from tower_cli.utils import parser
from tower_cli.api import client

HAS_TOWER_CLI = True
except ImportError:
TOWER_CLI_IMP_ERR = traceback.format_exc()
HAS_TOWER_CLI = False

from ansible.module_utils.basic import AnsibleModule, missing_required_lib


def tower_auth_config(module):
"""
`tower_auth_config` attempts to load the tower-cli.cfg file
specified from the `tower_config_file` parameter. If found,
it returns the contents of the file as a dictionary, else
it will attempt to fetch values from the module params and
only pass those values that have been set.
"""
config_file = module.params.pop('tower_config_file', None)
if config_file:
if not os.path.exists(config_file):
module.fail_json(msg='file not found: %s' % config_file)
if os.path.isdir(config_file):
module.fail_json(msg='directory can not be used as config file: %s' % config_file)

with open(config_file, 'r') as f:
return parser.string_to_dict(f.read())
else:
auth_config = {}
host = module.params.pop('tower_host', None)
if host:
auth_config['host'] = host
username = module.params.pop('tower_username', None)
if username:
auth_config['username'] = username
password = module.params.pop('tower_password', None)
if password:
auth_config['password'] = password
module.params.pop('tower_verify_ssl', None)  # pop alias if used
verify_ssl = module.params.pop('validate_certs', None)
if verify_ssl is not None:
auth_config['verify_ssl'] = verify_ssl
return auth_config


def tower_check_mode(module):
'''Execute check mode logic for Ansible Tower modules'''
if module.check_mode:
try:
result = client.get('/ping').json()
module.exit_json(changed=True, tower_version='{0}'.format(result['version']))
except (exc.ServerError, exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(changed=False, msg='Failed check mode: {0}'.format(excinfo))


class TowerLegacyModule(AnsibleModule):
def __init__(self, argument_spec, **kwargs):
args = dict(
tower_host=dict(),
tower_username=dict(),
tower_password=dict(no_log=True),
validate_certs=dict(type='bool', aliases=['tower_verify_ssl']),
tower_config_file=dict(type='path'),
)
args.update(argument_spec)

kwargs.setdefault('mutually_exclusive', [])
kwargs['mutually_exclusive'].extend(
(
('tower_config_file', 'tower_host'),
('tower_config_file', 'tower_username'),
('tower_config_file', 'tower_password'),
('tower_config_file', 'validate_certs'),
)
)

super().__init__(argument_spec=args, **kwargs)

if not HAS_TOWER_CLI:
self.fail_json(msg=missing_required_lib('ansible-tower-cli'), exception=TOWER_CLI_IMP_ERR)
@@ -181,10 +181,8 @@ def run_module(request, collection_import):
resource_class = resource_module.ControllerAWXKitModule
elif getattr(resource_module, 'ControllerAPIModule', None):
resource_class = resource_module.ControllerAPIModule
elif getattr(resource_module, 'TowerLegacyModule', None):
resource_class = resource_module.TowerLegacyModule
else:
raise RuntimeError("The module has neither a TowerLegacyModule, ControllerAWXKitModule or a ControllerAPIModule")
raise RuntimeError("The module has neither a ControllerAWXKitModule or a ControllerAPIModule")

with mock.patch.object(resource_class, '_load_params', new=mock_load_params):
# Call the test utility (like a mock server) instead of issuing HTTP requests

@@ -26,7 +26,7 @@
name: "{{ project_name }}"
organization: "{{ org_name }}"
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: true

- name: Create a git project with same name, different org

@@ -31,7 +31,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: true
register: result

@@ -44,7 +44,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: true
state: exists
register: result
@@ -58,7 +58,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: true
state: exists
request_timeout: .001
@@ -75,7 +75,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: true
state: absent
register: result
@@ -89,7 +89,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: true
state: exists
register: result
@@ -103,7 +103,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: false
register: result
ignore_errors: true
@@ -137,7 +137,7 @@
name: "{{ project_name2 }}"
organization: "{{ org_name }}"
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
scm_credential: "{{ cred_name }}"
check_mode: true

@@ -162,7 +162,7 @@
name: "{{ project_name2 }}"
organization: Non_Existing_Org
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
scm_credential: "{{ cred_name }}"
register: result
ignore_errors: true
@@ -179,7 +179,7 @@
name: "{{ project_name2 }}"
organization: "{{ org_name }}"
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
scm_credential: Non_Existing_Credential
register: result
ignore_errors: true
@@ -191,7 +191,7 @@
- "'Non_Existing_Credential' in result.msg"
- "result.total_results == 0"

- name: Create a git project without credentials without waiting
- name: Create a git project using a branch and allowing branch override
project:
name: "{{ project_name3 }}"
organization: Default

@@ -13,7 +13,7 @@
name: "{{ project_name1 }}"
organization: Default
scm_type: git
scm_url: https://github.com/ansible/test-playbooks
scm_url: https://github.com/ansible/ansible-tower-samples
wait: false
register: project_create_result

@@ -19,8 +19,6 @@ homepage: https://www.ansible.com/
issues: https://github.com/ansible/awx/issues?q=is%3Aissue+label%3Acomponent%3Aawx_collection
license:
- GPL-3.0-or-later
# plugins/module_utils/tower_legacy.py
- BSD-2-Clause
name: {{ collection_package }}
namespace: {{ collection_namespace }}
readme: README.md

@@ -96,6 +96,7 @@ credential_type_name_to_config_kind_map = {
'vault': 'vault',
'vmware vcenter': 'vmware',
'gpg public key': 'gpg_public_key',
'terraform backend configuration': 'terraform',
}

config_kind_to_credential_type_name_map = {kind: name for name, kind in credential_type_name_to_config_kind_map.items()}

@@ -51,7 +51,16 @@ class WSClient(object):
# Subscription group types

def __init__(
self, token=None, hostname='', port=443, secure=True, session_id=None, csrftoken=None, add_received_time=False, session_cookie_name='awx_sessionid'
self,
token=None,
hostname='',
port=443,
secure=True,
ws_suffix='websocket/',
session_id=None,
csrftoken=None,
add_received_time=False,
session_cookie_name='awx_sessionid',
):
# delay this import, because this is an optional dependency
import websocket
@@ -68,6 +77,7 @@ class WSClient(object):
hostname = result.hostname

self.port = port
self.suffix = ws_suffix
self._use_ssl = secure
self.hostname = hostname
self.token = token
@@ -85,7 +95,7 @@ class WSClient(object):
else:
auth_cookie = ''
pref = 'wss://' if self._use_ssl else 'ws://'
url = '{0}{1.hostname}:{1.port}/websocket/'.format(pref, self)
url = '{0}{1.hostname}:{1.port}/{1.suffix}'.format(pref, self)
self.ws = websocket.WebSocketApp(
url, on_open=self._on_open, on_message=self._on_message, on_error=self._on_error, on_close=self._on_close, cookie=auth_cookie
)

@@ -175,9 +175,10 @@ class TestOptions(unittest.TestCase):
assert '--verbosity {0,1,2,3,4,5}' in out.getvalue()

def test_actions_with_primary_key(self):
page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
ResourceOptionsParser(None, page, 'jobs', self.parser)

for method in ('get', 'modify', 'delete'):
page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
ResourceOptionsParser(None, page, 'jobs', self.parser)
assert method in self.parser.choices

out = StringIO()

@@ -17,6 +17,11 @@ def test_explicit_hostname():
assert client.token == "token"


def test_websocket_suffix():
client = WSClient("token", "hostname", 566, ws_suffix='my-websocket/')
assert client.suffix == 'my-websocket/'


@pytest.mark.parametrize(
'url, result',
[

@@ -8,7 +8,7 @@ skip_missing_interpreters = true
# skipsdist = true

[testenv]
basepython = python3.9
basepython = python3.11
setenv =
PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:.
deps =

@@ -6,7 +6,7 @@ The *awx-manage* Utility
.. index::
single: awx-manage

The ``awx-manage`` utility is used to access detailed internal information of AWX. Commands for ``awx-manage`` should run as the ``awx`` or ``root`` user.
The ``awx-manage`` utility is used to access detailed internal information of AWX. Commands for ``awx-manage`` should run as the ``awx`` user only.

.. warning::
Running awx-manage commands via playbook is not recommended or supported.

@@ -557,7 +557,7 @@ Terminal Access Controller Access-Control System Plus (TACACS+) is a protocol th

Generic OIDC settings
----------------------
Similar to SAML, OpenID Connect (OIDC) uses the OAuth 2.0 framework. It allows third-party applications to verify the identity and obtain basic end-user information. The main difference between OIDC and SMAL is that SAML has a service provider (SP)-to-IdP trust relationship, whereas OIDC establishes the trust with the channel (HTTPS) that is used to obtain the security token. To obtain the credentials needed to set up OIDC with AWX, refer to the documentation from the identity provider (IdP) of your choice that has OIDC support.
Similar to SAML, OpenID Connect (OIDC) uses the OAuth 2.0 framework. It allows third-party applications to verify the identity and obtain basic end-user information. The main difference between OIDC and SAML is that SAML has a service provider (SP)-to-IdP trust relationship, whereas OIDC establishes the trust with the channel (HTTPS) that is used to obtain the security token. To obtain the credentials needed to set up OIDC with AWX, refer to the documentation from the identity provider (IdP) of your choice that has OIDC support.

To configure OIDC in AWX:


@@ -13,7 +13,7 @@ Scaling your mesh is only available on Openshift and Kubernetes (K8S) deployment

Instances serve as nodes in your mesh topology. Automation mesh allows you to extend the footprint of your automation. Where you launch a job and where the ``ansible-playbook`` runs can be in different locations.

.. image:: ../common/images/instances_mesh_concept.png
.. image:: ../common/images/instances_mesh_concept.drawio.png
:alt: Site A pointing to Site B and dotted arrows to two hosts from Site B

Automation mesh is useful for:
@@ -23,7 +23,7 @@ Automation mesh is useful for:

The nodes (control, hop, and execution instances) are interconnected via receptor, forming a virtual mesh.

.. image:: ../common/images/instances_mesh_concept_with_nodes.png
.. image:: ../common/images/instances_mesh_concept_with_nodes.drawio.png
:alt: Control node pointing to hop node, which is pointing to two execution nodes.


@@ -78,11 +78,11 @@ Hop nodes can be added to sit between the control plane of AWX and standalone ex

Below is an example of an AWX task pod with two execution nodes. Traffic to execution node 2 flows through a hop node that is setup between it and the control plane.

.. image:: ../common/images/instances_awx_task_pods_hopnode.png
.. image:: ../common/images/instances_awx_task_pods_hopnode.drawio.png
:alt: AWX task pod with a hop node between the control plane of AWX and standalone execution nodes.


An example of a simple topology may look like the following:
Below are sample values used to configure each node in a simple topology:

.. list-table::
:widths: 20 30 10 20 15
@@ -95,9 +95,9 @@
- Peers
* - Control plane
- awx-task-65d6d96987-mgn9j
- 27199
- True
- []
- n/a
- n/a
- [hop node]
* - Hop node
- awx-hop-node
- 27199
@@ -107,7 +107,7 @@
- awx-example.com
- n/a
- False
- ["hop node"]
- [hop node]



@@ -117,11 +117,11 @@ Mesh topology
Mesh ingress is a feature that allows remote nodes to connect inbound to the control plane. This is especially useful when creating remote nodes in restricted networking environments that disallow inbound traffic.


.. image:: ../common/images/instances_mesh_ingress_topology.png
.. image:: ../common/images/instances_mesh_ingress_topology.drawio.png
:alt: Mesh ingress architecture showing the peering relationship between nodes.


An example of a topology that uses mesh ingress may look like the following:
Below are sample values used to configure each node in a mesh ingress topology:

.. list-table::
:widths: 20 30 10 20 15
@@ -133,12 +133,12 @@
- Peers from control nodes
- Peers
* - Control plane
- awx-task-xyz
- 27199
- True
- []
- awx-task-65d6d96987-mgn9j
- n/a
- n/a
- [hop node]
* - Hop node
- awx-hop-node
- awx-mesh-ingress-1
- 27199
- True
- []
@@ -146,13 +146,14 @@
- awx-example.com
- n/a
- False
- ["hop node"]
- [hop node]


In order to create a mesh ingress for AWX, see the `Mesh Ingress <https://ansible.readthedocs.io/projects/awx-operator/en/latest/user-guide/advanced-configuration/mesh-ingress.html>`_ chapter of the AWX Operator Documentation for information on setting up this type of topology. The last step is to create a remote execution node and add the execution node to an instance group in order for it to be used in your job execution. Whatever execution environment image used to run a playbook needs to be accessible for your remote execution node. Everything you are using in your playbook also needs to be accessible from this remote execution node.

.. image:: ../common/images/instances-job-template-using-remote-execution-ig.png
:alt: Job template using the instance group with the execution node to run jobs.

:alt: Job template using the instance group with the execution node to run jobs.
:width: 1400px


.. _ag_instances_add:
@@ -167,7 +168,8 @@ To create an instance in AWX:
2. In the Instances list view, click the **Add** button and the Create new Instance window opens.

.. image:: ../common/images/instances_create_new.png
:alt: Create a new instance form.
:alt: Create a new instance form.
:width: 1400px

An instance has several attributes that may be configured:

@@ -189,7 +191,8 @@ An instance has several attributes that may be configured:
Upon successful creation, the Details of the one of the created instances opens.

.. image:: ../common/images/instances_create_details.png
:alt: Details of the newly created instance.
:alt: Details of the newly created instance.
:width: 1400px

.. note::

@@ -198,7 +201,8 @@ Upon successful creation, the Details of the one of the created instances opens.
4. Click the download button next to the **Install Bundle** field to download the tarball that contain files to allow AWX to make proper TCP connections to the remote machine.

.. image:: ../common/images/instances_install_bundle.png
:alt: Instance details showing the Download button in the Install Bundle field of the Details tab.
:alt: Instance details showing the Download button in the Install Bundle field of the Details tab.
:width: 1400px

5. Extract the downloaded ``tar.gz`` file from the location you downloaded it. The install bundle contains TLS certificates and keys, a certificate authority, and a proper Receptor configuration file. To facilitate that these files will be in the right location on the remote machine, the install bundle includes an ``install_receptor.yml`` playbook. The playbook requires the Receptor collection which can be obtained via:

@@ -236,12 +240,14 @@ Wait a few minutes for the periodic AWX task to do a health check against the ne
9. To view other instances within the same topology or associate peers, click the **Peers** tab.

.. image:: ../common/images/instances_peers_tab.png
:alt: "Peers" tab showing two peers.
:alt: "Peers" tab showing two peers.
:width: 1400px

To associate peers with your node, click the **Associate** button to open a dialog box of instances eligible for peering.

.. image:: ../common/images/instances_associate_peer.png
:alt: Instances available to peer with the example hop node.
:alt: Instances available to peer with the example hop node.
:width: 1400px

Execution nodes can peer with either hop nodes or other execution nodes. Hop nodes can only peer with execution nodes unless you check the **Peers from control nodes** check box from the **Options** field.

@@ -249,8 +255,8 @@ Execution nodes can peer with either hop nodes or other execution nodes. Hop nod

If you associate or disassociate a peer, a notification will inform you to re-run the install bundle from the Peer Detail view (the :ref:`ag_topology_viewer` has the download link).

.. image:: ../common/images/instances_associate_peer_reinstallmsg.png
:alt: Notification to re-run the installation bundle due to change in the peering.
.. image:: ../common/images/instances_associate_peer_reinstallmsg.png
:alt: Notification to re-run the installation bundle due to change in the peering.

You can remove an instance by clicking **Remove** in the Instances page, or by setting the instance ``node_state = deprovisioning`` via the API. Upon deleting, a pop-up message will appear to notify that you may need to re-run the install bundle to make sure things that were removed are no longer connected.

@@ -264,7 +270,8 @@ Manage instances
Click **Instances** from the left side navigation menu to access the Instances list.

.. image:: ../common/images/instances_list_view.png
:alt: List view of instances in AWX
:alt: List view of instances in AWX
:width: 1400px

The Instances list displays all the current nodes in your topology, along with relevant details:

@@ -290,7 +297,9 @@ The Instances list displays all the current nodes in your topology, along with r
From this page, you can add, remove or run health checks on your nodes. Use the check boxes next to an instance to select it to remove or run a health check against. When a button is grayed-out, you do not have permission for that particular action. Contact your Administrator to grant you the required level of access. If you are able to remove an instance, you will receive a prompt for confirmation, like the one below:

.. image:: ../common/images/instances_delete_prompt.png
:alt: Prompt for deleting instances in AWX.
:alt: Prompt for deleting instances in AWX
:width: 1400px


.. note::

@@ -303,7 +312,8 @@ Click **Remove** to confirm.
If running a health check on an instance, at the top of the Details page, a message displays that the health check is in progress.

.. image:: ../common/images/instances_health_check.png
:alt: Health check for instances in AWX
:alt: Health check for instances in AWX
:width: 1400px

Click **Reload** to refresh the instance status.

@@ -311,13 +321,14 @@

Health checks are run asynchronously, and may take up to a minute for the instance status to update, even with a refresh. The status may or may not change after the health check. At the bottom of the Details page, a timer/clock icon displays next to the last known health check date and time stamp if the health check task is currently running.

.. image:: ../common/images/instances_health_check_pending.png
:alt: Health check for instance still in pending state.
.. image:: ../common/images/instances_health_check_pending.png
:alt: Health check for instance still in pending state.

The example health check shows the status updates with an error on node 'one':

.. image:: ../common/images/topology-viewer-instance-with-errors.png
:alt: Health check showing an error in one of the instances.
:alt: Health check showing an error in one of the instances.
:width: 1400px


Using a custom Receptor CA

@@ -7,6 +7,7 @@ Setting up LDAP Authentication
single: LDAP
pair: authentication; LDAP

This chapter describes how to integrate LDAP authentication with AWX.

.. note::