Mirror of https://github.com/ansible/awx.git, synced 2026-02-09 05:24:42 -03:30
Compare commits
118 Commits
| SHA1 |
|---|
| 86204cf23b |
| 468949b899 |
| f1d9966224 |
| b022b50966 |
| e2f4213839 |
| ae1235b223 |
| c061f59f1c |
| 3edaaebba2 |
| 7cdf1c7f96 |
| d558204192 |
| d06ce8f911 |
| 4b6f7e0ebe |
| 370c567be1 |
| 9be64f3de5 |
| 30500e5a95 |
| bb323c5710 |
| 7571df49d5 |
| 1559c21033 |
| d9b81731e9 |
| 2034cca3a9 |
| 0b5e59d9cb |
| f48b2d1ae5 |
| b44bb98c7e |
| 8cafdf0400 |
| 3f566c8737 |
| c8021a25bf |
| 934646a0f6 |
| 9bb97dd658 |
| 7150f5edc6 |
| 93da15c0ee |
| ab593bda45 |
| 065bd3ae2a |
| 8ff7260bc6 |
| a635445082 |
| 949e7efab1 |
| 615f09226f |
| d903c524f5 |
| 393d9c39c6 |
| dfab342bb4 |
| 12843eccf7 |
| dd9160135d |
| ad96a92fa7 |
| ca8085fe7e |
| b076cb00a9 |
| ee9eac15dc |
| 3f2f7b75a6 |
| b71645f3b1 |
| eb300252b8 |
| 2e2cd7f2de |
| 727278aaa3 |
| 81825ab755 |
| 7f2a1b6b03 |
| 1b56d94d30 |
| e1e32c971c |
| a4a2fabc01 |
| b7b7bfa520 |
| 887604317e |
| d35d8b6ed7 |
| ec28eff7f7 |
| a5d17539c6 |
| a49d894cf1 |
| b3466d4449 |
| 237adc6150 |
| 09b028ee3c |
| fb83bfbc31 |
| 88e406e121 |
| 59d0bcc63f |
| 3fb3125bc3 |
| d70c6b9474 |
| 5549516a37 |
| 14ac91a8a2 |
| d5753818a0 |
| 33010a2e02 |
| 14454cc670 |
| 7ab2bca16e |
| f0f655f2c3 |
| 4286d411a7 |
| 06ad32ed8e |
| 1ebff23232 |
| 700de14c76 |
| 8605e339df |
| e50954ce40 |
| 7caca60308 |
| f4e13af056 |
| decdb56288 |
| bcd4c2e8ef |
| d663066ac5 |
| 1ceebb275c |
| f78ba282a6 |
| 81d88df757 |
| 0bdb01a9e9 |
| cd91fbf59f |
| f240e640e5 |
| 46f489185e |
| dbb80fb7e3 |
| cb3d357ce1 |
| dfa4db9266 |
| 6906a88dc9 |
| 1f7be9258c |
| dcce024424 |
| 79d7179c72 |
| 4d80f886e0 |
| 5179333185 |
| 362e11aaf2 |
| decff01fa4 |
| a14cc8199d |
| b6436826f6 |
| 2109b5039e |
| b6f9b73418 |
| 40a8a3cb2f |
| 19f80c0a26 |
| 5d1bb2125e |
| 99c512bcef |
| ed0329f5db |
| dd53345397 |
| f66cde51d7 |
| d1c31687fc |
| 38424487f1 |
.github/actions/awx_devel_image/action.yml (vendored, 10 changes)

```diff
@@ -11,6 +11,12 @@ runs:
       shell: bash
       run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
+    - name: Set lower case owner name
+      shell: bash
+      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
+      env:
+        OWNER: '${{ github.repository_owner }}'
+
     - name: Log in to registry
       shell: bash
       run: |
@@ -18,11 +24,11 @@ runs:
 
     - name: Pre-pull latest devel image to warm cache
       shell: bash
-      run: docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+      run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
 
     - name: Build image for current source checkout
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
         COMPOSE_TAG=${{ github.base_ref }} \
         make docker-compose-build
```
.github/actions/run_awx_devel/action.yml (vendored, 4 changes)

```diff
@@ -35,7 +35,7 @@ runs:
     - name: Start AWX
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
         COMPOSE_TAG=${{ github.base_ref }} \
         COMPOSE_UP_OPTS="-d" \
         make docker-compose
@@ -71,7 +71,7 @@
       id: data
       shell: bash
       run: |
-        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks._sources_awx.IPAddress}}' tools_awx_1)
+        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
         ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
         echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
         echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
```
.github/pr_labeler.yml (vendored, 3 changes)

```diff
@@ -15,5 +15,4 @@
 
 "dependencies":
   - any: ["awx/ui/package.json"]
-  - any: ["requirements/*.txt"]
-  - any: ["requirements/requirements.in"]
+  - any: ["requirements/*"]
```
.github/workflows/ci.yml (vendored, 18 changes)

```diff
@@ -94,11 +94,11 @@ jobs:
     - name: Build AWX image
       working-directory: awx
       run: |
-        ansible-playbook -v tools/ansible/build.yml \
-          -e headless=yes \
-          -e awx_image=awx \
-          -e awx_image_tag=ci \
-          -e ansible_python_interpreter=$(which python3)
+        VERSION=`make version-for-buildyml` make awx-kube-build
+      env:
+        COMPOSE_TAG: ci
+        DEV_DOCKER_TAG_BASE: local
+        HEADLESS: yes
 
     - name: Run test deployment with awx-operator
       working-directory: awx-operator
@@ -107,9 +107,9 @@ jobs:
         ansible-galaxy collection install -r molecule/requirements.yml
         sudo rm -f $(which kustomize)
         make kustomize
-        KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
+        KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
       env:
-        AWX_TEST_IMAGE: awx
+        AWX_TEST_IMAGE: local/awx
         AWX_TEST_VERSION: ci
 
   collection-sanity:
@@ -127,10 +127,6 @@ jobs:
 
     - name: Run sanity tests
       run: make test_collection_sanity
-      env:
-        # needed due to cgroupsv2. This is fixed, but a stable release
-        # with the fix has not been made yet.
-        ANSIBLE_TEST_PREFER_PODMAN: 1
 
   collection-integration:
     name: awx_collection integration
```
.github/workflows/devel_images.yml (vendored, 64 changes)

```diff
@@ -3,28 +3,50 @@ name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
       - feature_*
 jobs:
-  push:
-    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
+  push-development-images:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
+    timeout-minutes: 120
     permissions:
       packages: write
       contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        build-targets:
+          - image-name: awx_devel
+            make-target: docker-compose-buildx
+          - image-name: awx_kube_devel
+            make-target: awx-kube-dev-buildx
+          - image-name: awx
+            make-target: awx-kube-buildx
     steps:
+      - name: Skipping build of awx image for non-awx repository
+        run: |
+          echo "Skipping build of awx image for non-awx repository"
+          exit 0
+        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
+
       - uses: actions/checkout@v3
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
 
-      - name: Set lower case owner name
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Set GITHUB_ENV variables
         run: |
           echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
+          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
+          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
         env:
           OWNER: '${{ github.repository_owner }}'
 
@@ -37,23 +59,19 @@ jobs:
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
 
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
+      - name: Setup node and npm
+        uses: actions/setup-node@v2
+        with:
+          node-version: '16.13.1'
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Build images
+      - name: Prebuild UI for awx image (to speed up build process)
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
+          sudo apt-get install gettext
+          make ui-release
+          make ui-next
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Push development images
+      - name: Build and push AWX devel images
         run: |
-          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
-          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
-
-      - name: Push AWX k8s image, only for upstream and feature branches
-        run: docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
-        if: endsWith(github.repository, '/awx')
+          make ${{ matrix.build-targets.make-target }}
```
.github/workflows/feature_branch_deletion.yml (vendored, 12 changes)

```diff
@@ -2,12 +2,10 @@
 name: Feature branch deletion cleanup
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-on:
-  delete:
-    branches:
-      - feature_**
+on: delete
 jobs:
-  push:
+  branch_delete:
+    if: ${{ github.event.ref_type == 'branch' && startsWith(github.event.ref, 'feature_') }}
     runs-on: ubuntu-latest
     timeout-minutes: 20
     permissions:
@@ -22,6 +20,4 @@ jobs:
       run: |
-        ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
         ansible localhost -c local -m aws_s3 \
-          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
```
.github/workflows/promote.yml (vendored, 20 changes)

```diff
@@ -83,11 +83,15 @@ jobs:
 
     - name: Re-tag and promote awx image
       run: |
-        docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-        docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-        docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
-        docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-        docker push quay.io/${{ github.repository }}:latest
-        docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-        docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-        docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+        docker buildx imagetools create \
+          ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
+          --tag quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+        docker buildx imagetools create \
+          ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
+          --tag quay.io/${{ github.repository }}:latest
+
+    - name: Re-tag and promote awx-ee image
+      run: |
+        docker buildx imagetools create \
+          ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} \
+          --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
```
.github/workflows/stage.yml (vendored, 102 changes)

```diff
@@ -49,13 +49,11 @@ jobs:
       with:
         path: awx
 
-    - name: Get python version from Makefile
-      run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-    - name: Install python ${{ env.py_version }}
-      uses: actions/setup-python@v4
+    - name: Checkout awx-operator
+      uses: actions/checkout@v3
       with:
-        python-version: ${{ env.py_version }}
+        repository: ${{ github.repository_owner }}/awx-operator
+        path: awx-operator
 
     - name: Checkout awx-logos
      uses: actions/checkout@v3
@@ -63,50 +61,84 @@ jobs:
         repository: ansible/awx-logos
         path: awx-logos
 
-    - name: Checkout awx-operator
-      uses: actions/checkout@v3
+    - name: Get python version from Makefile
+      working-directory: awx
+      run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+    - name: Install python ${{ env.py_version }}
+      uses: actions/setup-python@v4
       with:
-        repository: ${{ github.repository_owner }}/awx-operator
-        path: awx-operator
+        python-version: ${{ env.py_version }}
 
     - name: Install playbook dependencies
       run: |
         python3 -m pip install docker
 
-    - name: Build and stage AWX
+    - name: Log into registry ghcr.io
+      uses: docker/login-action@v3
+      with:
+        registry: ghcr.io
+        username: ${{ github.actor }}
+        password: ${{ secrets.GITHUB_TOKEN }}
+
+    - name: Copy logos for inclusion in sdist for official build
       working-directory: awx
       run: |
-        ansible-playbook -v tools/ansible/build.yml \
-          -e registry=ghcr.io \
-          -e registry_username=${{ github.actor }} \
-          -e registry_password=${{ secrets.GITHUB_TOKEN }} \
-          -e awx_image=${{ github.repository }} \
-          -e awx_version=${{ github.event.inputs.version }} \
-          -e ansible_python_interpreter=$(which python3) \
-          -e push=yes \
-          -e awx_official=yes
+        cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
 
-    - name: Log in to GHCR
-      run: |
-        echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+    - name: Setup node and npm
+      uses: actions/setup-node@v2
+      with:
+        node-version: '16.13.1'
 
-    - name: Log in to Quay
+    - name: Prebuild UI for awx image (to speed up build process)
+      working-directory: awx
       run: |
-        echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+        sudo apt-get install gettext
+        make ui-release
+        make ui-next
+
+    - name: Set build env variables
+      run: |
+        echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+        echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+        echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+        echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+        echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
+        echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
+        echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
+      env:
+        OWNER: ${{ github.repository_owner }}
+
+    - name: Build and stage AWX
+      working-directory: awx
+      env:
+        DOCKER_BUILDX_PUSH: true
+        HEADLESS: false
+        PLATFORMS: linux/amd64,linux/arm64
+      run: |
+        make awx-kube-buildx
 
     - name: tag awx-ee:latest with version input
       run: |
-        docker pull quay.io/ansible/awx-ee:latest
-        docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
-        docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+        docker buildx imagetools create \
+          quay.io/ansible/awx-ee:latest \
+          --tag ${AWX_EE_TEST_IMAGE}
 
-    - name: Build and stage awx-operator
+    - name: Stage awx-operator image
       working-directory: awx-operator
       run: |
-        BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \
-          --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
-        IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \
-        VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push
+        BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
+          --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
+        IMG=${AWX_OPERATOR_TEST_IMAGE} \
+        make docker-buildx
+
+    - name: Pulling images for test deployment with awx-operator
+      # awx operator molecue test expect to kind load image and buildx exports image to registry and not local
+      run: |
+        docker pull ${AWX_OPERATOR_TEST_IMAGE}
+        docker pull ${AWX_EE_TEST_IMAGE}
+        docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
 
     - name: Run test deployment with awx-operator
       working-directory: awx-operator
@@ -116,10 +148,6 @@ jobs:
         sudo rm -f $(which kustomize)
         make kustomize
         KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
-      env:
-        AWX_TEST_IMAGE: ${{ github.repository }}
-        AWX_TEST_VERSION: ${{ github.event.inputs.version }}
-        AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
 
     - name: Create draft release for AWX
       working-directory: awx
```
.gitignore (vendored, 5 changes)

```diff
@@ -46,6 +46,11 @@ tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
 tools/docker-compose/keycloak.awx.realm.json
 
+!tools/docker-compose/editable_dependencies
+tools/docker-compose/editable_dependencies/*
+!tools/docker-compose/editable_dependencies/README.md
+!tools/docker-compose/editable_dependencies/install.sh
+
 # Tower setup playbook testing
 setup/test/roles/postgresql
 **/provision_docker
```
.vscode/launch.json (vendored, new file, 113 lines)

```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "run_ws_heartbeat",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_ws_heartbeat"],
            "django": true,
            "preLaunchTask": "stop awx-ws-heartbeat",
            "postDebugTask": "start awx-ws-heartbeat"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_callback_receiver",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_callback_receiver"],
            "django": true,
            "preLaunchTask": "stop awx-receiver",
            "postDebugTask": "start awx-receiver"
        },
        {
            "name": "run_dispatcher",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_dispatcher"],
            "django": true,
            "preLaunchTask": "stop awx-dispatcher",
            "postDebugTask": "start awx-dispatcher"
        },
        {
            "name": "run_rsyslog_configurer",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_rsyslog_configurer"],
            "django": true,
            "preLaunchTask": "stop awx-rsyslog-configurer",
            "postDebugTask": "start awx-rsyslog-configurer"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_wsrelay",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_wsrelay"],
            "django": true,
            "preLaunchTask": "stop awx-wsrelay",
            "postDebugTask": "start awx-wsrelay"
        },
        {
            "name": "daphne",
            "type": "debugpy",
            "request": "launch",
            "program": "/var/lib/awx/venv/awx/bin/daphne",
            "args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
            "django": true,
            "preLaunchTask": "stop awx-daphne",
            "postDebugTask": "start awx-daphne"
        },
        {
            "name": "runserver(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "runserver_plus(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver_plus", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi and install Werkzeug",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "shell_plus",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["shell_plus"],
            "django": true,
        },
    ]
}
```
.vscode/tasks.json (vendored, new file, 100 lines)

```json
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "start awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-cache-clear"
        },
        {
            "label": "stop awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-cache-clear"
        },
        {
            "label": "start awx-daphne",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-daphne"
        },
        {
            "label": "stop awx-daphne",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-daphne"
        },
        {
            "label": "start awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-dispatcher"
        },
        {
            "label": "stop awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-dispatcher"
        },
        {
            "label": "start awx-receiver",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-receiver"
        },
        {
            "label": "stop awx-receiver",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-receiver"
        },
        {
            "label": "start awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "stop awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "start awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslogd"
        },
        {
            "label": "stop awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslogd"
        },
        {
            "label": "start awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi and install Werkzeug",
            "type": "shell",
            "command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "start awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "stop awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "start awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-wsrelay"
        },
        {
            "label": "stop awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-wsrelay"
        }
    ]
}
```
Makefile (77 changes)

```diff
@@ -1,8 +1,8 @@
 -include awx/ui_next/Makefile
 
-PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
+PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
 SHELL := bash
-DOCKER_COMPOSE ?= docker-compose
+DOCKER_COMPOSE ?= docker compose
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
@@ -10,7 +10,7 @@ KIND_BIN ?= $(shell which kind)
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py)
+VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
 
 # ansible-test requires semver compatable version, so we allow overrides to hack it
 COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
@@ -47,6 +47,8 @@ VAULT ?= false
 VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
+# If set to true docker-compose will install editable dependencies
+EDITABLE_DEPENDENCIES ?= false
 
 VENV_BASE ?= /var/lib/awx/venv
 
@@ -63,7 +65,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
 
 NAME ?= awx
 
@@ -75,6 +77,9 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
 
 I18N_FLAG_FILE = .i18n_built
 
+## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
+PLATFORMS ?= linux/amd64,linux/arm64  # linux/ppc64le,linux/s390x
+
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 	develop refresh adduser migrate dbchange \
 	receiver test test_unit test_coverage coverage_html \
@@ -213,8 +218,6 @@ collectstatic:
 	fi; \
 	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 
-DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
-
 uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
@@ -222,7 +225,7 @@ uwsgi: collectstatic
 	uwsgi /etc/tower/uwsgi.ini
 
 awx-autoreload:
-	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
+	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
 
 daphne:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -302,7 +305,7 @@ swagger: reports
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
+	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
 
 check: black
 
@@ -532,16 +535,23 @@ docker-compose-sources: .git/hooks/pre-commit
 	    -e enable_vault=$(VAULT) \
 	    -e vault_tls=$(VAULT_TLS) \
 	    -e enable_tacacs=$(TACACS) \
-	    $(EXTRA_SOURCES_ANSIBLE_OPTS)
+	    -e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+	    $(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
 	    -e enable_vault=$(VAULT) \
 	    -e vault_tls=$(VAULT_TLS) \
-	    -e enable_ldap=$(LDAP);
+	    -e enable_ldap=$(LDAP); \
+	$(MAKE) docker-compose-up
+
+docker-compose-up:
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
 
+docker-compose-down:
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
+
 docker-compose-credential-plugins: awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
@@ -586,12 +596,27 @@ docker-compose-build: Dockerfile.dev
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
 		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
 
+.PHONY: docker-compose-buildx
+## Build awx_devel image for docker compose development environment for multiple architectures
+docker-compose-buildx: Dockerfile.dev
+	- docker buildx create --name docker-compose-buildx
+	docker buildx use docker-compose-buildx
+	- docker buildx build \
+		--push \
+		--build-arg BUILDKIT_INLINE_CACHE=1 \
+		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) \
+		--platform=$(PLATFORMS) \
+		--tag $(DEVEL_IMAGE_NAME) \
+		-f Dockerfile.dev .
+	- docker buildx rm docker-compose-buildx
+
 docker-clean:
 	-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
 	-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
 
 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-	docker volume rm -f tools_awx_db tools_vault_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
+	docker volume rm -f tools_var_lib_awx tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
 
 docker-refresh: docker-clean docker-compose
 
@@ -613,9 +638,6 @@ clean-elk:
 	docker rm tools_elasticsearch_1
 	docker rm tools_kibana_1
 
-psql-container:
-	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
-
 VERSION:
 	@echo "awx: $(VERSION)"
 
@@ -648,6 +670,21 @@ awx-kube-build: Dockerfile
 		--build-arg HEADLESS=$(HEADLESS) \
 		-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
 
+## Build multi-arch awx image for deployment on Kubernetes environment.
+awx-kube-buildx: Dockerfile
+	- docker buildx create --name awx-kube-buildx
+	docker buildx use awx-kube-buildx
+	- docker buildx build \
+		--push \
+		--build-arg VERSION=$(VERSION) \
+		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+		--build-arg HEADLESS=$(HEADLESS) \
+		--platform=$(PLATFORMS) \
+		--tag $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) \
+		-f Dockerfile .
+	- docker buildx rm awx-kube-buildx
+
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
@@ -664,6 +701,18 @@ awx-kube-dev-build: Dockerfile.kube-dev
 		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
 		-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
 
+## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
+awx-kube-dev-buildx: Dockerfile.kube-dev
+	- docker buildx create --name awx-kube-dev-buildx
+	docker buildx use awx-kube-dev-buildx
+	- docker buildx build \
+		--push \
+		--build-arg BUILDKIT_INLINE_CACHE=1 \
+		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+		--platform=$(PLATFORMS) \
+		--tag $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+		-f Dockerfile.kube-dev .
+	- docker buildx rm awx-kube-dev-buildx
+
 kind-dev-load: awx-kube-dev-build
 	$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
```
```diff
@@ -154,10 +154,12 @@ def manage():
     from django.conf import settings
     from django.core.management import execute_from_command_line
 
-    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
+    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
+    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
+    # The return of connection.pg_version is something like 12013
     if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
         if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("Postgres version 12 is required\n")
+            sys.stderr.write("At a minimum, postgres version 12 is required\n")
             sys.exit(1)
 
     if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
```
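The gate above reduces Django's integer-encoded server version to a major number before comparing. A standalone sketch of the same check, assuming a configured Django connection (the encoding is major * 10000 + minor on modern servers):

```python
import sys

from django.db import connection  # assumes Django settings are configured


def require_postgres_12():
    # PostgreSQL 12.13 reports pg_version 120013, so integer division
    # by 10000 recovers the major version (120013 // 10000 == 12).
    if (connection.pg_version // 10000) < 12:
        sys.stderr.write("At a minimum, postgres version 12 is required\n")
        sys.exit(1)
```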
```diff
@@ -93,6 +93,7 @@ register(
     default='',
     label=_('Login redirect override URL'),
     help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
+    warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
     category=_('Authentication'),
     category_slug='authentication',
 )
```
```diff
@@ -36,11 +36,13 @@ class Metadata(metadata.SimpleMetadata):
         field_info = OrderedDict()
         field_info['type'] = self.label_lookup[field]
         field_info['required'] = getattr(field, 'required', False)
+        field_info['hidden'] = getattr(field, 'hidden', False)
 
         text_attrs = [
             'read_only',
             'label',
             'help_text',
+            'warning_text',
             'min_length',
             'max_length',
             'min_value',
```
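These two additions let OPTIONS responses carry the new per-field attributes. A condensed sketch of the surfacing logic with a stand-in field object (attribute names match the diff; the helper itself is illustrative):

```python
from collections import OrderedDict


def field_metadata(field):
    # Mirrors the diff: `hidden` is always emitted, while `warning_text`
    # is copied only when the field defines it, like the other text_attrs.
    info = OrderedDict()
    info['required'] = getattr(field, 'required', False)
    info['hidden'] = getattr(field, 'hidden', False)
    for attr in ('label', 'help_text', 'warning_text'):
        value = getattr(field, attr, None)
        if value is not None:
            info[attr] = value
    return info
```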
```diff
@@ -191,6 +191,7 @@ SUMMARIZABLE_FK_FIELDS = {
     'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
     'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
     'credential_type': DEFAULT_SUMMARY_FIELDS,
+    'resource': ('ansible_id', 'resource_type'),
 }
 
@@ -5594,7 +5595,7 @@ class InstanceSerializer(BaseSerializer):
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
         res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
         res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
-        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
+        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP] and not obj.managed:
             res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
         if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
             if obj.node_type == 'execution':
```
```diff
@@ -2,28 +2,21 @@
 # All Rights Reserved.
 
 from django.conf import settings
-from django.urls import NoReverseMatch
 
-from rest_framework.reverse import _reverse
+from rest_framework.reverse import reverse as drf_reverse
 from rest_framework.versioning import URLPathVersioning as BaseVersioning
 
 
-def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
-    """
-    Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
-    query string parameters.
-    """
-    scheme = getattr(request, 'versioning_scheme', None)
-    if scheme is not None:
-        try:
-            url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
-        except NoReverseMatch:
-            # In case the versioning scheme reversal fails, fallback to the
-            # default implementation
-            url = _reverse(viewname, args, kwargs, request, format, **extra)
-    else:
-        url = _reverse(viewname, args, kwargs, request, format, **extra)
+def is_optional_api_urlpattern_prefix_request(request):
+    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
+        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
+            return True
+    return False
+
+
+def transform_optional_api_urlpattern_prefix_url(request, url):
+    if is_optional_api_urlpattern_prefix_request(request):
+        url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
+    return url
 
 
@@ -36,7 +29,9 @@ def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra
         kwargs = {}
     if 'version' not in kwargs:
         kwargs['version'] = settings.REST_FRAMEWORK['DEFAULT_VERSION']
-    return drf_reverse(viewname, args, kwargs, request, format, **extra)
+    url = drf_reverse(viewname, args, kwargs, request, format, **extra)
+
+    return transform_optional_api_urlpattern_prefix_url(request, url)
 
 
 class URLPathVersioning(BaseVersioning):
```
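The two helpers introduced above rewrite reversed URLs when the request arrived through the optional `/api/<prefix>/` mount. A minimal standalone sketch of that rewrite, using a stand-in settings object and request stub (both hypothetical; only `OPTIONAL_API_URLPATTERN_PREFIX` and the string logic come from the diff):

```python
class FakeSettings:
    OPTIONAL_API_URLPATTERN_PREFIX = 'awx'  # stand-in for the Django setting


settings = FakeSettings()


class FakeRequest:
    def __init__(self, path):
        self.path = path


def is_optional_api_urlpattern_prefix_request(request):
    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
            return True
    return False


def transform_optional_api_urlpattern_prefix_url(request, url):
    # A request served under /api/awx/ gets its reversed links rewritten to match.
    if is_optional_api_urlpattern_prefix_request(request):
        url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
    return url


assert transform_optional_api_urlpattern_prefix_url(FakeRequest('/api/awx/v2/ping/'), '/api/v2/jobs/') == '/api/awx/v2/jobs/'
```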
```diff
@@ -272,16 +272,24 @@ class DashboardJobsGraphView(APIView):
 
         success_query = user_unified_jobs.filter(status='successful')
         failed_query = user_unified_jobs.filter(status='failed')
+        canceled_query = user_unified_jobs.filter(status='canceled')
+        error_query = user_unified_jobs.filter(status='error')
 
         if job_type == 'inv_sync':
             success_query = success_query.filter(instance_of=models.InventoryUpdate)
             failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
+            canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
+            error_query = error_query.filter(instance_of=models.InventoryUpdate)
         elif job_type == 'playbook_run':
             success_query = success_query.filter(instance_of=models.Job)
             failed_query = failed_query.filter(instance_of=models.Job)
+            canceled_query = canceled_query.filter(instance_of=models.Job)
+            error_query = error_query.filter(instance_of=models.Job)
         elif job_type == 'scm_update':
             success_query = success_query.filter(instance_of=models.ProjectUpdate)
             failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
+            canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
+            error_query = error_query.filter(instance_of=models.ProjectUpdate)
 
         end = now()
         interval = 'day'
@@ -297,10 +305,12 @@ class DashboardJobsGraphView(APIView):
         else:
             return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
 
-        dashboard_data = {"jobs": {"successful": [], "failed": []}}
+        dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}}
 
         succ_list = dashboard_data['jobs']['successful']
         fail_list = dashboard_data['jobs']['failed']
+        canceled_list = dashboard_data['jobs']['canceled']
+        error_list = dashboard_data['jobs']['error']
 
         qs_s = (
             success_query.filter(finished__range=(start, end))
@@ -318,6 +328,22 @@ class DashboardJobsGraphView(APIView):
             .annotate(agg=Count('id', distinct=True))
         )
         data_f = {item['d']: item['agg'] for item in qs_f}
+        qs_c = (
+            canceled_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_c = {item['d']: item['agg'] for item in qs_c}
+        qs_e = (
+            error_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_e = {item['d']: item['agg'] for item in qs_e}
 
         start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
         for d in itertools.count():
@@ -326,6 +352,8 @@ class DashboardJobsGraphView(APIView):
                 break
             succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
             fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
+            canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
+            error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])
 
         return Response(dashboard_data)
```
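All four status series above share one aggregation shape: truncate `finished` to the interval, group, and count distinct ids. A generic sketch of that query, assuming any Django queryset over a model with a `finished` datetime field:

```python
from django.db.models import Count
from django.db.models.functions import Trunc


def bucket_counts(queryset, start, end, interval='day'):
    # Returns {truncated_datetime: job_count} for one status queryset,
    # matching the qs_s / qs_f / qs_c / qs_e pattern in the diff.
    rows = (
        queryset.filter(finished__range=(start, end))
        .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
        .order_by()
        .values('d')
        .annotate(agg=Count('id', distinct=True))
    )
    return {row['d']: row['agg'] for row in rows}
```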
```diff
@@ -48,23 +48,23 @@ class AnalyticsRootView(APIView):
 
     def get(self, request, format=None):
         data = OrderedDict()
-        data['authorized'] = reverse('api:analytics_authorized')
-        data['reports'] = reverse('api:analytics_reports_list')
-        data['report_options'] = reverse('api:analytics_report_options_list')
-        data['adoption_rate'] = reverse('api:analytics_adoption_rate')
-        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
-        data['event_explorer'] = reverse('api:analytics_event_explorer')
-        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
-        data['host_explorer'] = reverse('api:analytics_host_explorer')
-        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
-        data['job_explorer'] = reverse('api:analytics_job_explorer')
-        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
-        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
-        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
-        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
-        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
-        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
-        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
+        data['authorized'] = reverse('api:analytics_authorized', request=request)
+        data['reports'] = reverse('api:analytics_reports_list', request=request)
+        data['report_options'] = reverse('api:analytics_report_options_list', request=request)
+        data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
+        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
+        data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
+        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
+        data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
+        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
+        data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
+        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
+        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
+        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
+        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
+        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
+        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
+        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
         return Response(data)
```
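Passing `request=request` matters because DRF's `reverse()` builds an absolute URI (scheme and host taken from the request) instead of a bare path when a request is supplied, and it also lets the prefix transform above see the request. The URL values in the comments below are illustrative:

```python
from rest_framework.reverse import reverse


def analytics_links(request):
    # Without a request the result is a path such as '/api/v2/analytics/reports/';
    # with one it becomes e.g. 'https://awx.example.com/api/v2/analytics/reports/'.
    return {'reports': reverse('api:analytics_reports_list', request=request)}
```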
```diff
@@ -13,6 +13,7 @@ from django.utils.decorators import method_decorator
 from django.views.decorators.csrf import ensure_csrf_cookie
 from django.template.loader import render_to_string
 from django.utils.translation import gettext_lazy as _
+from django.urls import reverse as django_reverse
 
 from rest_framework.permissions import AllowAny, IsAuthenticated
 from rest_framework.response import Response
@@ -27,7 +28,7 @@ from awx.main.analytics import all_collectors
 from awx.main.ha import is_ha_environment
 from awx.main.utils import get_awx_version, get_custom_venv_choices
 from awx.main.utils.licensing import validate_entitlement_manifest
-from awx.api.versioning import reverse, drf_reverse
+from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ
@@ -39,19 +40,19 @@ logger = logging.getLogger('awx.api.views.root')
 class ApiRootView(APIView):
     permission_classes = (AllowAny,)
     name = _('REST API')
-    versioning_class = None
+    versioning_class = URLPathVersioning
     swagger_topic = 'Versioning'
 
     @method_decorator(ensure_csrf_cookie)
     def get(self, request, format=None):
         '''List supported API versions'''
 
-        v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
+        v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
         data = OrderedDict()
         data['description'] = _('AWX REST API')
         data['current_version'] = v2
         data['available_versions'] = dict(v2=v2)
-        data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
+        if not is_optional_api_urlpattern_prefix_request(request):
+            data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
         data['custom_logo'] = settings.CUSTOM_LOGO
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
         data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
@@ -130,6 +131,7 @@ class ApiVersionRootView(APIView):
         data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
         data['bulk'] = reverse('api:bulk', request=request)
         data['analytics'] = reverse('api:analytics_root_view', request=request)
+        data['service_index'] = django_reverse('service-index-root')
         return Response(data)
```
```diff
@@ -55,6 +55,7 @@ register(
     # Optional; category_slug will be slugified version of category if not
     # explicitly provided.
     category_slug='cows',
+    hidden=True,
 )
 
@@ -127,6 +127,8 @@ class SettingsRegistry(object):
         encrypted = bool(field_kwargs.pop('encrypted', False))
         defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
         unit = field_kwargs.pop('unit', None)
+        hidden = field_kwargs.pop('hidden', False)
+        warning_text = field_kwargs.pop('warning_text', None)
         if getattr(field_kwargs.get('child', None), 'source', None) is not None:
             field_kwargs['child'].source = None
         field_instance = field_class(**field_kwargs)
@@ -134,12 +136,14 @@ class SettingsRegistry(object):
         field_instance.category = category
         field_instance.depends_on = depends_on
         field_instance.unit = unit
+        field_instance.hidden = hidden
         if placeholder is not empty:
             field_instance.placeholder = placeholder
         field_instance.defined_in_file = defined_in_file
         if field_instance.defined_in_file:
             field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
         field_instance.encrypted = encrypted
+        field_instance.warning_text = warning_text
         original_field_instance = field_instance
         if field_class != original_field_class:
             original_field_instance = original_field_class(**field_kwargs)
```
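The registry pops its own keyword arguments before constructing the DRF field, then pins them back on as attributes; otherwise the field constructor would reject the unknown kwargs. A condensed sketch of that pattern (the helper here is illustrative, not the real registry signature):

```python
def build_field(field_class, **field_kwargs):
    # Pop registry-only options first; DRF field constructors do not accept them.
    hidden = field_kwargs.pop('hidden', False)
    warning_text = field_kwargs.pop('warning_text', None)
    field_instance = field_class(**field_kwargs)
    # Re-attach the popped options so metadata rendering can read them later.
    field_instance.hidden = hidden
    field_instance.warning_text = warning_text
    return field_instance
```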
```diff
@@ -1,6 +1,7 @@
 # Python
 import contextlib
 import logging
+import psycopg
 import threading
 import time
 import os
@@ -13,7 +14,7 @@ from django.conf import settings, UserSettingsHolder
 from django.core.cache import cache as django_cache
 from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
 from django.db import transaction, connection
-from django.db.utils import Error as DBError, ProgrammingError
+from django.db.utils import DatabaseError, ProgrammingError
 from django.utils.functional import cached_property
 
 # Django REST Framework
@@ -80,18 +81,26 @@ def _ctit_db_wrapper(trans_safe=False):
             logger.debug('Obtaining database settings in spite of broken transaction.')
             transaction.set_rollback(False)
         yield
-    except DBError as exc:
+    except ProgrammingError as e:
+        # Exception raised for programming errors
+        # Examples may be table not found or already exists,
+        # syntax error in the SQL statement, wrong number of parameters specified, etc.
         if trans_safe:
-            level = logger.warning
-            if isinstance(exc, ProgrammingError):
-                if 'relation' in str(exc) and 'does not exist' in str(exc):
-                    # this generally means we can't fetch Tower configuration
-                    # because the database hasn't actually finished migrating yet;
-                    # this is usually a sign that a service in a container (such as ws_broadcast)
-                    # has come up *before* the database has finished migrating, and
-                    # especially that the conf.settings table doesn't exist yet
-                    level = logger.debug
-            level(f'Database settings are not available, using defaults. error: {str(exc)}')
+            # this generally means we can't fetch Tower configuration
+            # because the database hasn't actually finished migrating yet;
+            # this is usually a sign that a service in a container (such as ws_broadcast)
+            # has come up *before* the database has finished migrating, and
+            # especially that the conf.settings table doesn't exist yet
+            logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
         else:
             logger.exception('Error modifying something related to database settings.')
+    except DatabaseError as e:
+        if trans_safe:
+            cause = e.__cause__
+            if cause and hasattr(cause, 'sqlstate'):
+                sqlstate = cause.sqlstate
+                sqlstate_str = psycopg.errors.lookup(sqlstate)
+                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
+        else:
+            logger.exception('Error modifying something related to database settings.')
     finally:
```
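The new `DatabaseError` branch digs the five-character SQLSTATE code out of the underlying psycopg (v3) exception via `__cause__`, then maps it to a readable name with `psycopg.errors.lookup()`. A small sketch of that introspection (the DSN is a placeholder):

```python
import psycopg

try:
    # Placeholder DSN; any server-rejected connection or statement works here.
    with psycopg.connect('dbname=does_not_exist') as conn:
        conn.execute('SELECT 1')
except psycopg.Error as exc:
    # sqlstate can be None for purely client-side failures, so guard it.
    if exc.sqlstate:
        # e.g. '3D000' maps to psycopg.errors.InvalidCatalogName
        print(exc.sqlstate, psycopg.errors.lookup(exc.sqlstate))
```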
```diff
@@ -639,7 +639,10 @@ class UserAccess(BaseAccess):
     """
 
     model = User
-    prefetch_related = ('profile',)
+    prefetch_related = (
+        'profile',
+        'resource',
+    )
 
     def filtered_queryset(self):
         if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
@@ -835,6 +838,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
     prefetch_related = (
         'created_by',
         'modified_by',
+        'resource',  # dab_resource_registry
     )
     # organization admin_role is not a parent of organization auditor_role
     notification_attach_roles = ['admin_role', 'auditor_role']
@@ -1303,6 +1307,7 @@ class TeamAccess(BaseAccess):
         'created_by',
         'modified_by',
         'organization',
+        'resource',  # dab_resource_registry
     )
 
     def filtered_queryset(self):
```
```diff
@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
                 resolved_action,
                 resolved_role,
                 -- '-' operator listed here:
-                -- https://www.postgresql.org/docs/12/functions-json.html
+                -- https://www.postgresql.org/docs/15/functions-json.html
                 -- note that operator is only supported by jsonb objects
                 -- https://www.postgresql.org/docs/current/datatype-json.html
                 (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
```
```diff
@@ -92,6 +92,7 @@ register(
     ),
     category=_('System'),
     category_slug='system',
+    required=False,
 )
 
 register(
@@ -774,6 +775,7 @@ register(
     allow_null=True,
     category=_('System'),
     category_slug='system',
+    required=False,
 )
 register(
     'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -815,6 +817,7 @@ register(
     help_text=_('Max jobs to allow bulk jobs to launch'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )
 
 register(
@@ -825,6 +828,7 @@ register(
     help_text=_('Max number of hosts to allow to be created in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )
 
 register(
@@ -835,6 +839,7 @@ register(
     help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )
 
 register(
@@ -845,6 +850,7 @@ register(
     help_text=_('Enable preview of new user interface.'),
     category=_('System'),
     category_slug='system',
+    hidden=True,
 )
 
 register(
```
```diff
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]
 
-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
```
@@ -1,9 +1,10 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import ClientSecretCredential
from msrestazure import azure_cloud

from .plugin import CredentialPlugin

from django.utils.translation import gettext_lazy as _
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials
from msrestazure import azure_cloud


# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
@@ -54,22 +55,9 @@ azure_keyvault_inputs = {


def azure_keyvault_backend(**kwargs):
    url = kwargs['url']
    [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]

    def auth_callback(server, resource, scope):
        credentials = ServicePrincipalCredentials(
            url=url,
            client_id=kwargs['client'],
            secret=kwargs['secret'],
            tenant=kwargs['tenant'],
            resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
        )
        token = credentials.token
        return token['token_type'], token['access_token']

    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
    csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
    kv = SecretClient(credential=csc, vault_url=kwargs['url'])
    return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value


azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
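
A minimal sketch of the new azure-identity / azure-keyvault-secrets flow the backend above switches to; the tenant, client, secret, and vault URL values are placeholders for illustration:

from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient

# Service-principal credential replaces the old ServicePrincipalCredentials callback.
credential = ClientSecretCredential(
    tenant_id="00000000-0000-0000-0000-000000000000",  # hypothetical tenant
    client_id="my-app-id",                             # hypothetical app registration
    client_secret="my-app-secret",
)
# SecretClient replaces KeyVaultClient; the vault URL is passed once at construction.
client = SecretClient(credential=credential, vault_url="https://example-vault.vault.azure.net/")
print(client.get_secret(name="my-secret").value)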
@@ -1,6 +1,7 @@
import os
import psycopg
import select
from copy import deepcopy

from contextlib import contextmanager

@@ -94,8 +95,8 @@ class PubSub(object):


def create_listener_connection():
    conf = settings.DATABASES['default'].copy()
    conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
    conf = deepcopy(settings.DATABASES['default'])
    conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {}))
    # Modify the application name to distinguish from other connections the process might use
    conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
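
Why the switch to deepcopy above matters: dict.copy() is shallow, so nested values inside OPTIONS would still be shared with Django's live settings and mutations would leak back. A minimal illustration:

from copy import deepcopy

db_conf = {'OPTIONS': {'application_name': 'web'}}

shallow = db_conf.copy()
shallow['OPTIONS']['application_name'] = 'listener'
assert db_conf['OPTIONS']['application_name'] == 'listener'  # original mutated!

db_conf['OPTIONS']['application_name'] = 'web'
deep = deepcopy(db_conf)
deep['OPTIONS']['application_name'] = 'listener'
assert db_conf['OPTIONS']['application_name'] == 'web'       # original untouched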
@@ -162,7 +162,7 @@ class AWXConsumerRedis(AWXConsumerBase):
class AWXConsumerPG(AWXConsumerBase):
    def __init__(self, *args, schedule=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.pg_max_wait = settings.DISPATCHER_DB_DOWNTIME_TOLERANCE
        self.pg_max_wait = getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE)
        # if no successful loops have run since startup, then we should fail right away
        self.pg_is_down = True  # set so that we fail if we get database errors on startup
        init_time = time.time()
@@ -259,6 +259,12 @@ class AWXConsumerPG(AWXConsumerBase):
                current_downtime = time.time() - self.pg_down_time
                if current_downtime > self.pg_max_wait:
                    logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
                    # Sending QUIT to multiprocess queue to signal workers to exit
                    for worker in self.pool.workers:
                        try:
                            worker.quit()
                        except Exception:
                            logger.exception(f"Error sending QUIT to worker {worker}")
                    raise
            # Wait for a second before next attempt, but still listen for any shutdown signals
            for i in range(10):
@@ -270,6 +276,12 @@ class AWXConsumerPG(AWXConsumerBase):
        except Exception:
            # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
            logger.exception('Encountered unhandled error in dispatcher main loop')
            # Sending QUIT to multiprocess queue to signal workers to exit
            for worker in self.pool.workers:
                try:
                    worker.quit()
                except Exception:
                    logger.exception(f"Error sending QUIT to worker {worker}")
            raise
awx/main/management/commands/dump_auth_config.py (new file, 179 lines)
@@ -0,0 +1,179 @@
import json
import os
import sys
import re

from typing import Any
from django.core.management.base import BaseCommand
from django.conf import settings
from awx.conf import settings_registry


class Command(BaseCommand):
    help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'

    DAB_SAML_AUTHENTICATOR_KEYS = {
        "SP_ENTITY_ID": True,
        "SP_PUBLIC_CERT": True,
        "SP_PRIVATE_KEY": True,
        "ORG_INFO": True,
        "TECHNICAL_CONTACT": True,
        "SUPPORT_CONTACT": True,
        "SP_EXTRA": False,
        "SECURITY_CONFIG": False,
        "EXTRA_DATA": False,
        "ENABLED_IDPS": True,
        "CALLBACK_URL": False,
    }

    DAB_LDAP_AUTHENTICATOR_KEYS = {
        "SERVER_URI": True,
        "BIND_DN": False,
        "BIND_PASSWORD": False,
        "CONNECTION_OPTIONS": False,
        "GROUP_TYPE": True,
        "GROUP_TYPE_PARAMS": True,
        "GROUP_SEARCH": False,
        "START_TLS": False,
        "USER_DN_TEMPLATE": True,
        "USER_ATTR_MAP": True,
        "USER_SEARCH": False,
    }

    def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
        awx_ldap_settings = {}

        for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
            key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
            value = getattr(settings, awx_ldap_setting, None)
            awx_ldap_settings[key] = value

        grouped_settings = {}

        for key, value in awx_ldap_settings.items():
            match = re.search(r'(\d+)', key)
            index = int(match.group()) if match else 0
            new_key = re.sub(r'\d+_', '', key)

            if index not in grouped_settings:
                grouped_settings[index] = {}

            grouped_settings[index][new_key] = value
            if new_key == "GROUP_TYPE" and value:
                grouped_settings[index][new_key] = type(value).__name__

            if new_key == "SERVER_URI" and value:
                value = value.split(", ")

        return grouped_settings

    def is_enabled(self, settings, keys):
        for key, required in keys.items():
            if required and not settings.get(key):
                return False
        return True

    def get_awx_saml_settings(self) -> dict[str, Any]:
        awx_saml_settings = {}
        for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
            awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)

        return awx_saml_settings

    def format_config_data(self, enabled, awx_settings, type, keys, name):
        config = {
            "type": f"awx.authentication.authenticator_plugins.{type}",
            "name": name,
            "enabled": enabled,
            "create_objects": True,
            "users_unique": False,
            "remove_users": True,
            "configuration": {},
        }
        for k in keys:
            v = awx_settings.get(k)
            config["configuration"].update({k: v})

        if type == "saml":
            idp_to_key_mapping = {
                "url": "IDP_URL",
                "x509cert": "IDP_X509_CERT",
                "entity_id": "IDP_ENTITY_ID",
                "attr_email": "IDP_ATTR_EMAIL",
                "attr_groups": "IDP_GROUPS",
                "attr_username": "IDP_ATTR_USERNAME",
                "attr_last_name": "IDP_ATTR_LAST_NAME",
                "attr_first_name": "IDP_ATTR_FIRST_NAME",
                "attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
            }
            for idp_name in awx_settings.get("ENABLED_IDPS", {}):
                for key in idp_to_key_mapping:
                    value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
                    if value is not None:
                        config["name"] = idp_name
                        config["configuration"].update({idp_to_key_mapping[key]: value})

        return config

    def add_arguments(self, parser):
        parser.add_argument(
            "output_file",
            nargs="?",
            type=str,
            default=None,
            help="Output JSON file path",
        )

    def handle(self, *args, **options):
        try:
            data = []

            # dump SAML settings
            awx_saml_settings = self.get_awx_saml_settings()
            awx_saml_enabled = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
            if awx_saml_enabled:
                awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
                data.append(
                    self.format_config_data(
                        awx_saml_enabled,
                        awx_saml_settings,
                        "saml",
                        self.DAB_SAML_AUTHENTICATOR_KEYS,
                        awx_saml_name,
                    )
                )

            # dump LDAP settings
            awx_ldap_group_settings = self.get_awx_ldap_settings()
            for awx_ldap_name, awx_ldap_settings in enumerate(awx_ldap_group_settings.values()):
                enabled = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
                if enabled:
                    data.append(
                        self.format_config_data(
                            enabled,
                            awx_ldap_settings,
                            "ldap",
                            self.DAB_LDAP_AUTHENTICATOR_KEYS,
                            str(awx_ldap_name),
                        )
                    )

            # write to file if requested
            if options["output_file"]:
                # Define the path for the output JSON file
                output_file = options["output_file"]

                # Ensure the directory exists
                os.makedirs(os.path.dirname(output_file), exist_ok=True)

                # Write data to the JSON file
                with open(output_file, "w") as f:
                    json.dump(data, f, indent=4)

                self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
            else:
                self.stdout.write(json.dumps(data, indent=4))

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
            sys.exit(1)
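
A hedged usage sketch for the command above, mirroring the unit test later in this changeset; call_command is Django's standard entry point, and the command prints JSON to stdout when no output file argument is given:

from io import StringIO
import json
from django.core.management import call_command

out = StringIO()
call_command("dump_auth_config", stdout=out)
authenticators = json.loads(out.getvalue())
for auth in authenticators:
    print(auth["type"], auth["name"], auth["enabled"])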
@@ -92,8 +92,6 @@ class Command(BaseCommand):
        return host_stats

    def handle(self, *arg, **options):
        WebsocketsMetricsServer().start()

        # it's necessary to delay this import in case
        # database migrations are still running
        from awx.main.models.ha import Instance
@@ -166,8 +164,15 @@ class Command(BaseCommand):

            return

        try:
            websocket_relay_manager = WebSocketRelayManager()
            asyncio.run(websocket_relay_manager.run())
        except KeyboardInterrupt:
            logger.info('Terminating Websocket Relayer')
        WebsocketsMetricsServer().start()
        websocket_relay_manager = WebSocketRelayManager()

        while True:
            try:
                asyncio.run(websocket_relay_manager.run())
            except KeyboardInterrupt:
                logger.info('Shutting down Websocket Relayer')
                break
            except Exception as e:
                logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
                time.sleep(10)
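
The restart behavior added above, distilled into a standalone sketch: run the relay until KeyboardInterrupt stops it cleanly, and on any other exception log and retry after a pause instead of letting the process die. Names here are illustrative, not AWX's:

import asyncio
import time

def supervise(coro_factory, logger, delay=10):
    # Run forever; a clean Ctrl-C breaks out, anything else restarts after `delay` seconds.
    while True:
        try:
            asyncio.run(coro_factory())
        except KeyboardInterrupt:
            logger.info('Shutting down Websocket Relayer')
            break
        except Exception as e:
            logger.exception('Error in Websocket Relayer, exception: {}. Restarting in {} seconds'.format(e, delay))
            time.sleep(delay)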
@@ -5,11 +5,12 @@ import logging
import threading
import time
import urllib.parse
from pathlib import Path

from django.conf import settings
from django.contrib.auth import logout
from django.contrib.auth.models import User
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.recorder import MigrationRecorder
from django.db import connection
from django.shortcuts import redirect
from django.apps import apps
@@ -17,9 +18,11 @@ from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import gettext_lazy as _
from django.urls import reverse, resolve

from awx.main import migrations
from awx.main.utils.named_url_graph import generate_graph, GraphNode
from awx.conf import fields, register
from awx.main.utils.profiling import AWXProfiler
from awx.main.utils.common import memoize


logger = logging.getLogger('awx.main.middleware')
@@ -198,9 +201,22 @@ class URLModificationMiddleware(MiddlewareMixin):
        request.path_info = new_path


@memoize(ttl=20)
def is_migrating():
    latest_number = 0
    latest_name = ''
    for migration_path in Path(migrations.__path__[0]).glob('[0-9]*.py'):
        try:
            migration_number = int(migration_path.name.split('_', 1)[0])
        except ValueError:
            continue
        if migration_number > latest_number:
            latest_number = migration_number
            latest_name = migration_path.name[: -len('.py')]
    return not MigrationRecorder(connection).migration_qs.filter(app='main', name=latest_name).exists()


class MigrationRanCheckMiddleware(MiddlewareMixin):
    def process_request(self, request):
        executor = MigrationExecutor(connection)
        plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
        if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
        if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
            return redirect(reverse("ui:migrations_notran"))
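
Why the middleware change above is cheap: is_migrating() is wrapped in @memoize(ttl=20), so the migration-state query runs at most once every 20 seconds rather than on every request. A simplified stand-in for the real decorator (AWX's memoize in awx.main.utils.common differs in its cache store), for illustration only:

import time
import functools

def memoize(ttl):
    def decorator(fn):
        cache = {}
        @functools.wraps(fn)
        def wrapper():
            # Recompute only when no cached value exists or the TTL has expired.
            now = time.monotonic()
            if 'value' not in cache or now - cache['at'] > ttl:
                cache['value'], cache['at'] = fn(), now
            return cache['value']
        return wrapper
    return decorator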
@@ -0,0 +1,59 @@
# Generated by Django 4.2.6 on 2024-02-15 20:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0189_inbound_hop_nodes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                ],
                default=None,
                max_length=32,
            ),
        ),
    ]
@@ -6,6 +6,8 @@ from django.conf import settings  # noqa
from django.db import connection
from django.db.models.signals import pre_delete  # noqa

# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
from ansible_base.lib.utils.models import prevent_search

# AWX
@@ -99,6 +101,7 @@ from awx.main.access import get_user_queryset, check_user_access, check_user_acc
User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))


def convert_jsonfields():

@@ -925,6 +925,7 @@ class InventorySourceOptions(BaseModel):
        ('rhv', _('Red Hat Virtualization')),
        ('controller', _('Red Hat Ansible Automation Platform')),
        ('insights', _('Red Hat Insights')),
        ('terraform', _('Terraform State')),
    ]

    # From the options of the Django management base command
@@ -1630,6 +1631,20 @@ class satellite6(PluginFileInjector):
        return ret


class terraform(PluginFileInjector):
    plugin_name = 'terraform_state'
    base_injector = 'managed'
    namespace = 'cloud'
    collection = 'terraform'
    use_fqcn = True

    def inventory_as_dict(self, inventory_update, private_data_dir):
        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, None)
        ret = super().inventory_as_dict(inventory_update, private_data_dir)
        ret['backend_config_files'] = env["TF_BACKEND_CONFIG_FILE"]
        return ret


class controller(PluginFileInjector):
    plugin_name = 'tower'  # TODO: relying on routing for now, update after EEs pick up revised collection
    base_injector = 'template'
@@ -5,6 +5,7 @@ from copy import deepcopy
import datetime
import logging
import json
import traceback

from django.db import models
from django.conf import settings
@@ -484,14 +485,29 @@ class JobNotificationMixin(object):
        if msg_template:
            try:
                msg = env.from_string(msg_template).render(**context)
            except (TemplateSyntaxError, UndefinedError, SecurityError):
                msg = ''
            except (TemplateSyntaxError, UndefinedError, SecurityError) as e:
                msg = '\r\n'.join([e.message, ''.join(traceback.format_exception(None, e, e.__traceback__)).replace('\n', '\r\n')])

        if body_template:
            try:
                body = env.from_string(body_template).render(**context)
            except (TemplateSyntaxError, UndefinedError, SecurityError):
                body = ''
            except (TemplateSyntaxError, UndefinedError, SecurityError) as e:
                body = '\r\n'.join([e.message, ''.join(traceback.format_exception(None, e, e.__traceback__)).replace('\n', '\r\n')])

        # https://datatracker.ietf.org/doc/html/rfc2822#section-2.2
        # Body should have at least 2 CRLFs; some clients will interpret
        # an email with a blank body incorrectly, so we check for that

        if len(body.strip().splitlines()) < 1:
            # blank body
            body = '\r\n'.join(
                [
                    "The template rendering returned a blank body.",
                    "Please check the template.",
                    "Refer to https://github.com/ansible/awx/issues/13983",
                    "for further information.",
                ]
            )

        return (msg, body)
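
A minimal sketch of the error surfacing added above: a template failure now produces a CRLF-joined message carrying the Jinja2 error and traceback instead of an empty string. The sample template is illustrative:

import traceback
from jinja2 import TemplateSyntaxError
from jinja2.sandbox import SandboxedEnvironment

env = SandboxedEnvironment()
try:
    # Unclosed variable expression raises TemplateSyntaxError at compile time.
    msg = env.from_string("{{ job.status ").render(job={"status": "failed"})
except TemplateSyntaxError as e:
    msg = '\r\n'.join([e.message, ''.join(traceback.format_exception(None, e, e.__traceback__)).replace('\n', '\r\n')])
print(msg.splitlines()[0])  # the Jinja2 syntax error message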
@@ -10,6 +10,8 @@ from django.contrib.sessions.models import Session
from django.utils.timezone import now as tz_now
from django.utils.translation import gettext_lazy as _

# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField

# AWX
from awx.api.versioning import reverse
@@ -103,6 +105,7 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
    approval_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    resource = AnsibleResourceField(primary_key_field="id")

    def get_absolute_url(self, request=None):
        return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)
@@ -151,6 +154,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
    read_role = ImplicitRoleField(
        parent_role=['organization.auditor_role', 'member_role'],
    )
    resource = AnsibleResourceField(primary_key_field="id")

    def get_absolute_url(self, request=None):
        return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)

@@ -1599,7 +1599,8 @@ class UnifiedJob(
            extra["controller_node"] = self.controller_node or "NOT_SET"
        elif state == "execution_node_chosen":
            extra["execution_node"] = self.execution_node or "NOT_SET"
        logger_job_lifecycle.info(msg, extra=extra)

        logger_job_lifecycle.info(f"{msg} {json.dumps(extra)}")

    @property
    def launched_by(self):
@@ -1,5 +1,6 @@
# Copyright (c) 2019 Ansible, Inc.
# All Rights Reserved.
# -*-coding:utf-8-*-


class CustomNotificationBase(object):

@@ -12,6 +12,7 @@ from . import consumers


logger = logging.getLogger('awx.main.routing')
_application = None


class AWXProtocolTypeRouter(ProtocolTypeRouter):
@@ -27,14 +28,91 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter):
        super().__init__(*args, **kwargs)


class MultipleURLRouterAdapter:
    """
    Django channels doesn't nicely support Auth_1(urls_1), Auth_2(urls_2), ..., Auth_n(urls_n)
    This class allows associating a websocket url with an auth
    Ordering matters. The first matching url will be used.
    """

    def __init__(self, *auths):
        self._auths = [a for a in auths]

    async def __call__(self, scope, receive, send):
        """
        Loop through the list of passed in URLRouter's (they may or may not be wrapped by auth).
        We know we have exhausted the list of URLRouter patterns when we get a
        ValueError('No route found for path %s'). When that happens, move onto the next
        URLRouter.
        If the final URLRouter raises an error, re-raise it in the end.

        We know that we found a match when no error is raised, end the loop.
        """
        last_index = len(self._auths) - 1
        for i, auth in enumerate(self._auths):
            try:
                return await auth.__call__(scope, receive, send)
            except ValueError as e:
                if str(e).startswith('No route found for path'):
                    # Only surface the error if on the last URLRouter
                    if i == last_index:
                        raise


websocket_urlpatterns = [
    re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
    re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
]
websocket_relay_urlpatterns = [
    re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
]

application = AWXProtocolTypeRouter(
    {
        'websocket': DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
    }
)

def application_func(cls=AWXProtocolTypeRouter) -> ProtocolTypeRouter:
    return cls(
        {
            'websocket': MultipleURLRouterAdapter(
                URLRouter(websocket_relay_urlpatterns),
                DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
            )
        }
    )


def __getattr__(name: str) -> ProtocolTypeRouter:
    """
    Defer instantiating application.
    For testing, we just need it to NOT run on import.

    https://peps.python.org/pep-0562/#specification

    Normally, someone would get application from this module via:
        from awx.main.routing import application

    and do something with the application:
        application.do_something()

    What does the callstack look like when the import runs?
        ...
        awx.main.routing.__getattribute__(...)  # <-- we don't define this so NOOP as far as we are concerned
        if '__getattr__' in awx.main.routing.__dict__:  # <-- this triggers the function we are in
            return awx.main.routing.__dict__.__getattr__("application")

    Why isn't this function simply implemented as:
        def __getattr__(name):
            if not _application:
                _application = application_func()
            return _application

    It could. I manually tested it and it passes test_routing.py.

    But my understanding after reading the PEP-0562 specification link above is that
    performance would be a bit worse due to the extra __getattribute__ calls when
    we reference non-global variables.
    """
    if name == "application":
        globs = globals()
        if not globs['_application']:
            globs['_application'] = application_func()
        return globs['_application']
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
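
The PEP 562 pattern above in isolation: a module-level __getattr__ lets `from mymod import application` trigger lazy construction on first access. Module and factory names here are illustrative, not from AWX:

# mymod.py
_application = None

def _build_application():
    return {"routes": []}  # stand-in for application_func()

def __getattr__(name):
    global _application
    if name == "application":
        if _application is None:
            _application = _build_application()
        return _application
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")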
@@ -29,7 +29,7 @@ class RunnerCallback:
        self.safe_env = {}
        self.event_ct = 0
        self.model = model
        self.update_attempts = int(settings.DISPATCHER_DB_DOWNTIME_TOLERANCE / 5)
        self.update_attempts = int(getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE) / 5)
        self.wrapup_event_dispatched = False
        self.artifacts_processed = False
        self.extra_update_fields = {}

@@ -114,7 +114,7 @@ class BaseTask(object):

    def __init__(self):
        self.cleanup_paths = []
        self.update_attempts = int(settings.DISPATCHER_DB_DOWNTIME_TOLERANCE / 5)
        self.update_attempts = int(getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE) / 5)
        self.runner_callback = self.callback_class(model=self.model)

    def update_model(self, pk, _attempt=0, **updates):
@@ -49,6 +49,70 @@ class ReceptorConnectionType(Enum):
    STREAMTLS = 2


"""
Translate receptorctl messages that come in over stdout into
structured messages. Currently, these are error messages.
"""


class ReceptorErrorBase:
    _MESSAGE = 'Receptor Error'

    def __init__(self, node: str = 'N/A', state_name: str = 'N/A'):
        self.node = node
        self.state_name = state_name

    def __str__(self):
        return f"{self.__class__.__name__} '{self._MESSAGE}' on node '{self.node}' with state '{self.state_name}'"


class WorkUnitError(ReceptorErrorBase):
    _MESSAGE = 'unknown work unit '

    def __init__(self, work_unit_id: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.work_unit_id = work_unit_id

    def __str__(self):
        return f"{super().__str__()} work unit id '{self.work_unit_id}'"


class WorkUnitCancelError(WorkUnitError):
    _MESSAGE = 'error cancelling remote unit: unknown work unit '


class WorkUnitResultsError(WorkUnitError):
    _MESSAGE = 'Failed to get results: unknown work unit '


class UnknownError(ReceptorErrorBase):
    _MESSAGE = 'Unknown receptor ctl error'

    def __init__(self, msg, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._MESSAGE = msg


class FuzzyError:
    def __new__(self, e: RuntimeError, node: str, state_name: str):
        """
        At the time of writing this comment, all of the sub-class detection
        is centralized in this parent class. It acts like a router.
        Someone may find it better to push down the error detection logic into
        each sub-class.
        """
        msg = e.args[0]

        common_startswith = (WorkUnitCancelError, WorkUnitResultsError, WorkUnitError)

        for klass in common_startswith:
            if msg.startswith(klass._MESSAGE):
                work_unit_id = msg[len(klass._MESSAGE) :]
                return klass(work_unit_id, node=node, state_name=state_name)

        return UnknownError(msg, node=node, state_name=state_name)


def read_receptor_config():
    # for K8S deployments, getting a lock is necessary as another process
    # may be re-writing the config at this time
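
A hedged usage sketch of the FuzzyError router above: a RuntimeError raised by receptorctl is classified by matching its message against the known receptor error prefixes, and unmatched messages fall through to UnknownError. Node and state values are illustrative:

try:
    raise RuntimeError("unknown work unit AbCdEf12")
except RuntimeError as e:
    err = FuzzyError(e, node="node-1", state_name="Failed")
    assert isinstance(err, WorkUnitError)
    # -> WorkUnitError 'unknown work unit ' on node 'node-1' with state 'Failed' work unit id 'AbCdEf12'
    print(err)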
@@ -185,6 +249,7 @@ def run_until_complete(node, timing_data=None, **kwargs):
        timing_data['transmit_timing'] = run_start - transmit_start
    run_timing = 0.0
    stdout = ''
    state_name = 'local var never set'

    try:
        resultfile = receptor_ctl.get_work_results(unit_id)
@@ -205,13 +270,33 @@ def run_until_complete(node, timing_data=None, **kwargs):
        stdout = resultfile.read()
        stdout = str(stdout, encoding='utf-8')

    except RuntimeError as e:
        receptor_e = FuzzyError(e, node, state_name)
        if type(receptor_e) in (
            WorkUnitError,
            WorkUnitResultsError,
        ):
            logger.warning(f'While consuming job results: {receptor_e}')
        else:
            raise
    finally:
        if settings.RECEPTOR_RELEASE_WORK:
            res = receptor_ctl.simple_command(f"work release {unit_id}")
            if res != {'released': unit_id}:
                logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
            try:
                res = receptor_ctl.simple_command(f"work release {unit_id}")

    receptor_ctl.close()
                if res != {'released': unit_id}:
                    logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')

                receptor_ctl.close()
            except RuntimeError as e:
                receptor_e = FuzzyError(e, node, state_name)
                if type(receptor_e) in (
                    WorkUnitError,
                    WorkUnitCancelError,
                ):
                    logger.warning(f"While releasing work: {receptor_e}")
                else:
                    logger.error(f"While releasing work: {receptor_e}")

    if state_name.lower() == 'failed':
        work_detail = status.get('Detail', '')
@@ -275,7 +360,7 @@ def _convert_args_to_cli(vargs):
    args = ['cleanup']
    for option in ('exclude_strings', 'remove_images'):
        if vargs.get(option):
            args.append('--{}={}'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
            args.append('--{}="{}"'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
    for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
        if vargs.get(option) is True:
            args.append('--{}'.format(option.replace('_', '-')))
@@ -6,6 +6,7 @@ import itertools
import json
import logging
import os
import psycopg
from io import StringIO
from contextlib import redirect_stdout
import shutil
@@ -416,7 +417,7 @@ def handle_removed_image(remove_images=None):

@task(queue=get_task_queuename)
def cleanup_images_and_files():
    _cleanup_images_and_files()
    _cleanup_images_and_files(image_prune=True)


@task(queue=get_task_queuename)
@@ -630,10 +631,18 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
                logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))

        except DatabaseError as e:
            if 'did not affect any rows' in str(e):
                logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
            cause = e.__cause__
            if cause and hasattr(cause, 'sqlstate'):
                sqlstate = cause.sqlstate
                sqlstate_str = psycopg.errors.lookup(sqlstate)
                logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))

                if sqlstate == psycopg.errors.NoData:
                    logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
                else:
                    logger.exception("Error marking {} as lost.".format(other_inst.hostname))
            else:
                logger.exception('Error marking {} as lost'.format(other_inst.hostname))
                logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))

        # Run local reaper
        if worker_tasks is not None:
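
The introspection pattern shared by both DatabaseError handlers above, in isolation: Django chains the driver exception as __cause__, and psycopg 3 exposes the five-character SQLSTATE plus an errors.lookup() back to an exception class. Function name is illustrative:

import psycopg
from django.db import DatabaseError

def describe_db_error(e: DatabaseError) -> str:
    # Django wraps the psycopg exception; the SQLSTATE lives on the cause.
    cause = e.__cause__
    if cause is not None and getattr(cause, 'sqlstate', None):
        return '{} - {}'.format(cause.sqlstate, psycopg.errors.lookup(cause.sqlstate))
    return 'no SQL state available'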
@@ -788,10 +797,19 @@ def update_inventory_computed_fields(inventory_id):
    try:
        i.update_computed_fields()
    except DatabaseError as e:
        if 'did not affect any rows' in str(e):
            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
            return
        raise
        # https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
        # django raises DatabaseError("Forced update did not affect any rows.")

        # if sqlstate is set then there was a database error; otherwise re-raise the original error
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_str = psycopg.errors.lookup(sqlstate)
            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
            raise

        # otherwise
        logger.debug('Exiting duplicate update_inventory_computed_fields task.')


def update_smart_memberships_for_inventory(smart_inventory):

@@ -3,5 +3,5 @@
  hosts: all
  tasks:
    - name: Hello Message
      debug:
      ansible.builtin.debug:
        msg: "Hello World!"
awx/main/tests/data/inventory/plugins/terraform/env.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
    "TF_BACKEND_CONFIG_FILE": "{{ file_reference }}"
}
@@ -1,13 +1,8 @@
from awx.main.tests.functional.conftest import *  # noqa
import os
import pytest


def pytest_addoption(parser):
    parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")


def pytest_generate_tests(metafunc):
    # This is called for every test. Only get/set command line arguments
    # if the argument is specified in the list of test "fixturenames".
    option_value = metafunc.config.option.release
    if 'release' in metafunc.fixturenames and option_value is not None:
        metafunc.parametrize("release", [option_value])
@pytest.fixture()
def release():
    return os.environ.get('VERSION_TARGET', '')
@@ -6,7 +6,7 @@ from django.test import Client
from rest_framework.test import APIRequestFactory

from awx.api.generics import LoggedLoginView
from awx.api.versioning import drf_reverse
from rest_framework.reverse import reverse as drf_reverse


@pytest.mark.django_db

@@ -8,8 +8,10 @@ from django.db import connection
from django.test.utils import override_settings
from django.utils.encoding import smart_str, smart_bytes

from rest_framework.reverse import reverse as drf_reverse

from awx.main.utils.encryption import decrypt_value, get_encryption_key
from awx.api.versioning import reverse, drf_reverse
from awx.api.versioning import reverse
from awx.main.models.oauth import OAuth2Application as Application, OAuth2AccessToken as AccessToken
from awx.main.tests.functional import immediate_on_commit
from awx.sso.models import UserEnterpriseAuth

@@ -3,15 +3,19 @@ import pytest
from unittest import mock
import urllib.parse
from unittest.mock import PropertyMock
import importlib

# Django
from django.urls import resolve
from django.http import Http404
from django.apps import apps
from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from django.db.backends.sqlite3.base import SQLiteCursorWrapper

from django.db.models.signals import post_migrate

# AWX
from awx.main.models.projects import Project
from awx.main.models.ha import Instance
@@ -41,10 +45,19 @@ from awx.main.models.workflow import WorkflowJobTemplate
from awx.main.models.ad_hoc_commands import AdHocCommand
from awx.main.models.oauth import OAuth2Application as Application
from awx.main.models.execution_environments import ExecutionEnvironment
from awx.main.utils import is_testing

__SWAGGER_REQUESTS__ = {}


# HACK: the dab_resource_registry app requires the ServiceID created in migrations, which checks do not run
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')


if is_testing():
    post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))


@pytest.fixture(scope="session")
def swagger_autogen(requests=__SWAGGER_REQUESTS__):
    return requests

@@ -0,0 +1,39 @@
import pytest

from ansible_base.resource_registry.models import Resource

from awx.api.versioning import reverse


def assert_has_resource(list_response, obj=None):
    data = list_response.data
    assert 'resource' in data['results'][0]['summary_fields']
    resource_data = data['results'][0]['summary_fields']['resource']
    assert resource_data['ansible_id']
    resource = Resource.objects.filter(ansible_id=resource_data['ansible_id']).first()
    assert resource
    assert resource.content_object
    if obj:
        objects = [Resource.objects.get(ansible_id=entry['summary_fields']['resource']['ansible_id']).content_object for entry in data['results']]
        assert obj in objects


@pytest.mark.django_db
def test_organization_ansible_id(organization, admin_user, get):
    url = reverse('api:organization_list')
    response = get(url=url, user=admin_user, expect=200)
    assert_has_resource(response, obj=organization)


@pytest.mark.django_db
def test_team_ansible_id(team, admin_user, get):
    url = reverse('api:team_list')
    response = get(url=url, user=admin_user, expect=200)
    assert_has_resource(response, obj=team)


@pytest.mark.django_db
def test_user_ansible_id(rando, admin_user, get):
    url = reverse('api:user_list')
    response = get(url=url, user=admin_user, expect=200)
    assert_has_resource(response, obj=rando)
@@ -193,6 +193,7 @@ class TestInventorySourceInjectors:
            ('satellite6', 'theforeman.foreman.foreman'),
            ('insights', 'redhatinsights.insights.insights'),
            ('controller', 'awx.awx.tower'),
            ('terraform', 'cloud.terraform.terraform_state'),
        ],
    )
    def test_plugin_proper_names(self, source, proper_name):

@@ -107,6 +107,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
        for filename in os.listdir(os.path.join(private_data_dir, subdir)):
            filename_list.append(os.path.join(subdir, filename))
    filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
    inventory_content = ""
    for filename in filename_list:
        if filename in ('args', 'project'):
            continue  # Ansible runner
@@ -130,6 +131,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
            dir_contents[abs_file_path] = f.read()
        # Declare a reference to inventory plugin file if it exists
        if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]:
            inventory_content = dir_contents[abs_file_path]
            referenced_paths.add(abs_file_path)  # used as inventory file
        elif cache_file_regex.match(abs_file_path):
            file_aliases[abs_file_path] = 'cache_file'
@@ -157,7 +159,11 @@ def read_content(private_data_dir, raw_env, inventory_update):
    content = {}
    for abs_file_path, file_content in dir_contents.items():
        # assert that all files laid down are used
        if abs_file_path not in referenced_paths and abs_file_path not in ignore_files:
        if (
            abs_file_path not in referenced_paths
            and to_container_path(abs_file_path, private_data_dir) not in inventory_content
            and abs_file_path not in ignore_files
        ):
            raise AssertionError(
                "File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4))
            )

@@ -411,14 +411,14 @@ def test_project_delete(delete, organization, admin_user):


@pytest.mark.parametrize(
    'order_by, expected_names, expected_ids',
    'order_by, expected_names',
    [
        ('name', ['alice project', 'bob project', 'shared project'], [1, 2, 3]),
        ('-name', ['shared project', 'bob project', 'alice project'], [3, 2, 1]),
        ('name', ['alice project', 'bob project', 'shared project']),
        ('-name', ['shared project', 'bob project', 'alice project']),
    ],
)
@pytest.mark.django_db
def test_project_list_ordering_by_name(get, order_by, expected_names, expected_ids, organization_factory):
def test_project_list_ordering_by_name(get, order_by, expected_names, organization_factory):
    'ensure sorted order of project list is maintained correctly when the requested order is invalid or not applicable'
    objects = organization_factory(
        'org1',
@@ -426,13 +426,11 @@ def test_project_list_ordering_by_name(get, order_by, expected_names, expected_i
        superusers=['admin'],
    )
    project_names = []
    project_ids = []
    # TODO: ask for an order by here that doesn't apply
    results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
    for x in range(len(results)):
        project_names.append(results[x]['name'])
        project_ids.append(results[x]['id'])
    assert project_names == expected_names and project_ids == expected_ids
    assert project_names == expected_names


@pytest.mark.parametrize('order_by', ('name', '-name'))
@@ -450,7 +448,8 @@ def test_project_list_ordering_with_duplicate_names(get, order_by, organization_
    for x in range(3):
        results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
        project_ids[x] = [proj['id'] for proj in results]
    assert project_ids[0] == project_ids[1] == project_ids[2] == [1, 2, 3, 4, 5]
    assert project_ids[0] == project_ids[1] == project_ids[2]
    assert project_ids[0] == sorted(project_ids[0])


@pytest.mark.django_db
awx/main/tests/functional/test_routing.py (new file, 90 lines)
@@ -0,0 +1,90 @@
import pytest

from django.contrib.auth.models import AnonymousUser

from channels.routing import ProtocolTypeRouter
from channels.testing.websocket import WebsocketCommunicator


from awx.main.consumers import WebsocketSecretAuthHelper


@pytest.fixture
def application():
    # code in routing hits the db on import because .. settings cache
    from awx.main.routing import application_func

    yield application_func(ProtocolTypeRouter)


@pytest.fixture
def websocket_server_generator(application):
    def fn(endpoint):
        return WebsocketCommunicator(application, endpoint)

    return fn


@pytest.mark.asyncio
@pytest.mark.django_db
class TestWebsocketRelay:
    @pytest.fixture
    def websocket_relay_secret_generator(self, settings):
        def fn(secret, set_broadcast_websocket_secret=False):
            secret_backup = settings.BROADCAST_WEBSOCKET_SECRET
            settings.BROADCAST_WEBSOCKET_SECRET = 'foobar'
            res = ('secret'.encode('utf-8'), WebsocketSecretAuthHelper.construct_secret().encode('utf-8'))
            if set_broadcast_websocket_secret is False:
                settings.BROADCAST_WEBSOCKET_SECRET = secret_backup
            return res

        return fn

    @pytest.fixture
    def websocket_relay_secret(self, settings, websocket_relay_secret_generator):
        return websocket_relay_secret_generator('foobar', set_broadcast_websocket_secret=True)

    async def test_authorized(self, websocket_server_generator, websocket_relay_secret):
        server = websocket_server_generator('/websocket/relay/')

        server.scope['headers'] = (websocket_relay_secret,)
        connected, _ = await server.connect()
        assert connected is True

    async def test_not_authorized(self, websocket_server_generator):
        server = websocket_server_generator('/websocket/relay/')
        connected, _ = await server.connect()
        assert connected is False, "Connection to the relay websocket without auth. We expected the client to be denied."

    async def test_wrong_secret(self, websocket_server_generator, websocket_relay_secret_generator):
        server = websocket_server_generator('/websocket/relay/')

        server.scope['headers'] = (websocket_relay_secret_generator('foobar', set_broadcast_websocket_secret=False),)
        connected, _ = await server.connect()
        assert connected is False


@pytest.mark.asyncio
@pytest.mark.django_db
class TestWebsocketEventConsumer:
    async def test_unauthorized_anonymous(self, websocket_server_generator):
        server = websocket_server_generator('/websocket/')

        server.scope['user'] = AnonymousUser()
        connected, _ = await server.connect()
        assert connected is False, "Anonymous user should NOT be allowed to login."

    @pytest.mark.skip(reason="Ran out of coding time.")
    async def test_authorized(self, websocket_server_generator, application, admin):
        server = websocket_server_generator('/websocket/')

        """
        I ran out of time. Here is what I was thinking ...
        Inject a valid session into the cookies in the header

        server.scope['headers'] = (
            (b'cookie', ...),
        )
        """
        connected, _ = await server.connect()
        assert connected is True, "User should be allowed in via cookies auth via a session key in the cookies"
@@ -1,11 +1,6 @@
# Python
from unittest import mock
import uuid

# patch python-ldap
with mock.patch('__main__.__builtins__.dir', return_value=[]):
    import ldap  # NOQA

# Load development settings for base variables.
from awx.settings.development import *  # NOQA
awx/main/tests/unit/commands/test_dump_auth_config.py (new file, 122 lines)
@@ -0,0 +1,122 @@
from io import StringIO
import json
from django.core.management import call_command
from django.test import TestCase, override_settings


settings_dict = {
    "SOCIAL_AUTH_SAML_SP_ENTITY_ID": "SP_ENTITY_ID",
    "SOCIAL_AUTH_SAML_SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
    "SOCIAL_AUTH_SAML_SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
    "SOCIAL_AUTH_SAML_ORG_INFO": "ORG_INFO",
    "SOCIAL_AUTH_SAML_TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
    "SOCIAL_AUTH_SAML_SUPPORT_CONTACT": "SUPPORT_CONTACT",
    "SOCIAL_AUTH_SAML_SP_EXTRA": "SP_EXTRA",
    "SOCIAL_AUTH_SAML_SECURITY_CONFIG": "SECURITY_CONFIG",
    "SOCIAL_AUTH_SAML_EXTRA_DATA": "EXTRA_DATA",
    "SOCIAL_AUTH_SAML_ENABLED_IDPS": {
        "Keycloak": {
            "attr_last_name": "last_name",
            "attr_groups": "groups",
            "attr_email": "email",
            "attr_user_permanent_id": "name_id",
            "attr_username": "username",
            "entity_id": "https://example.com/auth/realms/awx",
            "url": "https://example.com/auth/realms/awx/protocol/saml",
            "x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
            "attr_first_name": "first_name",
        }
    },
    "SOCIAL_AUTH_SAML_CALLBACK_URL": "CALLBACK_URL",
    "AUTH_LDAP_1_SERVER_URI": "SERVER_URI",
    "AUTH_LDAP_1_BIND_DN": "BIND_DN",
    "AUTH_LDAP_1_BIND_PASSWORD": "BIND_PASSWORD",
    "AUTH_LDAP_1_GROUP_SEARCH": ["GROUP_SEARCH"],
    "AUTH_LDAP_1_GROUP_TYPE": "string object",
    "AUTH_LDAP_1_GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
    "AUTH_LDAP_1_USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
    "AUTH_LDAP_1_USER_SEARCH": ["USER_SEARCH"],
    "AUTH_LDAP_1_USER_ATTR_MAP": {
        "email": "email",
        "last_name": "last_name",
        "first_name": "first_name",
    },
    "AUTH_LDAP_1_CONNECTION_OPTIONS": {},
    "AUTH_LDAP_1_START_TLS": None,
}


@override_settings(**settings_dict)
class TestDumpAuthConfigCommand(TestCase):
    def setUp(self):
        super().setUp()
        self.expected_config = [
            {
                "type": "awx.authentication.authenticator_plugins.saml",
                "name": "Keycloak",
                "enabled": True,
                "create_objects": True,
                "users_unique": False,
                "remove_users": True,
                "configuration": {
                    "SP_ENTITY_ID": "SP_ENTITY_ID",
                    "SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
                    "SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
                    "ORG_INFO": "ORG_INFO",
                    "TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
                    "SUPPORT_CONTACT": "SUPPORT_CONTACT",
                    "SP_EXTRA": "SP_EXTRA",
                    "SECURITY_CONFIG": "SECURITY_CONFIG",
                    "EXTRA_DATA": "EXTRA_DATA",
                    "ENABLED_IDPS": {
                        "Keycloak": {
                            "attr_last_name": "last_name",
                            "attr_groups": "groups",
                            "attr_email": "email",
                            "attr_user_permanent_id": "name_id",
                            "attr_username": "username",
                            "entity_id": "https://example.com/auth/realms/awx",
                            "url": "https://example.com/auth/realms/awx/protocol/saml",
                            "x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
                            "attr_first_name": "first_name",
                        }
                    },
                    "CALLBACK_URL": "CALLBACK_URL",
                    "IDP_URL": "https://example.com/auth/realms/awx/protocol/saml",
                    "IDP_X509_CERT": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
                    "IDP_ENTITY_ID": "https://example.com/auth/realms/awx",
                    "IDP_ATTR_EMAIL": "email",
                    "IDP_GROUPS": "groups",
                    "IDP_ATTR_USERNAME": "username",
                    "IDP_ATTR_LAST_NAME": "last_name",
                    "IDP_ATTR_FIRST_NAME": "first_name",
                    "IDP_ATTR_USER_PERMANENT_ID": "name_id",
                },
            },
            {
                "type": "awx.authentication.authenticator_plugins.ldap",
                "name": "1",
                "enabled": True,
                "create_objects": True,
                "users_unique": False,
                "remove_users": True,
                "configuration": {
                    "SERVER_URI": "SERVER_URI",
                    "BIND_DN": "BIND_DN",
                    "BIND_PASSWORD": "BIND_PASSWORD",
                    "CONNECTION_OPTIONS": {},
                    "GROUP_TYPE": "str",
                    "GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
                    "GROUP_SEARCH": ["GROUP_SEARCH"],
                    "START_TLS": None,
                    "USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
                    "USER_ATTR_MAP": {"email": "email", "last_name": "last_name", "first_name": "first_name"},
                    "USER_SEARCH": ["USER_SEARCH"],
                },
            },
        ]

    def test_json_returned_from_cmd(self):
        output = StringIO()
        call_command("dump_auth_config", stdout=output)
        assert json.loads(output.getvalue()) == self.expected_config
awx/main/tests/unit/tasks/test_system.py (new file, 64 lines)
@@ -0,0 +1,64 @@
import pytest
|
||||
from unittest.mock import MagicMock, patch
|
||||
from awx.main.tasks.system import update_inventory_computed_fields
|
||||
from awx.main.models import Inventory
|
||||
from django.db import DatabaseError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_logger():
|
||||
with patch("awx.main.tasks.system.logger") as logger:
|
||||
yield logger
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_inventory():
|
||||
return MagicMock(spec=Inventory)
|
||||
|
||||
|
||||
def test_update_inventory_computed_fields_existing_inventory(mock_logger, mock_inventory):
|
||||
# Mocking the Inventory.objects.filter method to return a non-empty queryset
|
||||
with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
|
||||
mock_filter.return_value.exists.return_value = True
|
||||
mock_filter.return_value.__getitem__.return_value = mock_inventory
|
||||
|
||||
# Mocking the update_computed_fields method
|
||||
with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
|
||||
update_inventory_computed_fields(1)
|
||||
|
||||
# Assertions
|
||||
mock_filter.assert_called_once_with(id=1)
|
||||
mock_update_computed_fields.assert_called_once()
|
||||
|
||||
# You can add more assertions based on your specific requirements
|
||||
|
||||
|
||||
def test_update_inventory_computed_fields_missing_inventory(mock_logger):
|
||||
# Mocking the Inventory.objects.filter method to return an empty queryset
|
||||
with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
|
||||
mock_filter.return_value.exists.return_value = False
|
||||
|
||||
update_inventory_computed_fields(1)
|
||||
|
||||
# Assertions
|
||||
mock_filter.assert_called_once_with(id=1)
|
||||
mock_logger.error.assert_called_once_with("Update Inventory Computed Fields failed due to missing inventory: 1")
|
||||
|
||||
|
||||
def test_update_inventory_computed_fields_database_error_nosqlstate(mock_logger, mock_inventory):
|
||||
# Mocking the Inventory.objects.filter method to return a non-empty queryset
|
||||
with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
|
||||
mock_filter.return_value.exists.return_value = True
|
||||
mock_filter.return_value.__getitem__.return_value = mock_inventory
|
||||
|
||||
# Mocking the update_computed_fields method
|
||||
with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
|
||||
# Simulating the update_computed_fields method to explicitly raise a DatabaseError
|
||||
mock_update_computed_fields.side_effect = DatabaseError("Some error")
|
||||
|
||||
update_inventory_computed_fields(1)
|
||||
|
||||
# Assertions
|
||||
mock_filter.assert_called_once_with(id=1)
|
||||
mock_update_computed_fields.assert_called_once()
|
||||
mock_inventory.update_computed_fields.assert_called_once()
|
||||
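A rough sketch of the behavior these tests pin down, inferred from the assertions alone (the real task lives in awx.main.tasks.system and may differ in detail; `Inventory` and `logger` are assumed to be module-level names there):

    from django.db import DatabaseError

    def update_inventory_computed_fields(inventory_id):
        # Inferred shape: look the inventory up by id, log and bail if it is
        # missing, and treat a DatabaseError from update_computed_fields as
        # non-fatal (the last test asserts the call happened but no exception
        # escapes the task).
        qs = Inventory.objects.filter(id=inventory_id)
        if not qs.exists():
            logger.error("Update Inventory Computed Fields failed due to missing inventory: %s" % inventory_id)
            return
        inventory = qs[0]
        try:
            inventory.update_computed_fields()
        except DatabaseError as e:
            logger.warning(f"Database error updating computed fields for inventory {inventory_id}: {e}")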
@@ -121,6 +121,10 @@ def test_get_model_for_valid_type(model_type, model_class):
     assert common.get_model_for_type(model_type) == model_class


+def test_is_testing():
+    assert common.is_testing() is True
+
+
 @pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS])
 def test_get_capacity_type(model_type, model_class):
     if model_type in ('job', 'ad_hoc_command', 'inventory_update', 'job_template'):
@@ -3,7 +3,7 @@ from awx.main.tasks.receptor import _convert_args_to_cli


 def test_file_cleanup_scenario():
     args = _convert_args_to_cli({'exclude_strings': ['awx_423_', 'awx_582_'], 'file_pattern': '/tmp/awx_*_*'})
-    assert ' '.join(args) == 'cleanup --exclude-strings=awx_423_ awx_582_ --file-pattern=/tmp/awx_*_*'
+    assert ' '.join(args) == 'cleanup --exclude-strings="awx_423_ awx_582_" --file-pattern=/tmp/awx_*_*'


 def test_image_cleanup_scenario():
@@ -17,5 +17,5 @@ def test_image_cleanup_scenario():
         }
     )
     assert (
-        ' '.join(args) == 'cleanup --remove-images=quay.invalid/foo/bar:latest quay.invalid/foo/bar:devel --image-prune --process-isolation-executable=podman'
+        ' '.join(args) == 'cleanup --remove-images="quay.invalid/foo/bar:latest quay.invalid/foo/bar:devel" --image-prune --process-isolation-executable=podman'
     )
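The quoting change above groups multi-value options into a single shell word so receptorctl receives them as one argument. A hypothetical helper that reproduces the expected strings (not the actual `_convert_args_to_cli` implementation):

    def convert_args_to_cli(vargs):
        # Hypothetical re-implementation satisfying both assertions above:
        # list values are joined with spaces and wrapped in double quotes,
        # booleans become bare flags, everything else is --key=value.
        args = ['cleanup']
        for key, value in vargs.items():
            flag = f"--{key.replace('_', '-')}"
            if isinstance(value, list):
                args.append('{}="{}"'.format(flag, ' '.join(value)))
            elif value is True:
                args.append(flag)
            else:
                args.append(f'{flag}={value}')
        return args

    assert ' '.join(convert_args_to_cli({'exclude_strings': ['awx_423_', 'awx_582_'], 'file_pattern': '/tmp/awx_*_*'})) == (
        'cleanup --exclude-strings="awx_423_ awx_582_" --file-pattern=/tmp/awx_*_*'
    )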
@@ -7,6 +7,7 @@ import json
 import yaml
 import logging
 import time
+import psycopg
 import os
 import subprocess
 import re
@@ -23,7 +24,7 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
 from django.utils.dateparse import parse_datetime
 from django.utils.translation import gettext_lazy as _
 from django.utils.functional import cached_property
-from django.db import connection, transaction, ProgrammingError, IntegrityError
+from django.db import connection, DatabaseError, transaction, ProgrammingError, IntegrityError
 from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
 from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
 from django.db.models.query import QuerySet
@@ -136,7 +137,7 @@ def underscore_to_camelcase(s):
 @functools.cache
 def is_testing(argv=None):
     '''Return True if running django or py.test unit tests.'''
-    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
+    if os.environ.get('DJANGO_SETTINGS_MODULE') == 'awx.main.tests.settings_for_test':
         return True
     argv = sys.argv if argv is None else argv
     if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
@@ -1155,11 +1156,25 @@ def create_partition(tblname, start=None):
                 f'ALTER TABLE {tblname} ATTACH PARTITION {tblname}_{partition_label} '
                 f'FOR VALUES FROM (\'{start_timestamp}\') TO (\'{end_timestamp}\');'
             )
     except (ProgrammingError, IntegrityError) as e:
-        if 'already exists' in str(e):
-            logger.info(f'Caught known error due to partition creation race: {e}')
-        else:
-            raise
+        cause = e.__cause__
+        if cause and hasattr(cause, 'sqlstate'):
+            sqlstate = cause.sqlstate
+            sqlstate_cls = psycopg.errors.lookup(sqlstate)
+
+            if psycopg.errors.DuplicateTable == sqlstate_cls or psycopg.errors.UniqueViolation == sqlstate_cls:
+                logger.info(f'Caught known error due to partition creation race: {e}')
+            else:
+                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_cls))
+                raise
+    except DatabaseError as e:
+        cause = e.__cause__
+        if cause and hasattr(cause, 'sqlstate'):
+            sqlstate = cause.sqlstate
+            sqlstate_str = psycopg.errors.lookup(sqlstate)
+            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
+        raise


 def cleanup_new_process(func):
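For reference, `psycopg.errors.lookup()` maps a SQLSTATE code to its exception class, which is what the new branches above rely on. A standalone demonstration:

    import psycopg

    # '42P07' is duplicate_table and '23505' is unique_violation -- the two
    # states the partition-creation race can produce.
    for sqlstate in ('42P07', '23505'):
        cls = psycopg.errors.lookup(sqlstate)
        if cls in (psycopg.errors.DuplicateTable, psycopg.errors.UniqueViolation):
            print(f'{sqlstate}: benign race ({cls.__name__})')
        else:
            print(f'{sqlstate}: unexpected ({cls.__name__})')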
@@ -3,7 +3,6 @@
 from copy import copy
 import json
-import json_log_formatter
 import logging
 import traceback
 import socket
@@ -15,15 +14,6 @@ from django.core.serializers.json import DjangoJSONEncoder
 from django.conf import settings


-class JobLifeCycleFormatter(json_log_formatter.JSONFormatter):
-    def json_record(self, message: str, extra: dict, record: logging.LogRecord):
-        if 'time' not in extra:
-            extra['time'] = now()
-        if record.exc_info:
-            extra['exc_info'] = self.formatException(record.exc_info)
-        return extra
-
-
 class TimeFormatter(logging.Formatter):
     """
     Custom log formatter used for inventory imports
@@ -2,6 +2,7 @@ import json
 import logging
 import asyncio
 from typing import Dict
+from copy import deepcopy

 import ipaddress

@@ -302,20 +303,38 @@ class WebSocketRelayManager(object):
         self.stats_mgr.start()

         # Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
-        database_conf = settings.DATABASES['default']
-        async_conn = await psycopg.AsyncConnection.connect(
-            dbname=database_conf['NAME'],
-            host=database_conf['HOST'],
-            user=database_conf['USER'],
-            password=database_conf['PASSWORD'],
-            port=database_conf['PORT'],
-            **database_conf.get("OPTIONS", {}),
-        )
-        await async_conn.set_autocommit(True)
-        event_loop.create_task(self.on_ws_heartbeat(async_conn))
+        database_conf = deepcopy(settings.DATABASES['default'])
+        database_conf['OPTIONS'] = deepcopy(database_conf.get('OPTIONS', {}))
+
+        for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
+            database_conf[k] = v
+        for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
+            database_conf['OPTIONS'][k] = v
+
+        if 'PASSWORD' in database_conf:
+            database_conf['OPTIONS']['password'] = database_conf.pop('PASSWORD')
+
+        task = None

         # Establishes a websocket connection to /websocket/relay on all API servers
         while True:
+            if not task or task.done():
+                try:
+                    async_conn = await psycopg.AsyncConnection.connect(
+                        dbname=database_conf['NAME'],
+                        host=database_conf['HOST'],
+                        user=database_conf['USER'],
+                        port=database_conf['PORT'],
+                        **database_conf.get("OPTIONS", {}),
+                    )
+                    await async_conn.set_autocommit(True)
+
+                    task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
+                    logger.info("Creating `on_ws_heartbeat` task in event loop.")
+
+                except Exception as e:
+                    logger.warning(f"Failed to connect to database for pg_notify: {e}")
+
             future_remote_hosts = self.known_hosts.keys()
             current_remote_hosts = self.relay_connections.keys()
             deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
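For context, a self-contained sketch of the pg_notify consumption that `on_ws_heartbeat` presumably performs with such a connection; the channel name and DSN below are placeholders, not AWX's actual values:

    import asyncio
    import psycopg

    async def listen_for_heartbeats(dsn, channel="web_ws_heartbeat"):
        # Placeholder channel/DSN; autocommit is required so LISTEN/NOTIFY
        # messages are delivered outside an explicit transaction.
        conn = await psycopg.AsyncConnection.connect(dsn, autocommit=True)
        await conn.execute(f"LISTEN {channel}")
        async for notify in conn.notifies():
            print(notify.channel, notify.payload)

    # asyncio.run(listen_for_heartbeats("dbname=awx user=awx host=localhost"))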
@@ -339,7 +358,7 @@ class WebSocketRelayManager(object):

             if deleted_remote_hosts:
                 logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
-                await asyncio.gather(self.cleanup_offline_host(h) for h in deleted_remote_hosts)
+                await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])

             if new_remote_hosts:
                 logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")
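The one-character-looking fix above matters because `asyncio.gather` takes awaitables as positional arguments; handing it a bare generator raises a TypeError instead of fanning out the coroutines. A minimal illustration:

    import asyncio

    async def cleanup(host):
        await asyncio.sleep(0)
        return host

    async def main():
        hosts = ['nodeA', 'nodeB']
        # Correct: unpack so each coroutine is a separate positional argument.
        results = await asyncio.gather(*[cleanup(h) for h in hosts])
        print(results)
        # Broken (the old code): asyncio.gather(cleanup(h) for h in hosts)
        # raises TypeError because the generator itself is not awaitable.

    asyncio.run(main())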
@@ -216,42 +216,59 @@
 - block:
     - name: Fetch galaxy roles from roles/requirements.(yml/yaml)
       ansible.builtin.command:
-        cmd: "ansible-galaxy role install -r {{ item }} {{ verbosity }}"
+        cmd: "ansible-galaxy role install -r {{ req_file }} {{ verbosity }}"
       register: galaxy_result
-      with_fileglob:
-        - "{{ project_path | quote }}/roles/requirements.yaml"
-        - "{{ project_path | quote }}/roles/requirements.yml"
+      vars:
+        req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
+        req_candidates:
+          files:
+            - "{{ project_path | quote }}/roles/requirements.yml"
+            - "{{ project_path | quote }}/roles/requirements.yaml"
+          skip: True
       changed_when: "'was installed successfully' in galaxy_result.stdout"
-      when: roles_enabled | bool
+      when:
+        - roles_enabled | bool
+        - req_file
       tags:
         - install_roles

     - name: Fetch galaxy collections from collections/requirements.(yml/yaml)
       ansible.builtin.command:
-        cmd: "ansible-galaxy collection install -r {{ item }} {{ verbosity }}"
+        cmd: "ansible-galaxy collection install -r {{ req_file }} {{ verbosity }}"
       register: galaxy_collection_result
-      with_fileglob:
-        - "{{ project_path | quote }}/collections/requirements.yaml"
-        - "{{ project_path | quote }}/collections/requirements.yml"
+      vars:
+        req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
+        req_candidates:
+          files:
+            - "{{ project_path | quote }}/collections/requirements.yml"
+            - "{{ project_path | quote }}/collections/requirements.yaml"
+          skip: True
       changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout"
       when:
         - "ansible_version.full is version_compare('2.9', '>=')"
         - collections_enabled | bool
+        - req_file
       tags:
         - install_collections

     # requirements.yml in project root can be either "old" (roles only) or "new" (collections+roles) format
     - name: Fetch galaxy roles and collections from requirements.(yml/yaml)
       ansible.builtin.command:
-        cmd: "ansible-galaxy install -r {{ item }} {{ verbosity }}"
+        cmd: "ansible-galaxy install -r {{ req_file }} {{ verbosity }}"
       register: galaxy_combined_result
-      with_fileglob:
-        - "{{ project_path | quote }}/requirements.yaml"
-        - "{{ project_path | quote }}/requirements.yml"
+      vars:
+        req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
+        req_candidates:
+          files:
+            - "{{ project_path | quote }}/requirements.yaml"
+            - "{{ project_path | quote }}/requirements.yml"
+          skip: True
       changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout"
       when:
         - "ansible_version.full is version_compare('2.10', '>=')"
        - collections_enabled | bool
        - roles_enabled | bool
+        - req_file
       tags:
         - install_collections
         - install_roles
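The `first_found` lookup used above returns the first candidate path that exists, or an empty result with skip=True so the task's `when: req_file` guard skips it. In plain Python terms, the selection behaves roughly like:

    from pathlib import Path

    def first_found(candidates):
        # Mirrors the lookup's skip=True semantics: return the first existing
        # file, or None so callers can treat "nothing found" as a no-op.
        for candidate in candidates:
            if Path(candidate).is_file():
                return candidate
        return None

    req_file = first_found([
        '/var/lib/awx/projects/demo/roles/requirements.yml',   # example paths,
        '/var/lib/awx/projects/demo/roles/requirements.yaml',  # not AWX defaults
    ])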
22  awx/resource_api.py  (new file)
@@ -0,0 +1,22 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

from awx.main import models


class APIConfig(ServiceAPIConfig):
    service_type = "awx"


RESOURCE_LIST = (
    ResourceConfig(
        models.Organization,
        shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
    ),
    ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
    ResourceConfig(
        models.Team,
        shared_resource=SharedResource(serializer=TeamType, is_provider=False),
        parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
    ),
)
@@ -353,8 +353,11 @@ INSTALLED_APPS = [
     'awx.sso',
     'solo',
     'ansible_base.rest_filters',
+    'ansible_base.jwt_consumer',
+    'ansible_base.resource_registry',
 ]


 INTERNAL_IPS = ('127.0.0.1',)

 MAX_PAGE_SIZE = 200
@@ -362,6 +365,7 @@ REST_FRAMEWORK = {
     'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination',
     'PAGE_SIZE': 25,
     'DEFAULT_AUTHENTICATION_CLASSES': (
+        'ansible_base.jwt_consumer.awx.auth.AwxJWTAuthentication',
         'awx.api.authentication.LoggedOAuth2Authentication',
         'awx.api.authentication.SessionAuthentication',
         'awx.api.authentication.LoggedBasicAuthentication',
@@ -755,6 +759,14 @@ SATELLITE6_INSTANCE_ID_VAR = 'foreman_id,foreman.id'
 INSIGHTS_INSTANCE_ID_VAR = 'insights_id'
 INSIGHTS_EXCLUDE_EMPTY_GROUPS = False

+# ---------------------
+# -- Terraform State --
+# ---------------------
+# TERRAFORM_ENABLED_VAR =
+# TERRAFORM_ENABLED_VALUE =
+TERRAFORM_INSTANCE_ID_VAR = 'id'
+TERRAFORM_EXCLUDE_EMPTY_GROUPS = True
+
 # ---------------------
 # ----- Custom -----
 # ---------------------
@@ -837,7 +849,6 @@ LOGGING = {
         'json': {'()': 'awx.main.utils.formatters.LogstashFormatter'},
         'timed_import': {'()': 'awx.main.utils.formatters.TimeFormatter', 'format': '%(relativeSeconds)9.3f %(levelname)-8s %(message)s'},
         'dispatcher': {'format': '%(asctime)s %(levelname)-8s [%(guid)s] %(name)s PID:%(process)d %(message)s'},
-        'job_lifecycle': {'()': 'awx.main.utils.formatters.JobLifeCycleFormatter'},
     },
     # Extended below based on install scenario. You probably don't want to add something directly here.
     # See 'handler_config' below.
@@ -905,7 +916,7 @@ handler_config = {
     'wsrelay': {'filename': 'wsrelay.log'},
     'task_system': {'filename': 'task_system.log'},
     'rbac_migrations': {'filename': 'tower_rbac_migrations.log'},
-    'job_lifecycle': {'filename': 'job_lifecycle.log', 'formatter': 'job_lifecycle'},
+    'job_lifecycle': {'filename': 'job_lifecycle.log'},
     'rsyslog_configurer': {'filename': 'rsyslog_configurer.log'},
     'cache_clear': {'filename': 'cache_clear.log'},
     'ws_heartbeat': {'filename': 'ws_heartbeat.log'},
@@ -1108,8 +1119,17 @@ METRICS_SUBSYSTEM_CONFIG = {
 # django-ansible-base
 ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
 ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
+ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'

 from ansible_base.lib import dynamic_config  # noqa: E402

 settings_file = os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py')
 include(settings_file)

+# Add a postfix to the API URL patterns
+# example if set to '' API pattern will be /api
+# example if set to 'controller' API pattern will be /api AND /api/controller
+OPTIONAL_API_URLPATTERN_PREFIX = ''
+
 # Use AWX base view, to give 401 on unauthenticated requests
 ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'
@@ -72,6 +72,8 @@ AWX_CALLBACK_PROFILE = True

 # Allows user to trigger task managers directly for debugging and profiling purposes.
 # Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
 AWX_DISABLE_TASK_MANAGERS = False

+# Needed for launching runserver in debug mode
+# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================

 # Store a snapshot of default settings at this point before loading any
@@ -39,7 +39,7 @@
 {% else %}
 <li><a href="{% url 'api:login' %}?next={{ request.get_full_path }}" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="Log in"><span class="glyphicon glyphicon-log-in"></span>Log in</a></li>
 {% endif %}
-<li><a href="//docs.ansible.com/ansible-tower/{{short_tower_version}}/html/towerapi/index.html" target="_blank" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'API Guide' %}"><span class="glyphicon glyphicon-question-sign"></span><span class="visible-xs-inline">{% trans 'API Guide' %}</span></a></li>
+<li><a href="//ansible.readthedocs.io/projects/awx/en/latest/rest_api/index.html" target="_blank" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'API Guide' %}"><span class="glyphicon glyphicon-question-sign"></span><span class="visible-xs-inline">{% trans 'API Guide' %}</span></a></li>
 <li><a href="/" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Back to application' %}"><span class="glyphicon glyphicon-circle-arrow-left"></span><span class="visible-xs-inline">{% trans 'Back to application' %}</span></a></li>
 <li class="hidden-xs"><a href="#" class="resize" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Resize' %}"><span class="glyphicon glyphicon-resize-full"></span></a></li>
 </ul>
@@ -59,6 +59,7 @@ register(
     help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
     category=_('UI'),
     category_slug='ui',
+    hidden=True,
 )

 register(
@@ -68,4 +69,5 @@ register(
     help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
     category=_('UI'),
     category_slug='ui',
+    hidden=True,
 )
46  awx/ui/package-lock.json  (generated)
@@ -13,7 +13,7 @@
         "@patternfly/react-table": "4.113.0",
         "ace-builds": "^1.10.1",
         "ansi-to-html": "0.7.2",
-        "axios": "0.27.2",
+        "axios": "^1.6.7",
         "d3": "7.6.1",
         "dagre": "^0.8.4",
         "dompurify": "2.4.0",
@@ -5940,12 +5940,13 @@
       }
     },
     "node_modules/axios": {
-      "version": "0.27.2",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
-      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
+      "version": "1.6.7",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
+      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
       "dependencies": {
-        "follow-redirects": "^1.14.9",
-        "form-data": "^4.0.0"
+        "follow-redirects": "^1.15.4",
+        "form-data": "^4.0.0",
+        "proxy-from-env": "^1.1.0"
       }
     },
     "node_modules/axios/node_modules/form-data": {
@@ -10387,9 +10388,9 @@
       }
     },
     "node_modules/follow-redirects": {
-      "version": "1.15.1",
-      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
-      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
+      "version": "1.15.5",
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
+      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
       "funding": [
         {
           "type": "individual",
@@ -18349,6 +18350,11 @@
         "node": ">= 0.10"
       }
     },
+    "node_modules/proxy-from-env": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
+      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
+    },
     "node_modules/pseudolocale": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",
@@ -26915,12 +26921,13 @@
       "dev": true
     },
     "axios": {
-      "version": "0.27.2",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
-      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
+      "version": "1.6.7",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
+      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
       "requires": {
-        "follow-redirects": "^1.14.9",
-        "form-data": "^4.0.0"
+        "follow-redirects": "^1.15.4",
+        "form-data": "^4.0.0",
+        "proxy-from-env": "^1.1.0"
       },
       "dependencies": {
         "form-data": {
@@ -30371,9 +30378,9 @@
       }
     },
     "follow-redirects": {
-      "version": "1.15.1",
-      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
-      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA=="
+      "version": "1.15.5",
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
+      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw=="
     },
     "fork-ts-checker-webpack-plugin": {
       "version": "6.5.2",
@@ -36325,6 +36332,11 @@
         }
       }
     },
+    "proxy-from-env": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
+      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
+    },
     "pseudolocale": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",
@@ -13,7 +13,7 @@
     "@patternfly/react-table": "4.113.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
-    "axios": "0.27.2",
+    "axios": "^1.6.7",
     "d3": "7.6.1",
     "dagre": "^0.8.4",
     "dompurify": "2.4.0",
@@ -67,27 +67,18 @@ function getInitialValues(launchConfig, surveyConfig, resource) {
   const values = {};
   if (surveyConfig?.spec) {
     surveyConfig.spec.forEach((question) => {
-      if (question.type === 'multiselect') {
+      if (resource?.extra_data && resource?.extra_data[question.variable]) {
+        values[`survey_${question.variable}`] =
+          resource.extra_data[question.variable];
+      } else if (question.type === 'multiselect') {
         values[`survey_${question.variable}`] = question.default
           ? question.default.split('\n')
           : [];
       } else {
         values[`survey_${question.variable}`] = question.default ?? '';
       }
-      if (resource?.extra_data) {
-        Object.entries(resource.extra_data).forEach(([key, value]) => {
-          if (key === question.variable) {
-            if (question.type === 'multiselect') {
-              values[`survey_${question.variable}`] = value;
-            } else {
-              values[`survey_${question.variable}`] = value;
-            }
-          }
-        });
-      }
     });
   }

   return values;
 }
@@ -257,12 +257,17 @@ function PromptDetail({
               numChips={5}
               ouiaId="prompt-job-tag-chips"
               totalChips={
-                !overrides.job_tags || overrides.job_tags === ''
+                overrides.job_tags === undefined ||
+                overrides.job_tags === null ||
+                overrides.job_tags === ''
                   ? 0
                   : overrides.job_tags.split(',').length
               }
             >
-              {overrides.job_tags.length > 0 &&
+              {overrides.job_tags !== undefined &&
+                overrides.job_tags !== null &&
+                overrides.job_tags !== '' &&
+                overrides.job_tags.length > 0 &&
                 overrides.job_tags.split(',').map((jobTag) => (
                   <Chip
                     key={jobTag}
@@ -284,13 +289,18 @@ function PromptDetail({
             <ChipGroup
               numChips={5}
               totalChips={
-                !overrides.skip_tags || overrides.skip_tags === ''
+                overrides.skip_tags === undefined ||
+                overrides.skip_tags === null ||
+                overrides.skip_tags === ''
                   ? 0
                   : overrides.skip_tags.split(',').length
               }
               ouiaId="prompt-skip-tag-chips"
             >
-              {overrides.skip_tags.length > 0 &&
+              {overrides.skip_tags !== undefined &&
+                overrides.skip_tags !== null &&
+                overrides.skip_tags !== '' &&
+                overrides.skip_tags.length > 0 &&
                 overrides.skip_tags.split(',').map((skipTag) => (
                   <Chip
                     key={skipTag}
@@ -13,6 +13,18 @@ import ScheduleForm from '../shared/ScheduleForm';
 import buildRuleSet from '../shared/buildRuleSet';
 import { CardBody } from '../../Card';

+function generateExtraData(extra_vars, surveyValues, surveyConfiguration) {
+  const extraVars = parseVariableField(
+    yaml.dump(mergeExtraVars(extra_vars, surveyValues))
+  );
+  surveyConfiguration.spec.forEach((q) => {
+    if (!surveyValues[q.variable]) {
+      delete extraVars[q.variable];
+    }
+  });
+  return extraVars;
+}
+
 function ScheduleEdit({
   hasDaysToKeepField,
   schedule,
@@ -33,10 +45,12 @@ function ScheduleEdit({
     surveyConfiguration,
     originalInstanceGroups,
     originalLabels,
-    scheduleCredentials = []
+    scheduleCredentials = [],
+    isPromptTouched = false
   ) => {
     const {
       execution_environment,
+      extra_vars = null,
       instance_groups,
       inventory,
       credentials = [],
@@ -48,45 +62,54 @@ function ScheduleEdit({
       labels,
       ...submitValues
     } = values;
-    let extraVars;

     const surveyValues = getSurveyValues(values);

     if (
-      !Object.values(surveyValues).length &&
-      surveyConfiguration?.spec?.length
+      isPromptTouched &&
+      surveyConfiguration?.spec &&
+      launchConfiguration?.ask_variables_on_launch
     ) {
       surveyConfiguration.spec.forEach((q) => {
         surveyValues[q.variable] = q.default;
       });
+      submitValues.extra_data = generateExtraData(
+        extra_vars,
+        surveyValues,
+        surveyConfiguration
+      );
+    } else if (
+      isPromptTouched &&
+      surveyConfiguration?.spec &&
+      !launchConfiguration?.ask_variables_on_launch
+    ) {
+      submitValues.extra_data = generateExtraData(
+        schedule.extra_data,
+        surveyValues,
+        surveyConfiguration
+      );
+    } else if (
+      isPromptTouched &&
+      launchConfiguration?.ask_variables_on_launch
+    ) {
+      submitValues.extra_data = parseVariableField(extra_vars);
     }
-    const initialExtraVars =
-      launchConfiguration?.ask_variables_on_launch &&
-      (values.extra_vars || '---');
-    if (surveyConfiguration?.spec) {
-      extraVars = yaml.dump(mergeExtraVars(initialExtraVars, surveyValues));
-    } else {
-      extraVars = yaml.dump(mergeExtraVars(initialExtraVars, {}));
-    }
-    submitValues.extra_data = extraVars && parseVariableField(extraVars);

     if (
-      Object.keys(submitValues.extra_data).length === 0 &&
-      Object.keys(schedule.extra_data).length > 0
+      isPromptTouched &&
+      launchConfiguration?.ask_inventory_on_launch &&
+      inventory
     ) {
-      submitValues.extra_data = schedule.extra_data;
-    }
-    delete values.extra_vars;
-    if (inventory) {
       submitValues.inventory = inventory.id;
     }

-    if (execution_environment) {
+    if (
+      isPromptTouched &&
+      launchConfiguration?.ask_execution_environment_on_launch &&
+      execution_environment
+    ) {
       submitValues.execution_environment = execution_environment.id;
     }

     try {
-      if (launchConfiguration?.ask_labels_on_launch) {
+      if (isPromptTouched && launchConfiguration?.ask_labels_on_launch) {
         const { labelIds, error } = createNewLabels(
           values.labels,
           resource.organization
@@ -120,9 +143,16 @@ function ScheduleEdit({
       }
     }

+    const cleanedRequestData = Object.keys(requestData)
+      .filter((key) => !key.startsWith('survey_'))
+      .reduce((acc, key) => {
+        acc[key] = requestData[key];
+        return acc;
+      }, {});
+
     const {
       data: { id: scheduleId },
-    } = await SchedulesAPI.update(schedule.id, requestData);
+    } = await SchedulesAPI.update(schedule.id, cleanedRequestData);

     const { added: addedCredentials, removed: removedCredentials } =
       getAddedAndRemoved(
@@ -6,6 +6,7 @@ import {
   InventoriesAPI,
   CredentialsAPI,
   CredentialTypesAPI,
+  JobTemplatesAPI,
 } from 'api';
 import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
 import ScheduleEdit from './ScheduleEdit';
@@ -125,6 +126,7 @@ describe('<ScheduleEdit />', () => {
       id: 27,
     },
   });
+
   await act(async () => {
     wrapper = mountWithContexts(
       <ScheduleEdit
@@ -206,7 +208,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run once schedule',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
     });
@@ -233,7 +234,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run every 10 minutes 10 times',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200325T103000 RRULE:INTERVAL=10;FREQ=MINUTELY;COUNT=10',
     });
@@ -262,7 +262,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run every hour until date',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=HOURLY;UNTIL=20200326T144500Z',
     });
@@ -288,7 +287,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run daily',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=DAILY',
     });
@@ -316,7 +314,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run weekly on mon/wed/fri',
-      extra_data: {},
       rrule: `DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=WEEKLY;BYDAY=${RRule.MO},${RRule.WE},${RRule.FR}`,
     });
   });
@@ -344,7 +341,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run on the first day of the month',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200401T104500 RRULE:INTERVAL=1;FREQ=MONTHLY;BYMONTHDAY=1',
     });
@@ -376,7 +372,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run monthly on the last Tuesday',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200331T110000 RRULE:INTERVAL=1;FREQ=MONTHLY;BYSETPOS=-1;BYDAY=TU',
     });
@@ -406,7 +401,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Yearly on the first day of March',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200301T000000 RRULE:INTERVAL=1;FREQ=YEARLY;BYMONTH=3;BYMONTHDAY=1',
     });
@@ -437,7 +431,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Yearly on the second Friday in April',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=2;BYDAY=FR;BYMONTH=4',
     });
@@ -468,7 +461,6 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Yearly on the first weekday in October',
-      extra_data: {},
       rrule:
         'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=1;BYDAY=MO,TU,WE,TH,FR;BYMONTH=10',
     });
@@ -562,7 +554,6 @@ describe('<ScheduleEdit />', () => {
     wrapper.update();

     expect(SchedulesAPI.update).toBeCalledWith(27, {
-      extra_data: {},
       name: 'mock schedule',
       rrule:
         'DTSTART;TZID=America/New_York:20210128T141500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
@@ -633,15 +624,13 @@ describe('<ScheduleEdit />', () => {
       endDateTime: undefined,
       startDateTime: undefined,
       description: '',
-      extra_data: {},
       name: 'foo',
       inventory: 702,
       rrule:
         'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
     });
   });

-  test('should submit survey with default values properly, without opening prompt wizard', async () => {
+  test('should submit update values properly when prompt is not opened', async () => {
     let scheduleSurveyWrapper;
     await act(async () => {
       scheduleSurveyWrapper = mountWithContexts(
@@ -746,9 +735,195 @@ describe('<ScheduleEdit />', () => {
     expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
       description: 'test description',
       name: 'Run once schedule',
       extra_data: { mc: 'first', text: 'text variable' },
       rrule:
         'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
     });
   });
+
+  test('should submit update values properly when survey values change', async () => {
+    JobTemplatesAPI.readSurvey.mockResolvedValue({
+      data: {
+        spec: [
+          {
+            question_name: 'text',
+            question_description: '',
+            required: true,
+            type: 'text',
+            variable: 'text',
+            min: 0,
+            max: 1024,
+            default: 'text variable',
+            choices: '',
+            new_question: true,
+          },
+        ],
+      },
+    });
+
+    JobTemplatesAPI.readLaunch.mockResolvedValue({
+      data: {
+        can_start_without_user_input: false,
+        passwords_needed_to_start: [],
+        ask_scm_branch_on_launch: false,
+        ask_variables_on_launch: false,
+        ask_tags_on_launch: false,
+        ask_diff_mode_on_launch: false,
+        ask_skip_tags_on_launch: false,
+        ask_job_type_on_launch: false,
+        ask_limit_on_launch: false,
+        ask_verbosity_on_launch: false,
+        ask_inventory_on_launch: true,
+        ask_credential_on_launch: true,
+        survey_enabled: true,
+        variables_needed_to_start: [],
+        credential_needed_to_start: true,
+        inventory_needed_to_start: true,
+        job_template_data: {
+          name: 'Demo Job Template',
+          id: 7,
+          description: '',
+        },
+        defaults: {
+          extra_vars: '---',
+          diff_mode: false,
+          limit: '',
+          job_tags: '',
+          skip_tags: '',
+          job_type: 'run',
+          verbosity: 0,
+          inventory: {
+            name: null,
+            id: null,
+          },
+          scm_branch: '',
+          credentials: [],
+        },
+      },
+    });
+
+    let scheduleSurveyWrapper;
+    await act(async () => {
+      scheduleSurveyWrapper = mountWithContexts(
+        <ScheduleEdit
+          schedule={mockSchedule}
+          resource={{
+            id: 700,
+            type: 'job_template',
+            inventory: 1,
+            summary_fields: {
+              credentials: [
+                { name: 'job template credential', id: 75, kind: 'ssh' },
+              ],
+            },
+            name: 'Foo Job Template',
+            description: '',
+          }}
+          resourceDefaultCredentials={[]}
+          launchConfig={{
+            can_start_without_user_input: false,
+            passwords_needed_to_start: [],
+            ask_scm_branch_on_launch: false,
+            ask_variables_on_launch: false,
+            ask_tags_on_launch: false,
+            ask_diff_mode_on_launch: false,
+            ask_skip_tags_on_launch: false,
+            ask_job_type_on_launch: false,
+            ask_limit_on_launch: false,
+            ask_verbosity_on_launch: false,
+            ask_inventory_on_launch: true,
+            ask_credential_on_launch: true,
+            survey_enabled: true,
+            variables_needed_to_start: [],
+            credential_needed_to_start: true,
+            inventory_needed_to_start: true,
+            job_template_data: {
+              name: 'Demo Job Template',
+              id: 7,
+              description: '',
+            },
+            defaults: {
+              extra_vars: '---',
+              diff_mode: false,
+              limit: '',
+              job_tags: '',
+              skip_tags: '',
+              job_type: 'run',
+              verbosity: 0,
+              inventory: {
+                name: null,
+                id: null,
+              },
+              scm_branch: '',
+              credentials: [],
+            },
+          }}
+          surveyConfig={{
+            spec: [
+              {
+                question_name: 'text',
+                question_description: '',
+                required: true,
+                type: 'text',
+                variable: 'text',
+                min: 0,
+                max: 1024,
+                default: 'text variable',
+                choices: '',
+                new_question: true,
+              },
+            ],
+          }}
+        />
+      );
+    });
+    scheduleSurveyWrapper.update();
+
+    await act(async () =>
+      scheduleSurveyWrapper
+        .find('Button[aria-label="Prompt"]')
+        .prop('onClick')()
+    );
+    scheduleSurveyWrapper.update();
+    expect(scheduleSurveyWrapper.find('WizardNavItem').length).toBe(4);
+    await act(async () =>
+      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
+    );
+    scheduleSurveyWrapper.update();
+    await act(async () =>
+      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
+    );
+    scheduleSurveyWrapper.update();
+    await act(async () =>
+      scheduleSurveyWrapper
+        .find('input#survey-question-text')
+        .simulate('change', {
+          target: { value: 'foo', name: 'survey_text' },
+        })
+    );
+    scheduleSurveyWrapper.update();
+    await act(async () =>
+      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
+    );
+    scheduleSurveyWrapper.update();
+    await act(async () =>
+      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
+    );
+    scheduleSurveyWrapper.update();
+
+    expect(scheduleSurveyWrapper.find('Wizard').length).toBe(0);
+
+    await act(async () =>
+      scheduleSurveyWrapper.find('Button[aria-label="Save"]').prop('onClick')()
+    );
+
+    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
+      description: '',
+      name: 'mock schedule',
+      inventory: 702,
+      extra_data: {
+        text: 'foo',
+      },
+      rrule:
+        'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
+    });
+  });
 });
@@ -40,6 +40,7 @@ function ScheduleForm({
   resourceDefaultCredentials,
 }) {
   const [isWizardOpen, setIsWizardOpen] = useState(false);
+  const [isPromptTouched, setIsPromptTouched] = useState(false);
   const [isSaveDisabled, setIsSaveDisabled] = useState(false);
   const originalLabels = useRef([]);
   const originalInstanceGroups = useRef([]);
@@ -492,7 +493,8 @@ function ScheduleForm({
           surveyConfig,
           originalInstanceGroups.current,
           originalLabels.current,
-          credentials
+          credentials,
+          isPromptTouched
         );
       }}
       validate={validate}
@@ -518,6 +520,7 @@ function ScheduleForm({
           onSave={() => {
             setIsWizardOpen(false);
             setIsSaveDisabled(false);
+            setIsPromptTouched(true);
           }}
           resourceDefaultCredentials={resourceDefaultCredentials}
           labels={originalLabels.current}
@@ -115,8 +115,11 @@ function SessionProvider({ children }) {
   }, [setSessionTimeout, setSessionCountdown]);

   useEffect(() => {
+    const isRedirectCondition = (location, histLength) =>
+      location.pathname === '/login' && histLength === 2;
+
     const unlisten = history.listen((location, action) => {
-      if (action === 'POP') {
+      if (action === 'POP' || isRedirectCondition(location, history.length)) {
         setIsRedirectLinkReceived(true);
       }
     });
@@ -784,7 +784,7 @@ msgstr "Branche à utiliser dans l’exécution de la tâche. Projet par défaut

 #: screens/Inventory/shared/Inventory.helptext.js:155
 msgid "Branch to use on inventory sync. Project default used if blank. Only allowed if project allow_override field is set to true."
-msgstr ""
+msgstr "Branche à utiliser pour la synchronisation de l'inventaire. La valeur par défaut du projet est utilisée si elle est vide. Cette option n'est autorisée que si le champ allow_override du projet est défini sur vrai."

 #: components/About/About.js:45
 msgid "Brand Image"
@@ -2832,7 +2832,7 @@ msgstr "Entrez les variables avec la syntaxe JSON ou YAML. Consultez la documen

 #: screens/Inventory/shared/SmartInventoryForm.js:94
 msgid "Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Controller documentation for example syntax."
-msgstr ""
+msgstr "Entrez les variables d'inventaire en utilisant la syntaxe JSON ou YAML. Utilisez le bouton d'option pour basculer entre les deux. Référez-vous à la documentation du contrôleur Ansible pour les exemples de syntaxe."

 #: screens/CredentialType/CredentialTypeDetails/CredentialTypeDetails.js:87
 msgid "Environment variables or extra variables that specify the values a credential type can inject."
@@ -3015,7 +3015,7 @@ msgstr "Recherche exacte sur le champ d'identification."

 #: components/Search/RelatedLookupTypeInput.js:38
 msgid "Exact search on name field."
-msgstr ""
+msgstr "Recherche exacte sur le champ nom."

 #: screens/Project/shared/Project.helptext.js:23
 msgid "Example URLs for GIT Source Control include:"
@@ -3242,7 +3242,7 @@ msgstr "Jobs ayant échoué"

 #: screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js:262
 msgid "Failed to approve one or more workflow approval."
-msgstr ""
+msgstr "Échec de l'approbation d'une ou plusieurs validations de flux de travail."

 #: screens/WorkflowApproval/shared/WorkflowApprovalButton.js:56
 msgid "Failed to approve {0}."
@@ -3474,7 +3474,7 @@ msgstr "N'a pas réussi à supprimer {name}."

 #: screens/WorkflowApproval/WorkflowApprovalList/WorkflowApprovalList.js:263
 msgid "Failed to deny one or more workflow approval."
-msgstr ""
+msgstr "Échec du refus d'une ou plusieurs validations de flux de travail."

 #: screens/WorkflowApproval/shared/WorkflowDenyButton.js:51
 msgid "Failed to deny {0}."
@@ -3520,7 +3520,7 @@ msgstr "Echec du lancement du Job."

 #: screens/Inventory/InventoryHosts/InventoryHostItem.js:121
 msgid "Failed to load related groups."
-msgstr ""
+msgstr "Impossible de charger les groupes associés."

 #: screens/Instances/InstanceDetail/InstanceDetail.js:388
 #: screens/Instances/InstanceList/InstanceList.js:266
@@ -3972,12 +3972,12 @@ msgstr "Demande(s) de bilan de santé soumise(s). Veuillez patienter et recharge
 #: screens/Instances/InstanceDetail/InstanceDetail.js:234
 #: screens/Instances/InstanceList/InstanceListItem.js:242
 msgid "Health checks are asynchronous tasks. See the"
-msgstr ""
+msgstr "Les bilans de santé sont des tâches asynchrones. Veuillez consulter la documentation pour plus d'informations."

 #: screens/InstanceGroup/Instances/InstanceList.js:286
 #: screens/Instances/InstanceList/InstanceList.js:219
 msgid "Health checks can only be run on execution nodes."
-msgstr ""
+msgstr "Les bilans de santé ne peuvent être exécutées que sur les nœuds d'exécution."

 #: components/StatusLabel/StatusLabel.js:42
 msgid "Healthy"
@@ -5048,7 +5048,7 @@ msgstr "Lancer"

 #: components/TemplateList/TemplateListItem.js:214
 msgid "Launch Template"
-msgstr "Lacer le modèle."
+msgstr "Lancer le modèle."

 #: screens/ManagementJob/ManagementJobList/LaunchManagementPrompt.js:32
 #: screens/ManagementJob/ManagementJobList/LaunchManagementPrompt.js:34
@@ -9637,7 +9637,7 @@ msgstr "Utilisateur"

 #: components/AppContainer/PageHeaderToolbar.js:160
 msgid "User Details"
-msgstr "Détails de l'erreur"
+msgstr "Détails de l'utilisateur"

 #: screens/Setting/SettingList.js:121
 #: screens/Setting/Settings.js:118
@@ -80,7 +80,7 @@ function Dashboard() {
         <Trans>
           <p>
             <InfoCircleIcon /> A tech preview of the new {brandName} user
-            interface can be found <a href="/ui_next/dashboard">here</a>.
+            interface can be found <a href="/ui_next">here</a>.
           </p>
         </Trans>
       </Banner>
@@ -191,7 +191,7 @@ function InstancePeerList({ setBreadcrumb }) {
       fetchPeers();
       addToast({
         id: instancesPeerToAssociate,
-        title: t`Peers update on ${instance.hostname}. Please be sure to run the install bundle for ${instance.hostname} again in order to see changes take effect.`,
+        title: t`Please be sure to run the install bundle for the selected instance(s) again in order to see changes take effect.`,
         variant: AlertVariant.success,
         hasTimeout: true,
       });
@@ -21,6 +21,8 @@ const ansibleDocUrls = {
     'https://docs.ansible.com/ansible/latest/collections/community/vmware/vmware_vm_inventory_inventory.html',
   constructed:
     'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
+  terraform:
+    'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
 };

 const getInventoryHelpTextStrings = () => ({
@@ -119,10 +121,10 @@ const getInventoryHelpTextStrings = () => ({
         <br />
         {value && (
           <div>
-            {t`If you want the Inventory Source to update on
-            launch and on project update, click on Update on launch, and also go to`}
+            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
+            and also go to `}
             <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
-            {t`and click on Update Revision on Launch`}
+            {t`and click on Update Revision on Launch.`}
           </div>
         )}
       </>
@@ -138,8 +140,8 @@ const getInventoryHelpTextStrings = () => ({
         <br />
         {value && (
           <div>
-            {t`If you want the Inventory Source to update on
-            launch and on project update, click on Update on launch, and also go to`}
+            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
+            and also go to `}
             <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
             {t`and click on Update Revision on Launch`}
           </div>
@@ -23,6 +23,7 @@ import {
   SCMSubForm,
   SatelliteSubForm,
   ControllerSubForm,
+  TerraformSubForm,
   VMwareSubForm,
   VirtualizationSubForm,
 } from './InventorySourceSubForms';
@@ -214,6 +215,14 @@ const InventorySourceFormFields = ({
         }
       />
     ),
+    terraform: (
+      <TerraformSubForm
+        autoPopulateCredential={
+          !source?.id || source?.source !== 'terraform'
+        }
+        sourceOptions={sourceOptions}
+      />
+    ),
     vmware: (
       <VMwareSubForm
         autoPopulateCredential={
@@ -38,6 +38,7 @@ describe('<InventorySourceForm />', () => {
           ['openstack', 'OpenStack'],
           ['rhv', 'Red Hat Virtualization'],
           ['controller', 'Red Hat Ansible Automation Platform'],
+          ['terraform', 'Terraform State'],
         ],
       },
     },
@@ -0,0 +1,59 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import { useConfig } from 'contexts/Config';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
  OptionsField,
  VerbosityField,
  EnabledVarField,
  EnabledValueField,
  HostFilterField,
  SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const TerraformSubForm = ({ autoPopulateCredential }) => {
  const helpText = getHelpText();
  const { setFieldValue, setFieldTouched } = useFormikContext();
  const [credentialField, credentialMeta, credentialHelpers] =
    useField('credential');
  const config = useConfig();
  const handleCredentialUpdate = useCallback(
    (value) => {
      setFieldValue('credential', value);
      setFieldTouched('credential', true, false);
    },
    [setFieldValue, setFieldTouched]
  );
  const docsBaseUrl = getDocsBaseUrl(config);

  return (
    <>
      <CredentialLookup
        credentialTypeNamespace="terraform"
        label={t`Credential`}
        helperTextInvalid={credentialMeta.error}
        isValid={!credentialMeta.touched || !credentialMeta.error}
        onBlur={() => credentialHelpers.setTouched()}
        onChange={handleCredentialUpdate}
        value={credentialField.value}
        required
        autoPopulate={autoPopulateCredential}
        validate={required(t`Select a value for this field`)}
      />
      <VerbosityField />
      <HostFilterField />
      <EnabledVarField />
      <EnabledValueField />
      <OptionsField />
      <SourceVarsField
        popoverContent={helpText.sourceVars(docsBaseUrl, 'terraform')}
      />
    </>
  );
};

export default TerraformSubForm;
@@ -0,0 +1,70 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import TerraformSubForm from './TerraformSubForm';

jest.mock('../../../../api');

const initialValues = {
  credential: null,
  overwrite: false,
  overwrite_vars: false,
  source_path: '',
  source_project: null,
  source_script: null,
  source_vars: '---\n',
  update_cache_timeout: 0,
  update_on_launch: true,
  verbosity: 1,
};

const mockSourceOptions = {
  actions: {
    POST: {},
  },
};

describe('<TerraformSubForm />', () => {
  let wrapper;

  beforeEach(async () => {
    CredentialsAPI.read.mockResolvedValue({
      data: { count: 0, results: [] },
    });
    await act(async () => {
      wrapper = mountWithContexts(
        <Formik initialValues={initialValues}>
          <TerraformSubForm sourceOptions={mockSourceOptions} />
        </Formik>
      );
    });
  });

  afterAll(() => {
    jest.clearAllMocks();
  });

  test('should render subform fields', () => {
    expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
    expect(
      wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
    ).toHaveLength(1);
    expect(
      wrapper.find('VariablesField[label="Source variables"]')
    ).toHaveLength(1);
  });

  test('should make expected api calls', () => {
    expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
    expect(CredentialsAPI.read).toHaveBeenCalledWith({
      credential_type__namespace: 'terraform',
      order_by: 'name',
      page: 1,
      page_size: 5,
    });
  });
});
@@ -6,5 +6,6 @@ export { default as OpenStackSubForm } from './OpenStackSubForm';
 export { default as SCMSubForm } from './SCMSubForm';
 export { default as SatelliteSubForm } from './SatelliteSubForm';
 export { default as ControllerSubForm } from './ControllerSubForm';
+export { default as TerraformSubForm } from './TerraformSubForm';
 export { default as VMwareSubForm } from './VMwareSubForm';
 export { default as VirtualizationSubForm } from './VirtualizationSubForm';
@@ -3,6 +3,7 @@ import { Modal, Tab, Tabs, TabTitleText } from '@patternfly/react-core';
 import PropTypes from 'prop-types';
 import { t } from '@lingui/macro';
 import { encode } from 'html-entities';
+import { jsonToYaml } from 'util/yaml';
 import StatusLabel from '../../../components/StatusLabel';
 import { DetailList, Detail } from '../../../components/DetailList';
 import ContentEmpty from '../../../components/ContentEmpty';
@@ -144,9 +145,28 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
           <ContentEmpty title={t`No JSON Available`} />
         )}
       </Tab>
+      <Tab
+        eventKey={2}
+        title={<TabTitleText>{t`YAML`}</TabTitleText>}
+        aria-label={t`YAML tab`}
+        ouiaId="yaml-tab"
+      >
+        {activeTabKey === 2 && jsonObj ? (
+          <CodeEditor
+            mode="javascript"
+            readOnly
+            value={jsonToYaml(JSON.stringify(jsonObj))}
+            onChange={() => {}}
+            rows={20}
+            hasErrors={false}
+          />
+        ) : (
+          <ContentEmpty title={t`No YAML Available`} />
+        )}
+      </Tab>
       {stdOut?.length ? (
         <Tab
-          eventKey={2}
+          eventKey={3}
           title={<TabTitleText>{t`Output`}</TabTitleText>}
           aria-label={t`Output tab`}
           ouiaId="standard-out-tab"
@@ -163,7 +183,7 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
       ) : null}
       {stdErr?.length ? (
         <Tab
-          eventKey={3}
+          eventKey={4}
           title={<TabTitleText>{t`Standard Error`}</TabTitleText>}
           aria-label={t`Standard error tab`}
           ouiaId="standard-error-tab"
@@ -2,6 +2,7 @@ import React from 'react';
 import { shallow } from 'enzyme';
 import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
 import HostEventModal from './HostEventModal';
+import { jsonToYaml } from 'util/yaml';

 const hostEvent = {
   changed: true,
@@ -167,6 +168,8 @@ const jsonValue = `{
   ]
 }`;

+const yamlValue = jsonToYaml(jsonValue);
+
 describe('HostEventModal', () => {
   test('initially renders successfully', () => {
     const wrapper = shallow(
@@ -187,7 +190,7 @@ describe('HostEventModal', () => {
       <HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
     );

-    expect(wrapper.find('Tabs Tab').length).toEqual(4);
+    expect(wrapper.find('Tabs Tab').length).toEqual(5);
   });

   test('should initially show details tab', () => {
@@ -287,7 +290,7 @@ describe('HostEventModal', () => {
     expect(codeEditor.prop('value')).toEqual(jsonValue);
   });

-  test('should display Standard Out tab content on tab click', () => {
+  test('should display YAML tab content on tab click', () => {
     const wrapper = shallow(
       <HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
     );
@@ -299,6 +302,21 @@ describe('HostEventModal', () => {
     const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
     expect(codeEditor.prop('mode')).toBe('javascript');
     expect(codeEditor.prop('readOnly')).toBe(true);
-    expect(codeEditor.prop('value')).toEqual(hostEvent.event_data.res.stdout);
+    expect(codeEditor.prop('value')).toEqual(yamlValue);
+  });
+
+  test('should display Standard Out tab content on tab click', () => {
+    const wrapper = shallow(
+      <HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
+    );
+
+    const handleTabClick = wrapper.find('Tabs').prop('onSelect');
+    handleTabClick(null, 3);
+    wrapper.update();
+
+    const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
+    expect(codeEditor.prop('mode')).toBe('javascript');
+    expect(codeEditor.prop('readOnly')).toBe(true);
+    expect(codeEditor.prop('value')).toEqual(hostEvent.event_data.res.stdout);
   });

@@ -316,10 +334,10 @@ describe('HostEventModal', () => {
     );

     const handleTabClick = wrapper.find('Tabs').prop('onSelect');
-    handleTabClick(null, 3);
+    handleTabClick(null, 4);
     wrapper.update();

-    const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
+    const codeEditor = wrapper.find('Tab[eventKey=4] CodeEditor');
     expect(codeEditor.prop('mode')).toBe('javascript');
     expect(codeEditor.prop('readOnly')).toBe(true);
     expect(codeEditor.prop('value')).toEqual('error content');
@@ -351,10 +369,10 @@ describe('HostEventModal', () => {
     );

     const handleTabClick = wrapper.find('Tabs').prop('onSelect');
-    handleTabClick(null, 2);
+    handleTabClick(null, 3);
     wrapper.update();

-    const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
+    const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
     expect(codeEditor.prop('mode')).toBe('javascript');
     expect(codeEditor.prop('readOnly')).toBe(true);
     expect(codeEditor.prop('value')).toEqual('foo bar');
@@ -375,10 +393,10 @@ describe('HostEventModal', () => {
     );

     const handleTabClick = wrapper.find('Tabs').prop('onSelect');
-    handleTabClick(null, 2);
+    handleTabClick(null, 3);
     wrapper.update();

-    const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
+    const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
     expect(codeEditor.prop('mode')).toBe('javascript');
     expect(codeEditor.prop('readOnly')).toBe(true);
     expect(codeEditor.prop('value')).toEqual('baz\nbar');
@@ -394,10 +412,10 @@ describe('HostEventModal', () => {
     );

     const handleTabClick = wrapper.find('Tabs').prop('onSelect');
-    handleTabClick(null, 2);
+    handleTabClick(null, 3);
     wrapper.update();

-    const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
+    const codeEditor = wrapper.find('Tab[eventKey=3] CodeEditor');
     expect(codeEditor.prop('mode')).toBe('javascript');
     expect(codeEditor.prop('readOnly')).toBe(true);
     expect(codeEditor.prop('value')).toEqual(
@@ -30,7 +30,7 @@ function SubscriptionUsage() {
       <Trans>
         <p>
           <InfoCircleIcon /> A tech preview of the new {brandName} user
-          interface can be found <a href="/ui_next/dashboard">here</a>.
+          interface can be found <a href="/ui_next">here</a>.
         </p>
       </Trans>
     </Banner>
@@ -201,7 +201,11 @@ function NodeViewModal({ readOnly }) {
       overrides.limit = originalNodeObject.limit;
     }
     if (launchConfig.ask_verbosity_on_launch) {
-      overrides.verbosity = originalNodeObject.verbosity.toString();
+      overrides.verbosity =
+        originalNodeObject.verbosity !== undefined &&
+        originalNodeObject.verbosity !== null
+          ? originalNodeObject.verbosity.toString()
+          : '0';
     }
     if (launchConfig.ask_credential_on_launch) {
       overrides.credentials = originalNodeCredentials || [];
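Reviewer note: the guard matters because a workflow node saved without an explicit verbosity has nothing to stringify, and calling .toString() on undefined would throw a TypeError. A minimal sketch of the new behavior; the node argument is a hypothetical stand-in for originalNodeObject:

    // Hypothetical helper mirroring the guarded override above.
    function verbosityOverride(node) {
      return node.verbosity !== undefined && node.verbosity !== null
        ? node.verbosity.toString()
        : '0'; // fall back to the lowest verbosity level
    }

    console.log(verbosityOverride({ verbosity: 3 })); // '3'
    console.log(verbosityOverride({}));               // '0' (previously a TypeError)
    console.log(verbosityOverride({ verbosity: 0 })); // '0': 0 is a real value, so a truthiness check would not work here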
@@ -1,7 +1,7 @@
 export default function getSurveyValues(values) {
   const surveyValues = {};
   Object.keys(values).forEach((key) => {
-    if (key.startsWith('survey_') && values[key] !== []) {
+    if (key.startsWith('survey_')) {
+      if (Array.isArray(values[key]) && values[key].length === 0) {
+        return;
+      }
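Reviewer note: the dropped condition was a reference-equality trap. The literal [] allocates a fresh array and !== compares object identity, so values[key] !== [] is true for every possible value, including empty arrays; blank multi-select survey answers were therefore never filtered out. A quick illustration of the gotcha and of the explicit emptiness test that replaces it:

    // Arrays compare by reference, so the old guard could never fire:
    console.log([] !== []); // true: two distinct array objects

    // The replacement tests emptiness directly:
    const isEmptyArray = (value) => Array.isArray(value) && value.length === 0;
    console.log(isEmptyArray([]));      // true  -> answer is skipped
    console.log(isEmptyArray(['a']));   // false -> answer is kept
    console.log(isEmptyArray('text'));  // false -> non-array answers pass through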
@@ -1,7 +1,12 @@
 import yaml from 'js-yaml';

 export default function mergeExtraVars(extraVars = '', survey = {}) {
-  const vars = yaml.load(extraVars) || {};
+  let vars = {};
+  if (typeof extraVars === 'string') {
+    vars = yaml.load(extraVars);
+  } else if (typeof extraVars === 'object') {
+    vars = extraVars;
+  }
   return {
     ...vars,
     ...survey,
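Reviewer note: the type check lets callers pass either raw YAML/JSON text or an object that has already been parsed; yaml.load expects YAML text, so handing it an already-parsed object could not produce a useful merge. A minimal standalone sketch of the new shape (spreading undefined, e.g. from an empty string, is harmless, since { ...undefined } is just {}):

    import yaml from 'js-yaml';

    function mergeExtraVars(extraVars = '', survey = {}) {
      let vars = {};
      if (typeof extraVars === 'string') {
        vars = yaml.load(extraVars); // undefined for '', which spreads to nothing
      } else if (typeof extraVars === 'object') {
        vars = extraVars;
      }
      // Survey answers win over extra vars on key collisions.
      return { ...vars, ...survey };
    }

    console.log(mergeExtraVars('foo: 1\nbar: 2', { bar: 3 })); // { foo: 1, bar: 3 }
    console.log(mergeExtraVars({ foo: 1 }, { bar: 2 }));       // { foo: 1, bar: 2 }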
@@ -35,7 +35,7 @@ ui-next/src/build: $(UI_NEXT_DIR)/src/build/awx
 ## True target for ui-next/src/build. Build ui_next from source.
 $(UI_NEXT_DIR)/src/build/awx: $(UI_NEXT_DIR)/src $(UI_NEXT_DIR)/src/node_modules/webpack
 	@echo "=== Building ui_next ==="
-	@cd $(UI_NEXT_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ npm run build:awx
+	@cd $(UI_NEXT_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ ROUTE_PREFIX=/ui_next npm run build:awx
 	@mv $(UI_NEXT_DIR)/src/build/awx/index.html $(UI_NEXT_DIR)/src/build/awx/index_awx.html

 .PHONY: ui-next/src
awx/urls.py
@@ -2,7 +2,9 @@
 # All Rights Reserved.

+from django.conf import settings
-from django.urls import re_path, include
+from django.urls import path, re_path, include

 from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls

 from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect

@@ -10,7 +12,16 @@ from awx.main.views import handle_400, handle_403, handle_404, handle_500, handl
 urlpatterns = [
     re_path(r'', include('awx.ui.urls', namespace='ui')),
     re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')),
-    re_path(r'^api/', include('awx.api.urls', namespace='api')),
+    path('api/', include('awx.api.urls', namespace='api')),
+]
+
+if settings.OPTIONAL_API_URLPATTERN_PREFIX:
+    urlpatterns += [
+        path(f'api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/', include('awx.api.urls')),
+    ]
+
+urlpatterns += [
     re_path(r'^api/v2/', include(resource_api_urls)),
     re_path(r'^sso/', include('awx.sso.urls', namespace='sso')),
     re_path(r'^sso/', include('social_django.urls', namespace='social')),
     re_path(r'^(?:api/)?400.html$', handle_400),
@@ -18,7 +18,7 @@ documentation: https://github.com/ansible/awx/blob/devel/awx_collection/README.m
 homepage: https://www.ansible.com/
 issues: https://github.com/ansible/awx/issues?q=is%3Aissue+label%3Acomponent%3Aawx_collection
 license:
-- GPL-3.0-only
+- GPL-3.0-or-later
 name: awx
 namespace: awx
 readme: README.md
@@ -1,119 +0,0 @@
-# This code is part of Ansible, but is an independent component.
-# This particular file snippet, and this file snippet only, is BSD licensed.
-# Modules you write using this snippet, which is embedded dynamically by Ansible
-# still belong to the author of the module, and may assign their own license
-# to the complete work.
-#
-# Copyright (c), Wayne Witzel III <wayne@riotousliving.com>
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without modification,
-# are permitted provided that the following conditions are met:
-#
-#    * Redistributions of source code must retain the above copyright
-#      notice, this list of conditions and the following disclaimer.
-#    * Redistributions in binary form must reproduce the above copyright notice,
-#      this list of conditions and the following disclaimer in the documentation
-#      and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-import os
-import traceback
-
-TOWER_CLI_IMP_ERR = None
-try:
-    import tower_cli.utils.exceptions as exc
-    from tower_cli.utils import parser
-    from tower_cli.api import client
-
-    HAS_TOWER_CLI = True
-except ImportError:
-    TOWER_CLI_IMP_ERR = traceback.format_exc()
-    HAS_TOWER_CLI = False
-
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-
-
-def tower_auth_config(module):
-    """
-    `tower_auth_config` attempts to load the tower-cli.cfg file
-    specified from the `tower_config_file` parameter. If found,
-    it returns the contents of the file as a dictionary, else
-    it will attempt to fetch values from the module params and
-    only pass those values that have been set.
-    """
-    config_file = module.params.pop('tower_config_file', None)
-    if config_file:
-        if not os.path.exists(config_file):
-            module.fail_json(msg='file not found: %s' % config_file)
-        if os.path.isdir(config_file):
-            module.fail_json(msg='directory can not be used as config file: %s' % config_file)
-
-        with open(config_file, 'r') as f:
-            return parser.string_to_dict(f.read())
-    else:
-        auth_config = {}
-        host = module.params.pop('tower_host', None)
-        if host:
-            auth_config['host'] = host
-        username = module.params.pop('tower_username', None)
-        if username:
-            auth_config['username'] = username
-        password = module.params.pop('tower_password', None)
-        if password:
-            auth_config['password'] = password
-        module.params.pop('tower_verify_ssl', None)  # pop alias if used
-        verify_ssl = module.params.pop('validate_certs', None)
-        if verify_ssl is not None:
-            auth_config['verify_ssl'] = verify_ssl
-        return auth_config
-
-
-def tower_check_mode(module):
-    '''Execute check mode logic for Ansible Tower modules'''
-    if module.check_mode:
-        try:
-            result = client.get('/ping').json()
-            module.exit_json(changed=True, tower_version='{0}'.format(result['version']))
-        except (exc.ServerError, exc.ConnectionError, exc.BadRequest) as excinfo:
-            module.fail_json(changed=False, msg='Failed check mode: {0}'.format(excinfo))
-
-
-class TowerLegacyModule(AnsibleModule):
-    def __init__(self, argument_spec, **kwargs):
-        args = dict(
-            tower_host=dict(),
-            tower_username=dict(),
-            tower_password=dict(no_log=True),
-            validate_certs=dict(type='bool', aliases=['tower_verify_ssl']),
-            tower_config_file=dict(type='path'),
-        )
-        args.update(argument_spec)
-
-        kwargs.setdefault('mutually_exclusive', [])
-        kwargs['mutually_exclusive'].extend(
-            (
-                ('tower_config_file', 'tower_host'),
-                ('tower_config_file', 'tower_username'),
-                ('tower_config_file', 'tower_password'),
-                ('tower_config_file', 'validate_certs'),
-            )
-        )
-
-        super().__init__(argument_spec=args, **kwargs)
-
-        if not HAS_TOWER_CLI:
-            self.fail_json(msg=missing_required_lib('ansible-tower-cli'), exception=TOWER_CLI_IMP_ERR)
@@ -147,8 +147,12 @@ def main():
     if redirect_uris is not None:
         application_fields['redirect_uris'] = ' '.join(redirect_uris)

     # If the state was present and we can let the module build or update the existing application, this will return on its own
-    module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application')
+    response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
+    if 'client_id' in response:
+        module.json_output['client_id'] = response['client_id']
+    if 'client_secret' in response:
+        module.json_output['client_secret'] = response['client_secret']
+    module.exit_json(**module.json_output)


 if __name__ == '__main__':
@@ -39,6 +39,16 @@ options:
     description:
       - Limit to use for the I(job_template).
     type: str
+  tags:
+    description:
+      - Specific tags to apply from the I(job_template).
+    type: list
+    elements: str
+  skip_tags:
+    description:
+      - Specific tags to skip from the I(job_template).
+    type: list
+    elements: str
   scm_branch:
     description:
       - A specific branch of the SCM project to run the template on.
@@ -100,6 +110,8 @@ def main():
         organization=dict(),
         inventory=dict(),
         limit=dict(),
+        tags=dict(type='list', elements='str'),
+        skip_tags=dict(type='list', elements='str'),
         scm_branch=dict(),
         extra_vars=dict(type='dict'),
         wait=dict(required=False, default=True, type='bool'),
@@ -128,6 +140,14 @@ def main():
         if field_val is not None:
             optional_args[field_name] = field_val

+    # Special treatment of tags parameters
+    job_tags = module.params.get('tags')
+    if job_tags is not None:
+        optional_args['job_tags'] = ",".join(job_tags)
+    skip_tags = module.params.get('skip_tags')
+    if skip_tags is not None:
+        optional_args['skip_tags'] = ",".join(skip_tags)
+
     # Create a datastructure to pass into our job launch
     post_data = {}
     for arg_name, arg_value in optional_args.items():
@@ -152,6 +172,8 @@ def main():
     check_vars_to_prompts = {
         'inventory': 'ask_inventory_on_launch',
         'limit': 'ask_limit_on_launch',
+        'job_tags': 'ask_tags_on_launch',
+        'skip_tags': 'ask_skip_tags_on_launch',
         'scm_branch': 'ask_scm_branch_on_launch',
     }
@@ -181,10 +181,8 @@ def run_module(request, collection_import):
         resource_class = resource_module.ControllerAWXKitModule
     elif getattr(resource_module, 'ControllerAPIModule', None):
         resource_class = resource_module.ControllerAPIModule
-    elif getattr(resource_module, 'TowerLegacyModule', None):
-        resource_class = resource_module.TowerLegacyModule
     else:
-        raise RuntimeError("The module has neither a TowerLegacyModule, ControllerAWXKitModule or a ControllerAPIModule")
+        raise RuntimeError("The module has neither a ControllerAWXKitModule nor a ControllerAPIModule")

     with mock.patch.object(resource_class, '_load_params', new=mock_load_params):
         # Call the test utility (like a mock server) instead of issuing HTTP requests
Some files were not shown because too many files have changed in this diff.