Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 11:34:43 -03:30)

Compare commits: 23.8.1...feature_ui (286 commits)
.github/actions/awx_devel_image/action.yml (vendored, 10 changes)

@@ -11,6 +11,12 @@ runs:
       shell: bash
       run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
+    - name: Set lower case owner name
+      shell: bash
+      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
+      env:
+        OWNER: '${{ github.repository_owner }}'
+
     - name: Log in to registry
       shell: bash
       run: |
@@ -18,11 +24,11 @@ runs:
 
     - name: Pre-pull latest devel image to warm cache
       shell: bash
-      run: docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+      run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
 
     - name: Build image for current source checkout
       shell: bash
      run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
         COMPOSE_TAG=${{ github.base_ref }} \
         make docker-compose-build
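The rewritten `run:` lines lean on bash's case-conversion parameter expansion: `${OWNER,,}` lowercases the whole value, which matters because ghcr.io image paths must be lowercase even when the GitHub owner name is not. A minimal sketch of that expansion (the `Ansible-Community` value is illustrative):

```bash
# ${VAR,,} lowercases the entire value; ${VAR,} would lowercase only
# the first character. Requires bash 4+.
OWNER="Ansible-Community"
OWNER_LC="${OWNER,,}"
echo "$OWNER_LC"                      # -> ansible-community
echo "ghcr.io/${OWNER_LC}/awx_devel"  # -> ghcr.io/ansible-community/awx_devel
```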
.github/actions/run_awx_devel/action.yml (vendored, 4 changes)

@@ -35,7 +35,7 @@ runs:
     - name: Start AWX
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
         COMPOSE_TAG=${{ github.base_ref }} \
         COMPOSE_UP_OPTS="-d" \
         make docker-compose
@@ -71,7 +71,7 @@
       id: data
       shell: bash
       run: |
-        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks._sources_awx.IPAddress}}' tools_awx_1)
+        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
         ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
         echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
         echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
.github/pr_labeler.yml (vendored, 3 changes)

@@ -15,5 +15,4 @@
 
 "dependencies":
-  - any: ["awx/ui/package.json"]
-  - any: ["requirements/*.txt"]
-  - any: ["requirements/requirements.in"]
+  - any: ["requirements/*"]
.github/workflows/ci.yml (vendored, 29 changes)

@@ -66,6 +66,8 @@ jobs:
   awx-operator:
     runs-on: ubuntu-latest
     timeout-minutes: 60
+    env:
+      DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
       - name: Checkout awx
         uses: actions/checkout@v3
@@ -94,11 +96,11 @@
       - name: Build AWX image
         working-directory: awx
         run: |
-          ansible-playbook -v tools/ansible/build.yml \
-            -e headless=yes \
-            -e awx_image=awx \
-            -e awx_image_tag=ci \
-            -e ansible_python_interpreter=$(which python3)
+          VERSION=`make version-for-buildyml` make awx-kube-build
+        env:
+          COMPOSE_TAG: ci
+          DEV_DOCKER_TAG_BASE: local
+          HEADLESS: yes
 
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -107,10 +109,19 @@
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)
           make kustomize
-          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
         env:
-          AWX_TEST_IMAGE: awx
+          AWX_TEST_IMAGE: local/awx
           AWX_TEST_VERSION: ci
+          AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
+          STORE_DEBUG_OUTPUT: true
+
+      - name: Upload debug output
+        if: failure()
+        uses: actions/upload-artifact@v3
+        with:
+          name: awx-operator-debug-output
+          path: ${{ env.DEBUG_OUTPUT_DIR }}
 
   collection-sanity:
     name: awx_collection sanity
@@ -127,10 +138,6 @@
 
       - name: Run sanity tests
         run: make test_collection_sanity
-        env:
-          # needed due to cgroupsv2. This is fixed, but a stable release
-          # with the fix has not been made yet.
-          ANSIBLE_TEST_PREFER_PODMAN: 1
 
   collection-integration:
     name: awx_collection integration
.github/workflows/dab-release.yml (vendored, new file, 48 lines)

@@ -0,0 +1,48 @@
+---
+name: django-ansible-base requirements update
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 6 * * *' # once an day @ 6 AM
+permissions:
+  pull-requests: write
+  contents: write
+jobs:
+  dab-pin-newest:
+    runs-on: ubuntu-latest
+    steps:
+      - id: dab-release
+        name: Get current django-ansible-base release version
+        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
+        with:
+          owner: ansible
+          repo: django-ansible-base
+          excludes: prerelease, draft
+
+      - name: Check out respository code
+        uses: actions/checkout@v4
+
+      - id: dab-pinned
+        name: Get current django-ansible-base pinned version
+        run:
+          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
+
+      - name: Update django-ansible-base pinned version to upstream release
+        run:
+          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
+        with:
+          base: devel
+          branch: bump-django-ansible-base
+          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
+          body: |
+            Automated .github/workflows/dab-release.yml
+
+            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
+            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
+          commit-message: |
+            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
+          add-paths:
+            requirements/requirements_git.txt
.github/workflows/devel_images.yml (vendored, 73 changes)

@@ -2,29 +2,52 @@
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
 on:
   workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
+      - feature_*
 jobs:
-  push:
-    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
+  push-development-images:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
+    timeout-minutes: 120
     permissions:
       packages: write
       contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        build-targets:
+          - image-name: awx_devel
+            make-target: docker-compose-buildx
+          - image-name: awx_kube_devel
+            make-target: awx-kube-dev-buildx
+          - image-name: awx
+            make-target: awx-kube-buildx
     steps:
 
+      - name: Skipping build of awx image for non-awx repository
+        run: |
+          echo "Skipping build of awx image for non-awx repository"
+          exit 0
+        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
+
       - uses: actions/checkout@v3
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
 
-      - name: Set lower case owner name
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Set GITHUB_ENV variables
         run: |
           echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
+          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
+          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
         env:
           OWNER: '${{ github.repository_owner }}'
 
@@ -37,23 +60,29 @@ jobs:
        run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
 
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
+      - name: Setup node and npm for old UI build
+        uses: actions/setup-node@v2
+        with:
+          node-version: '16'
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Build images
+      - name: Prebuild old-UI for awx image (to speed up build process)
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
+          sudo apt-get install gettext
+          make ui-release
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Push development images
+      - name: Setup node and npm for the new UI build
+        uses: actions/setup-node@v2
+        with:
+          node-version: '18'
+        if: matrix.build-targets.image-name == 'awx'
+
+      - name: Prebuild new UI for awx image (to speed up build process)
         run: |
-          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
-          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
+          make ui-next
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Push AWX k8s image, only for upstream and feature branches
-        run: docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
-        if: endsWith(github.repository, '/awx')
+      - name: Build and push AWX devel images
+        run: |
+          make ${{ matrix.build-targets.make-target }}
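Several of these steps derive the image tag from the pushed ref with `${GITHUB_REF##*/}`, bash's longest-prefix removal: everything up to and including the last `/` is stripped, leaving the branch name. A quick sketch with illustrative ref values (note that a branch name containing `/` would be truncated to its last segment):

```bash
# ${GITHUB_REF##*/} removes the longest match of '*/' from the front,
# leaving only the final path segment.
GITHUB_REF="refs/heads/devel"
echo "COMPOSE_TAG=${GITHUB_REF##*/}"   # -> COMPOSE_TAG=devel

GITHUB_REF="refs/heads/feature_ui"
echo "COMPOSE_TAG=${GITHUB_REF##*/}"   # -> COMPOSE_TAG=feature_ui
```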
.github/workflows/e2e_test.yml (vendored, 75 changes — file deleted)

@@ -1,75 +0,0 @@
----
-name: E2E Tests
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-
-on:
-  pull_request_target:
-    types: [labeled]
-jobs:
-  e2e-test:
-    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
-    runs-on: ubuntu-latest
-    timeout-minutes: 40
-    permissions:
-      packages: write
-      contents: read
-    strategy:
-      matrix:
-        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: true
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Pull awx_cypress_base image
-        run: |
-          docker pull quay.io/awx/awx_cypress_base:latest
-
-      - name: Checkout test project
-        uses: actions/checkout@v3
-        with:
-          repository: ${{ github.repository_owner }}/tower-qa
-          ssh-key: ${{ secrets.QA_REPO_KEY }}
-          path: tower-qa
-          ref: devel
-
-      - name: Build cypress
-        run: |
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          docker build -t awx-pf-tests .
-
-      - name: Run E2E tests
-        env:
-          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
-        run: |
-          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
-          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
-          export COMMIT_INFO_SHA=$GITHUB_SHA
-          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          AWX_IP=${{ steps.awx.outputs.ip }}
-          printenv > .env
-          echo "Executing tests:"
-          docker run \
-            --network '_sources_default' \
-            --ipc=host \
-            --env-file=.env \
-            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
-            -e CYPRESS_AWX_E2E_USERNAME=admin \
-            -e CYPRESS_AWX_E2E_PASSWORD='password' \
-            -e COMMAND="npm run cypress-concurrently-gha" \
-            -v /dev/shm:/dev/shm \
-            -v $PWD:/e2e \
-            -w /e2e \
-            awx-pf-tests run --project .
-
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: e2e-${{ matrix.job }}.log
.github/workflows/promote.yml (vendored, 53 changes)

@@ -7,7 +7,11 @@ env:
 on:
   release:
     types: [published]
 
+  workflow_dispatch:
+    inputs:
+      tag_name:
+        description: 'Name for the tag of the release.'
+        required: true
 permissions:
   contents: read  # to fetch code (actions/checkout)
 
@@ -17,6 +21,16 @@
     runs-on: ubuntu-latest
     timeout-minutes: 90
     steps:
+      - name: Set GitHub Env vars for workflow_dispatch event
+        if: ${{ github.event_name == 'workflow_dispatch' }}
+        run: |
+          echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
+
+      - name: Set GitHub Env vars if release event
+        if: ${{ github.event_name == 'release' }}
+        run: |
+          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
+
       - name: Checkout awx
         uses: actions/checkout@v3
 
@@ -43,16 +57,21 @@
       - name: Build collection and publish to galaxy
         env:
           COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
-          COLLECTION_VERSION: ${{ github.event.release.tag_name }}
+          COLLECTION_VERSION: ${{ env.TAG_NAME }}
           COLLECTION_TEMPLATE_VERSION: true
         run: |
+          sudo apt-get install jq
           make build_collection
-          if [ "$(curl -L --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
-            echo "Galaxy release already done"; \
-          else \
+          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
+          if [[ "$count" == "1" ]]; then
+            echo "Galaxy release already done";
+          elif [[ "$count" == "0" ]]; then
            ansible-galaxy collection publish \
              --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
+              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
+          else
+            echo "Unexpected count from galaxy search: $count";
+            exit 1;
           fi
 
@@ -64,6 +83,8 @@
         if: ${{ github.repository_owner != 'ansible' }}
 
       - name: Build awxkit and upload to pypi
+        env:
+          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
         run: |
           git reset --hard
           cd awxkit && python3 setup.py sdist bdist_wheel
 
@@ -83,11 +104,15 @@
 
       - name: Re-tag and promote awx image
         run: |
-          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
-          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository }}:latest
-          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
+            --tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
+            --tag quay.io/${{ github.repository }}:latest
+
+      - name: Re-tag and promote awx-ee image
+        run: |
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
+            --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
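The switch from pull/tag/push to `docker buildx imagetools create` is what keeps multi-arch releases intact: imagetools copies the manifest list itself between registries, so every architecture published to ghcr.io is promoted rather than only the one the runner would have pulled, and nothing is loaded into the local Docker engine. A minimal sketch of the same operation (image names and tags illustrative):

```bash
# Copy a multi-arch manifest list from one registry to another; both
# --tag outputs end up pointing at the same digest, covering every
# architecture in the source manifest.
docker buildx imagetools create \
  ghcr.io/ansible/awx:23.9.0 \
  --tag quay.io/ansible/awx:23.9.0 \
  --tag quay.io/ansible/awx:latest
```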
.github/workflows/stage.yml (vendored, 96 changes)

@@ -49,13 +49,11 @@
         with:
           path: awx
 
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+      - name: Checkout awx-operator
+        uses: actions/checkout@v3
         with:
-          python-version: ${{ env.py_version }}
+          repository: ${{ github.repository_owner }}/awx-operator
+          path: awx-operator
 
       - name: Checkout awx-logos
         uses: actions/checkout@v3
@@ -63,57 +61,85 @@
           repository: ansible/awx-logos
           path: awx-logos
 
-      - name: Checkout awx-operator
-        uses: actions/checkout@v3
+      - name: Get python version from Makefile
+        working-directory: awx
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v4
         with:
-          repository: ${{ github.repository_owner }}/awx-operator
-          path: awx-operator
+          python-version: ${{ env.py_version }}
 
       - name: Install playbook dependencies
         run: |
           python3 -m pip install docker
 
-      - name: Build and stage AWX
-        working-directory: awx
-        run: |
-          ansible-playbook -v tools/ansible/build.yml \
-            -e registry=ghcr.io \
-            -e registry_username=${{ github.actor }} \
-            -e registry_password=${{ secrets.GITHUB_TOKEN }} \
-            -e awx_image=${{ github.repository }} \
-            -e awx_version=${{ github.event.inputs.version }} \
-            -e ansible_python_interpreter=$(which python3) \
-            -e push=yes \
-            -e awx_official=yes
-
       - name: Log into registry ghcr.io
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Log into registry quay.io
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
+      - name: Copy logos for inclusion in sdist for official build
+        working-directory: awx
+        run: |
+          cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
+
+      - name: Setup node and npm
+        uses: actions/setup-node@v2
         with:
-          registry: quay.io
-          username: ${{ secrets.QUAY_USER }}
-          password: ${{ secrets.QUAY_TOKEN }}
+          node-version: '16.13.1'
 
+      - name: Prebuild UI for awx image (to speed up build process)
+        working-directory: awx
+        run: |
+          sudo apt-get install gettext
+          make ui-release
+          make ui-next
+
+      - name: Set build env variables
+        run: |
+          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+          echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
+          echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
+        env:
+          OWNER: ${{ github.repository_owner }}
+
+      - name: Build and stage AWX
+        working-directory: awx
+        env:
+          DOCKER_BUILDX_PUSH: true
+          HEADLESS: false
+          PLATFORMS: linux/amd64,linux/arm64
+        run: |
+          make awx-kube-buildx
+
       - name: tag awx-ee:latest with version input
         run: |
-          docker pull quay.io/ansible/awx-ee:latest
-          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
-          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker buildx imagetools create \
+            quay.io/ansible/awx-ee:latest \
+            --tag ${AWX_EE_TEST_IMAGE}
 
       - name: Stage awx-operator image
         working-directory: awx-operator
         run: |
           BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
             --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
-          IMG=ghcr.io/${{ github.repository_owner }}/awx-operator:${{ github.event.inputs.operator_version }} \
+          IMG=${AWX_OPERATOR_TEST_IMAGE} \
           make docker-buildx
 
+      - name: Pulling images for test deployment with awx-operator
+        # awx operator molecue test expect to kind load image and buildx exports image to registry and not local
+        run: |
+          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
+          docker pull -q ${AWX_EE_TEST_IMAGE}
+          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
+
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
         run: |
@@ -122,10 +148,6 @@
           sudo rm -f $(which kustomize)
           make kustomize
           KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
-        env:
-          AWX_TEST_IMAGE: ${{ github.repository }}
-          AWX_TEST_VERSION: ${{ github.event.inputs.version }}
-          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
 
       - name: Create draft release for AWX
         working-directory: awx
.github/workflows/upload_schema.yml (vendored, 2 changes)

@@ -34,7 +34,7 @@
 
       - name: Pre-pull image to warm build cache
         run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
 
       - name: Build image
         run: |
.gitignore (vendored, 5 changes)

@@ -46,6 +46,11 @@ tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
 tools/docker-compose/keycloak.awx.realm.json
 
+!tools/docker-compose/editable_dependencies
+tools/docker-compose/editable_dependencies/*
+!tools/docker-compose/editable_dependencies/README.md
+!tools/docker-compose/editable_dependencies/install.sh
+
 # Tower setup playbook testing
 setup/test/roles/postgresql
 **/provision_docker
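The four new rules use gitignore's re-inclusion idiom: ignore a directory's contents with `dir/*` while un-ignoring the directory itself and selected files with `!`. The `!dir` line matters because git will not descend into a fully ignored directory to apply later `!` rules. A minimal sketch of the same pattern (paths illustrative):

```bash
# Ignore everything inside vendor_overrides except README.md.
cat > .gitignore <<'EOF'
!vendor_overrides
vendor_overrides/*
!vendor_overrides/README.md
EOF
git check-ignore -v vendor_overrides/anything.txt  # reports the ignore rule
git check-ignore -v vendor_overrides/README.md     # no output: not ignored
```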
.vscode/launch.json (vendored, new file, 113 lines)

@@ -0,0 +1,113 @@
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "run_ws_heartbeat",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_ws_heartbeat"],
+            "django": true,
+            "preLaunchTask": "stop awx-ws-heartbeat",
+            "postDebugTask": "start awx-ws-heartbeat"
+        },
+        {
+            "name": "run_cache_clear",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_cache_clear"],
+            "django": true,
+            "preLaunchTask": "stop awx-cache-clear",
+            "postDebugTask": "start awx-cache-clear"
+        },
+        {
+            "name": "run_callback_receiver",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_callback_receiver"],
+            "django": true,
+            "preLaunchTask": "stop awx-receiver",
+            "postDebugTask": "start awx-receiver"
+        },
+        {
+            "name": "run_dispatcher",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_dispatcher"],
+            "django": true,
+            "preLaunchTask": "stop awx-dispatcher",
+            "postDebugTask": "start awx-dispatcher"
+        },
+        {
+            "name": "run_rsyslog_configurer",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_rsyslog_configurer"],
+            "django": true,
+            "preLaunchTask": "stop awx-rsyslog-configurer",
+            "postDebugTask": "start awx-rsyslog-configurer"
+        },
+        {
+            "name": "run_cache_clear",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_cache_clear"],
+            "django": true,
+            "preLaunchTask": "stop awx-cache-clear",
+            "postDebugTask": "start awx-cache-clear"
+        },
+        {
+            "name": "run_wsrelay",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_wsrelay"],
+            "django": true,
+            "preLaunchTask": "stop awx-wsrelay",
+            "postDebugTask": "start awx-wsrelay"
+        },
+        {
+            "name": "daphne",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "/var/lib/awx/venv/awx/bin/daphne",
+            "args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
+            "django": true,
+            "preLaunchTask": "stop awx-daphne",
+            "postDebugTask": "start awx-daphne"
+        },
+        {
+            "name": "runserver(uwsgi alternative)",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["runserver", "127.0.0.1:8052"],
+            "django": true,
+            "preLaunchTask": "stop awx-uwsgi",
+            "postDebugTask": "start awx-uwsgi"
+        },
+        {
+            "name": "runserver_plus(uwsgi alternative)",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["runserver_plus", "127.0.0.1:8052"],
+            "django": true,
+            "preLaunchTask": "stop awx-uwsgi and install Werkzeug",
+            "postDebugTask": "start awx-uwsgi"
+        },
+        {
+            "name": "shell_plus",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["shell_plus"],
+            "django": true,
+        },
+    ]
+}
.vscode/tasks.json (vendored, new file, 100 lines)

@@ -0,0 +1,100 @@
+{
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "start awx-cache-clear",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-cache-clear"
+        },
+        {
+            "label": "stop awx-cache-clear",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-cache-clear"
+        },
+        {
+            "label": "start awx-daphne",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-daphne"
+        },
+        {
+            "label": "stop awx-daphne",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-daphne"
+        },
+        {
+            "label": "start awx-dispatcher",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-dispatcher"
+        },
+        {
+            "label": "stop awx-dispatcher",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-dispatcher"
+        },
+        {
+            "label": "start awx-receiver",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-receiver"
+        },
+        {
+            "label": "stop awx-receiver",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-receiver"
+        },
+        {
+            "label": "start awx-rsyslog-configurer",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
+        },
+        {
+            "label": "stop awx-rsyslog-configurer",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
+        },
+        {
+            "label": "start awx-rsyslogd",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-rsyslogd"
+        },
+        {
+            "label": "stop awx-rsyslogd",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-rsyslogd"
+        },
+        {
+            "label": "start awx-uwsgi",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-uwsgi"
+        },
+        {
+            "label": "stop awx-uwsgi",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-uwsgi"
+        },
+        {
+            "label": "stop awx-uwsgi and install Werkzeug",
+            "type": "shell",
+            "command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
+        },
+        {
+            "label": "start awx-ws-heartbeat",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-ws-heartbeat"
+        },
+        {
+            "label": "stop awx-ws-heartbeat",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
+        },
+        {
+            "label": "start awx-wsrelay",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-wsrelay"
+        },
+        {
+            "label": "stop awx-wsrelay",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-wsrelay"
+        }
+    ]
+}
@@ -11,6 +11,8 @@ ignore: |
   # django template files
   awx/api/templates/instance_install_bundle/**
   .readthedocs.yaml
+  tools/loki
+  tools/otel
 
 extends: default
@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_triage` label
 Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
 These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
 
-The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
+The `state:needs_triage` label will remain on your pull request until a person has looked at it.
 
 You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
 The comment will look something like `CC @matburt @wwitzel3 ...`.
Makefile (145 changes)

@@ -1,8 +1,8 @@
 -include awx/ui_next/Makefile
 
-PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
+PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
 SHELL := bash
-DOCKER_COMPOSE ?= docker-compose
+DOCKER_COMPOSE ?= docker compose
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
@@ -47,6 +47,14 @@ VAULT ?= false
 VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
+# If set to true docker-compose will also start an OpenTelemetry Collector instance
+OTEL ?= false
+# If set to true docker-compose will also start a Loki instance
+LOKI ?= false
+# If set to true docker-compose will install editable dependencies
+EDITABLE_DEPENDENCIES ?= false
+# If set to true, use tls for postgres connection
+PG_TLS ?= false
 
 VENV_BASE ?= /var/lib/awx/venv
 
@@ -55,6 +63,11 @@ DEV_DOCKER_OWNER ?= ansible
 DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
+IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
+
+# Common command to use for running ansible-playbook
+ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
 
 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 
@@ -63,7 +76,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
 
 NAME ?= awx
 
@@ -75,6 +88,21 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
 
 I18N_FLAG_FILE = .i18n_built
 
+## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
+PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
+
+# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
+# DOCKER_CACHE=--no-cache make docker-compose-build
+ifeq ($(DOCKER_CACHE),)
+DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
+DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
+DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
+else
+DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
+endif
+
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 	develop refresh adduser migrate dbchange \
 	receiver test test_unit test_coverage coverage_html \
@@ -213,8 +241,6 @@ collectstatic:
 	fi; \
 	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 
-DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
-
 uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
@@ -222,7 +248,7 @@ uwsgi: collectstatic
 	uwsgi /etc/tower/uwsgi.ini
 
 awx-autoreload:
-	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
+	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
 
 daphne:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -302,7 +328,7 @@ swagger: reports
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
+	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
 
 check: black
 
@@ -359,7 +385,7 @@ symlink_collection:
 	ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
 
 awx_collection_build: $(shell find awx_collection -type f)
-	ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
+	$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
 		-e collection_package=$(COLLECTION_PACKAGE) \
 		-e collection_namespace=$(COLLECTION_NAMESPACE) \
 		-e collection_version=$(COLLECTION_VERSION) \
@@ -513,10 +539,10 @@ endif
 
 docker-compose-sources: .git/hooks/pre-commit
 	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-		ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+		$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
 	fi;
 
-	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
 		-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
 		-e awx_image_tag=$(COMPOSE_TAG) \
 		-e receptor_image=$(RECEPTOR_IMAGE) \
@@ -532,16 +558,26 @@ docker-compose-sources: .git/hooks/pre-commit
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
 		-e enable_tacacs=$(TACACS) \
-		$(EXTRA_SOURCES_ANSIBLE_OPTS)
+		-e enable_otel=$(OTEL) \
+		-e enable_loki=$(LOKI) \
+		-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+		-e pg_tls=$(PG_TLS) \
+		$(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
-		-e enable_ldap=$(LDAP);
+		-e enable_ldap=$(LDAP); \
+	$(MAKE) docker-compose-up
+
+docker-compose-up:
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
 
+docker-compose-down:
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
+
 docker-compose-credential-plugins: awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
@@ -573,7 +609,7 @@ docker-compose-container-group-clean:
 .PHONY: Dockerfile.dev
 ## Generate Dockerfile.dev for awx_devel image
 Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 		-e dockerfile_name=Dockerfile.dev \
 		-e build_dev=True \
 		-e receptor_image=$(RECEPTOR_IMAGE)
@@ -584,37 +620,28 @@ docker-compose-build: Dockerfile.dev
 		-f Dockerfile.dev \
 		-t $(DEVEL_IMAGE_NAME) \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
+		$(DOCKER_DEVEL_CACHE_FLAG) .
 
-# ## Build awx_devel image for docker compose development environment for multiple architectures
-# docker-compose-buildx: Dockerfile.dev
-# 	DOCKER_BUILDKIT=1 docker build \
-# 		-f Dockerfile.dev \
-# 		-t $(DEVEL_IMAGE_NAME) \
-# 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-# 		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
-
-## Build awx_devel image for docker compose development environment for multiple architectures
-# PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
-# architectures. (i.e. make docker-buildx IMG=myregistry/mypoperator:0.0.1). To use this option you need to:
-# - able to use docker buildx . More info: https://docs.docker.com/build/buildx/
-# - have enable BuildKit, More info: https://docs.docker.com/develop/develop-images/build_enhancements/
-# - be able to push the image for your registry (i.e. if you do not inform a valid value via IMG=<myregistry/image:<tag>> than the export will fail)
-# To properly provided solutions that supports more than one platform you should use this option.
-PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
-.PHONY: docker-compose-buildx
-docker-compose-buildx: Dockerfile.dev ## Build and push docker image for the manager for cross-platform support
-	- docker buildx create --name project-v3-builder
-	docker buildx use project-v3-builder
-	- docker buildx build --push $(BUILD_ARGS) --platform=$(PLATFORMS) --tag $(DEVEL_IMAGE_NAME) -f Dockerfile.dev .
-	- docker buildx rm project-v3-builder
+## Build awx_devel image for docker compose development environment for multiple architectures
+docker-compose-buildx: Dockerfile.dev
+	- docker buildx create --name docker-compose-buildx
+	docker buildx use docker-compose-buildx
+	- docker buildx build \
+		--push \
+		--build-arg BUILDKIT_INLINE_CACHE=1 \
+		$(DOCKER_DEVEL_CACHE_FLAG) \
+		--platform=$(PLATFORMS) \
+		--tag $(DEVEL_IMAGE_NAME) \
+		-f Dockerfile.dev .
+	- docker buildx rm docker-compose-buildx
 
 docker-clean:
 	-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
 	-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
 
 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-	docker volume rm -f tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
+	docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
 
 docker-refresh: docker-clean docker-compose
 
@@ -636,9 +663,6 @@ clean-elk:
 	docker rm tools_elasticsearch_1
 	docker rm tools_kibana_1
 
-psql-container:
-	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
-
 VERSION:
 	@echo "awx: $(VERSION)"
 
@@ -659,7 +683,7 @@ version-for-buildyml:
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 		-e receptor_image=$(RECEPTOR_IMAGE) \
 		-e headless=$(HEADLESS)
 
@@ -669,12 +693,29 @@ awx-kube-build: Dockerfile
 		--build-arg VERSION=$(VERSION) \
 		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
 		--build-arg HEADLESS=$(HEADLESS) \
-		-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
+		$(DOCKER_KUBE_CACHE_FLAG) \
+		-t $(IMAGE_KUBE) .
 
+## Build multi-arch awx image for deployment on Kubernetes environment.
+awx-kube-buildx: Dockerfile
+	- docker buildx create --name awx-kube-buildx
+	docker buildx use awx-kube-buildx
+	- docker buildx build \
+		--push \
+		--build-arg VERSION=$(VERSION) \
+		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+		--build-arg HEADLESS=$(HEADLESS) \
+		--platform=$(PLATFORMS) \
+		$(DOCKER_KUBE_CACHE_FLAG) \
+		--tag $(IMAGE_KUBE) \
+		-f Dockerfile .
+	- docker buildx rm awx-kube-buildx
+
 
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 		-e dockerfile_name=Dockerfile.kube-dev \
 		-e kube_dev=True \
 		-e template_dest=_build_kube_dev \
@@ -684,12 +725,24 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 awx-kube-dev-build: Dockerfile.kube-dev
 	DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-		-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
+		$(DOCKER_KUBE_DEV_CACHE_FLAG) \
+		-t $(IMAGE_KUBE_DEV) .
 
+## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
+awx-kube-dev-buildx: Dockerfile.kube-dev
+	- docker buildx create --name awx-kube-dev-buildx
+	docker buildx use awx-kube-dev-buildx
+	- docker buildx build \
+		--push \
+		--build-arg BUILDKIT_INLINE_CACHE=1 \
+		$(DOCKER_KUBE_DEV_CACHE_FLAG) \
+		--platform=$(PLATFORMS) \
+		--tag $(IMAGE_KUBE_DEV) \
+		-f Dockerfile.kube-dev .
+	- docker buildx rm awx-kube-dev-buildx
+
 kind-dev-load: awx-kube-dev-build
-	$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+	$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
 
 # Translation TASKS
 # --------------------------------------
@@ -154,10 +154,12 @@ def manage():
     from django.conf import settings
     from django.core.management import execute_from_command_line
 
-    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
+    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
+    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
+    # The return of connection.pg_version is something like 12013
     if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
         if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("Postgres version 12 is required\n")
+            sys.stderr.write("At a minimum, postgres version 12 is required\n")
             sys.exit(1)
 
     if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
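The check recovers the major version by integer division: libpq (and therefore Django's `connection.pg_version`) encodes the server version as major * 10000 + minor for PostgreSQL 10+, so `// 10000` yields the major number. A quick illustration of the arithmetic (the sample value is illustrative):

```bash
# PostgreSQL 12.13 is reported as 120013 = 12*10000 + 13;
# integer division by 10000 recovers the major version.
pg_version=120013
echo $(( pg_version / 10000 ))   # -> 12, so the >= 12 requirement passes
```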
@@ -93,6 +93,7 @@
     default='',
     label=_('Login redirect override URL'),
     help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
+    warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
     category=_('Authentication'),
     category_slug='authentication',
 )
@@ -30,14 +30,21 @@ from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.renderers import StaticHTMLRenderer
|
||||
from rest_framework.negotiation import DefaultContentNegotiation
|
||||
|
||||
# django-ansible-base
|
||||
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
|
||||
from ansible_base.lib.utils.models import get_all_field_names
|
||||
from ansible_base.lib.utils.requests import get_remote_host
|
||||
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
|
||||
from ansible_base.rbac.permission_registry import permission_registry
|
||||
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
|
||||
|
||||
# AWX
|
||||
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
|
||||
from awx.main.models.rbac import give_creator_permissions
|
||||
from awx.main.access import optimize_queryset
|
||||
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
|
||||
from awx.main.utils.licensing import server_product_name
|
||||
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
|
||||
from awx.main.views import ApiErrorView
|
||||
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
|
||||
from awx.api.versioning import URLPathVersioning
|
||||
@@ -89,20 +96,26 @@ class LoggedLoginView(auth_views.LoginView):
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
|
||||
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
|
||||
if request.user.is_authenticated:
|
||||
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
|
||||
ret.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
|
||||
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
|
||||
ret.set_cookie(
|
||||
'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
|
||||
)
|
||||
ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
|
||||
|
||||
return ret
|
||||
else:
|
||||
if 'username' in self.request.POST:
|
||||
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
|
||||
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
|
||||
ret.status_code = 401
|
||||
return ret

class LoggedLogoutView(auth_views.LogoutView):

+   success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()

    def dispatch(self, request, *args, **kwargs):
        original_user = getattr(request, 'user', None)
        ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)

@@ -142,22 +155,23 @@ class APIView(views.APIView):
        Store the Django REST Framework Request object as an attribute on the
        normal Django request, store time the request started.
        """
+       remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']

        self.time_started = time.time()
        if getattr(settings, 'SQL_DEBUG', False):
            self.queries_before = len(connection.queries)

+       if 'HTTP_X_TRUSTED_PROXY' in request.environ:
+           if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
+               remote_headers = settings.REMOTE_HOST_HEADERS
+           else:
+               logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")

-       # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
-       # they respect the allowed proxy list
-       if all(
-           [
-               settings.PROXY_IP_ALLOWED_LIST,
-               request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
-               request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
-           ]
-       ):
-           for custom_header in settings.REMOTE_HOST_HEADERS:
-               if custom_header.startswith('HTTP_'):
-                   request.environ.pop(custom_header, None)
+       if settings.PROXY_IP_ALLOWED_LIST:
+           if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
+               delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)

        drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
        request.drf_request = drf_request
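The X-Trusted-Proxy branch above delegates to validate_x_trusted_proxy_header from django-ansible-base, whose implementation this diff does not show. A plausible shape for such a shared-secret check, offered only as a hedged sketch (the HMAC scheme and key handling here are assumptions, not the library's actual code):

    import hmac
    import hashlib

    def validate_shared_secret_header(header_value: str, shared_secret: bytes) -> bool:
        # Illustrative only: the real validate_x_trusted_proxy_header may use a
        # different encoding or key-derivation scheme.
        expected = hmac.new(shared_secret, b'x-trusted-proxy', hashlib.sha256).hexdigest()
        # compare_digest avoids leaking the match position through timing.
        return hmac.compare_digest(header_value, expected)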

@@ -202,17 +216,21 @@ class APIView(views.APIView):
            return response

        if response.status_code >= 400:
+           ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
            msg_data = {
                'status_code': response.status_code,
                'user_name': request.user,
                'url_path': request.path,
-               'remote_addr': request.META.get('REMOTE_ADDR', None),
+               'remote_addr': ip,
            }

            if type(response.data) is dict:
                msg_data['error'] = response.data.get('error', response.status_text)
            elif type(response.data) is list:
-               msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+               if len(response.data) > 0 and isinstance(response.data[0], str):
+                   msg_data['error'] = str(response.data[0])
+               else:
+                   msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
            else:
                msg_data['error'] = response.status_text

@@ -472,7 +490,11 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):

class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
    # Base class for a list view that allows creating new objects.
-   pass
+   def perform_create(self, serializer):
+       super().perform_create(serializer)
+       if serializer.Meta.model in permission_registry.all_registered_models:
+           if self.request and self.request.user:
+               give_creator_permissions(self.request.user, serializer.instance)


class ParentMixin(object):

@@ -792,6 +814,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):


class ResourceAccessList(ParentMixin, ListAPIView):
+   deprecated = True
    serializer_class = ResourceAccessListElementSerializer
    ordering = ('username',)

@@ -799,6 +822,15 @@ class ResourceAccessList(ParentMixin, ListAPIView):
        obj = self.get_parent_object()

        content_type = ContentType.objects.get_for_model(obj)

+       if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+           ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
+           qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
+           auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
+           if auditor_role:
+               qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
+           return qs.distinct()
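The `|` operator above unions QuerySets at the SQL level, and `.distinct()` drops users matched by more than one branch. The same pattern in isolation (model and filters are illustrative, and this needs a configured Django project to run):

    # Sketch: OR-composition of QuerySets, as used above. Django merges the
    # filters into one SQL query; distinct() removes rows matched twice.
    from django.contrib.auth.models import User

    privileged = User.objects.filter(is_superuser=True)
    named_smith = User.objects.filter(last_name='Smith')
    combined = (privileged | named_smith).distinct()  # one query, no duplicates
    print(combined.query)  # inspect the generated SQL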

        roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))

        ancestors = set()

@@ -958,7 +990,7 @@ class CopyAPIView(GenericAPIView):
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
        )
-       if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
-           new_obj.admin_role.members.add(request.user)
+       give_creator_permissions(request.user, new_obj)
        if sub_objs:
            permission_check_func = None
            if hasattr(type(self), 'deep_copy_permission_check_func'):

@@ -36,11 +36,13 @@ class Metadata(metadata.SimpleMetadata):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)
+       field_info['hidden'] = getattr(field, 'hidden', False)

        text_attrs = [
            'read_only',
            'label',
            'help_text',
+           'warning_text',
            'min_length',
            'max_length',
            'min_value',

@@ -43,11 +43,14 @@ from rest_framework.utils.serializer_helpers import ReturnList
# Django-Polymorphic
from polymorphic.models import PolymorphicModel

+# django-ansible-base
+from ansible_base.lib.utils.models import get_type_for_model
+from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.rbac import permission_registry

# AWX
from awx.main.access import get_user_capabilities
-from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE
+from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission
from awx.main.models import (
    ActivityStream,
    AdHocCommand,

@@ -102,7 +105,7 @@ from awx.main.models import (
    CLOUD_INVENTORY_SOURCES,
)
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
-from awx.main.models.rbac import role_summary_fields_generator, RoleAncestorEntry
+from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role
from awx.main.fields import ImplicitRoleField
from awx.main.utils import (
    get_model_for_type,

@@ -191,6 +194,7 @@ SUMMARIZABLE_FK_FIELDS = {
    'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
    'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
    'credential_type': DEFAULT_SUMMARY_FIELDS,
+   'resource': ('ansible_id', 'resource_type'),
}


@@ -2762,13 +2766,26 @@ class ResourceAccessListElementSerializer(UserSerializer):
        team_content_type = ContentType.objects.get_for_model(Team)
        content_type = ContentType.objects.get_for_model(obj)

-       def get_roles_on_resource(parent_role):
-           "Returns a string list of the roles a parent_role has for current obj."
-           return list(
-               RoleAncestorEntry.objects.filter(ancestor=parent_role, content_type_id=content_type.id, object_id=obj.id)
-               .values_list('role_field', flat=True)
-               .distinct()
-           )
+       reversed_org_map = {}
+       for k, v in org_role_to_permission.items():
+           reversed_org_map[v] = k
+       reversed_role_map = {}
+       for k, v in to_permissions.items():
+           reversed_role_map[v] = k

+       def get_roles_from_perms(perm_list):
+           """given a list of permission codenames return a list of role names"""
+           role_names = set()
+           for codename in perm_list:
+               action = codename.split('_', 1)[0]
+               if action in reversed_role_map:
+                   role_names.add(reversed_role_map[action])
+               elif codename in reversed_org_map:
+                   if isinstance(obj, Organization):
+                       role_names.add(reversed_org_map[codename])
+           if 'view_organization' not in role_names:
+               role_names.add('read_role')
+           return list(role_names)
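The two reversed dicts above invert the role-to-permission maps so a permission codename can be traced back to a role name. A toy illustration of the same reversal; these mapping contents are invented for illustration and do not reproduce AWX's actual org_role_to_permission or to_permissions tables:

    # Toy version of the reversal above; mapping contents are made up.
    to_permissions = {'admin_role': 'change', 'execute_role': 'execute', 'read_role': 'view'}
    reversed_role_map = {v: k for k, v in to_permissions.items()}

    def roles_for(codenames):
        # 'change_jobtemplate' -> action 'change' -> 'admin_role'
        return sorted({reversed_role_map[c.split('_', 1)[0]] for c in codenames if c.split('_', 1)[0] in reversed_role_map})

    print(roles_for(['change_jobtemplate', 'view_jobtemplate']))  # ['admin_role', 'read_role']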

        def format_role_perm(role):
            role_dict = {'id': role.id, 'name': role.name, 'description': role.description}

@@ -2785,13 +2802,21 @@ class ResourceAccessListElementSerializer(UserSerializer):
            else:
                # Singleton roles should not be managed from this view, as per copy/edit rework spec
                role_dict['user_capabilities'] = {'unattach': False}
-           return {'role': role_dict, 'descendant_roles': get_roles_on_resource(role)}

+           model_name = content_type.model
+           if isinstance(obj, Organization):
+               descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name) or codename.startswith('add_')]
+           else:
+               descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name)]

+           return {'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)}

        def format_team_role_perm(naive_team_role, permissive_role_ids):
            ret = []
+           team = naive_team_role.content_object
            team_role = naive_team_role
            if naive_team_role.role_field == 'admin_role':
-               team_role = naive_team_role.content_object.member_role
+               team_role = team.member_role
            for role in team_role.children.filter(id__in=permissive_role_ids).all():
                role_dict = {
                    'id': role.id,

@@ -2811,10 +2836,87 @@ class ResourceAccessListElementSerializer(UserSerializer):
                else:
                    # Singleton roles should not be managed from this view, as per copy/edit rework spec
                    role_dict['user_capabilities'] = {'unattach': False}
-               ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(team_role)})

+               descendant_perms = list(
+                   RoleEvaluation.objects.filter(role__in=team.has_roles.all(), object_id=obj.id, content_type_id=content_type.id)
+                   .values_list('codename', flat=True)
+                   .distinct()
+               )

+               ret.append({'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)})
            return ret

+       gfk_kwargs = dict(content_type_id=content_type.id, object_id=obj.id)
+       direct_permissive_role_ids = Role.objects.filter(**gfk_kwargs).values_list('id', flat=True)

+       if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+           ret['summary_fields']['direct_access'] = []
+           ret['summary_fields']['indirect_access'] = []

+           new_roles_seen = set()
+           all_team_roles = set()
+           all_permissive_role_ids = set()
+           for evaluation in RoleEvaluation.objects.filter(role__in=user.has_roles.all(), **gfk_kwargs).prefetch_related('role'):
+               new_role = evaluation.role
+               if new_role.id in new_roles_seen:
+                   continue
+               new_roles_seen.add(new_role.id)
+               old_role = get_role_from_object_role(new_role)
+               all_permissive_role_ids.add(old_role.id)

+               if int(new_role.object_id) == obj.id and new_role.content_type_id == content_type.id:
+                   ret['summary_fields']['direct_access'].append(format_role_perm(old_role))
+               elif new_role.content_type_id == team_content_type.id:
+                   all_team_roles.add(old_role)
+               else:
+                   ret['summary_fields']['indirect_access'].append(format_role_perm(old_role))

+           # Lazy role creation gives us a big problem, where some intermediate roles are not easy to find,
+           # like when a team has indirect permission, so here we get all roles the user's teams have;
+           # these contribute to all potential permission-granting roles of the object
+           user_teams_qs = permission_registry.team_model.objects.filter(member_roles__in=ObjectRole.objects.filter(users=user))
+           team_obj_roles = ObjectRole.objects.filter(teams__in=user_teams_qs)
+           for evaluation in RoleEvaluation.objects.filter(role__in=team_obj_roles, **gfk_kwargs).prefetch_related('role'):
+               new_role = evaluation.role
+               if new_role.id in new_roles_seen:
+                   continue
+               new_roles_seen.add(new_role.id)
+               old_role = get_role_from_object_role(new_role)
+               all_permissive_role_ids.add(old_role.id)

+           # In DAB RBAC, superuser is strictly a user flag, and global roles are not in the RoleEvaluation table
+           if user.is_superuser:
+               ret['summary_fields'].setdefault('indirect_access', [])
+               all_role_names = [field.name for field in obj._meta.get_fields() if isinstance(field, ImplicitRoleField)]
+               ret['summary_fields']['indirect_access'].append(
+                   {
+                       "role": {
+                           "id": None,
+                           "name": _("System Administrator"),
+                           "description": _("Can manage all aspects of the system"),
+                           "user_capabilities": {"unattach": False},
+                       },
+                       "descendant_roles": all_role_names,
+                   }
+               )
+           elif user.is_system_auditor:
+               ret['summary_fields'].setdefault('indirect_access', [])
+               ret['summary_fields']['indirect_access'].append(
+                   {
+                       "role": {
+                           "id": None,
+                           "name": _("System Auditor"),
+                           "description": _("Can view all aspects of the system"),
+                           "user_capabilities": {"unattach": False},
+                       },
+                       "descendant_roles": ["read_role"],
+                   }
+               )

+           ret['summary_fields']['direct_access'].extend([y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in all_team_roles) for y in x])

+           return ret

-       direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
        all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)

        direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()

@@ -3083,7 +3185,7 @@ class CredentialSerializerCreate(CredentialSerializer):
        credential = super(CredentialSerializerCreate, self).create(validated_data)

        if user:
-           credential.admin_role.members.add(user)
+           give_creator_permissions(user, credential)
        if team:
            if not credential.organization or team.organization.id != credential.organization.id:
                raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})

@@ -5279,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
    )

    def get_body(self, obj):
-       if obj.notification_type in ('webhook', 'pagerduty'):
+       if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
            if isinstance(obj.body, dict):
                if 'body' in obj.body:
                    return obj.body['body']

@@ -5301,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
    def to_representation(self, obj):
        ret = super(NotificationSerializer, self).to_representation(obj)

-       if obj.notification_type == 'webhook':
+       if obj.notification_type in ('webhook', 'awssns'):
            ret.pop('subject')
-       if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
+       if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
            ret.pop('body')
        return ret

@@ -5594,7 +5696,7 @@ class InstanceSerializer(BaseSerializer):
        res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
        res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
        res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
-       if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
+       if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP] and not obj.managed:
            res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
        if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
            if obj.node_type == 'execution':

@@ -2,28 +2,21 @@
# All Rights Reserved.

+from django.conf import settings
-from django.urls import NoReverseMatch

-from rest_framework.reverse import _reverse
+from rest_framework.reverse import reverse as drf_reverse
from rest_framework.versioning import URLPathVersioning as BaseVersioning


-def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
-    """
-    Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
-    query string parameters.
-    """
-    scheme = getattr(request, 'versioning_scheme', None)
-    if scheme is not None:
-        try:
-            url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
-        except NoReverseMatch:
-            # In case the versioning scheme reversal fails, fallback to the
-            # default implementation
-            url = _reverse(viewname, args, kwargs, request, format, **extra)
-    else:
-        url = _reverse(viewname, args, kwargs, request, format, **extra)
+def is_optional_api_urlpattern_prefix_request(request):
+    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
+        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
+            return True
+    return False


+def transform_optional_api_urlpattern_prefix_url(request, url):
+    if is_optional_api_urlpattern_prefix_request(request):
+        url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
+    return url
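The helper above rewrites generated URLs when the optional API prefix is active. A standalone sketch of the intended rewrite, with the Django settings machinery stubbed out; 'awx' is an example prefix, not a value from this changeset:

    # Sketch of transform_optional_api_urlpattern_prefix_url in isolation.
    class FakeSettings:
        OPTIONAL_API_URLPATTERN_PREFIX = 'awx'  # example value

    class FakeRequest:
        path = '/api/awx/v2/jobs/'

    settings = FakeSettings()
    request = FakeRequest()

    url = '/api/v2/jobs/42/'
    if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
        url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
    print(url)  # /api/awx/v2/jobs/42/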

@@ -60,6 +60,11 @@ from oauth2_provider.models import get_access_token_model
import pytz
from wsgiref.util import FileWrapper

+# django-ansible-base
+from ansible_base.lib.utils.requests import get_remote_hosts
+from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

# AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
from awx.main.access import get_user_queryset

@@ -87,6 +92,7 @@ from awx.api.generics import (
from awx.api.views.labels import LabelSubListCreateAttachDetachView
from awx.api.versioning import reverse
from awx.main import models
+from awx.main.models.rbac import get_role_definition
from awx.main.utils import (
    camelcase_to_underscore,
    extract_ansible_vars,

@@ -124,6 +130,7 @@ from awx.api.views.mixin import (
from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ


logger = logging.getLogger('awx.api.views')


@@ -272,16 +279,24 @@ class DashboardJobsGraphView(APIView):

        success_query = user_unified_jobs.filter(status='successful')
        failed_query = user_unified_jobs.filter(status='failed')
+       canceled_query = user_unified_jobs.filter(status='canceled')
+       error_query = user_unified_jobs.filter(status='error')

        if job_type == 'inv_sync':
            success_query = success_query.filter(instance_of=models.InventoryUpdate)
            failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
+           canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
+           error_query = error_query.filter(instance_of=models.InventoryUpdate)
        elif job_type == 'playbook_run':
            success_query = success_query.filter(instance_of=models.Job)
            failed_query = failed_query.filter(instance_of=models.Job)
+           canceled_query = canceled_query.filter(instance_of=models.Job)
+           error_query = error_query.filter(instance_of=models.Job)
        elif job_type == 'scm_update':
            success_query = success_query.filter(instance_of=models.ProjectUpdate)
            failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
+           canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
+           error_query = error_query.filter(instance_of=models.ProjectUpdate)

        end = now()
        interval = 'day'

@@ -297,10 +312,12 @@ class DashboardJobsGraphView(APIView):
        else:
            return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)

-       dashboard_data = {"jobs": {"successful": [], "failed": []}}
+       dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}}

        succ_list = dashboard_data['jobs']['successful']
        fail_list = dashboard_data['jobs']['failed']
+       canceled_list = dashboard_data['jobs']['canceled']
+       error_list = dashboard_data['jobs']['error']

        qs_s = (
            success_query.filter(finished__range=(start, end))

@@ -318,6 +335,22 @@ class DashboardJobsGraphView(APIView):
            .annotate(agg=Count('id', distinct=True))
        )
        data_f = {item['d']: item['agg'] for item in qs_f}
+       qs_c = (
+           canceled_query.filter(finished__range=(start, end))
+           .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+           .order_by()
+           .values('d')
+           .annotate(agg=Count('id', distinct=True))
+       )
+       data_c = {item['d']: item['agg'] for item in qs_c}
+       qs_e = (
+           error_query.filter(finished__range=(start, end))
+           .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+           .order_by()
+           .values('d')
+           .annotate(agg=Count('id', distinct=True))
+       )
+       data_e = {item['d']: item['agg'] for item in qs_e}

        start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
        for d in itertools.count():

@@ -326,6 +359,8 @@ class DashboardJobsGraphView(APIView):
                break
            succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
            fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
+           canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
+           error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])

        return Response(dashboard_data)
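The per-day buckets above come from Django's Trunc function plus a Count aggregation. The same shape in isolation; 'Job' and 'finished' are placeholders for any model with a datetime field, and this runs only inside a configured Django app:

    # Sketch: count rows per day with the ORM, mirroring the qs_c/qs_e queries.
    from django.db.models import Count
    from django.db.models.functions import Trunc

    per_day = (
        Job.objects.filter(finished__range=(start, end))
        .annotate(d=Trunc('finished', 'day'))   # truncate timestamp to the day
        .order_by()                             # clear default ordering so GROUP BY is just 'd'
        .values('d')
        .annotate(agg=Count('id', distinct=True))
    )
    counts = {row['d']: row['agg'] for row in per_day}  # {date -> count}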

@@ -508,6 +543,7 @@ class InstanceGroupAccessList(ResourceAccessList):


class InstanceGroupObjectRolesList(SubListAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.InstanceGroup

@@ -677,16 +713,81 @@ class AuthView(APIView):
        return Response(data)


+def immutablesharedfields(cls):
+    '''
+    Class decorator to prevent modifying shared resources when the ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is False.
+
+    Works by overriding these view methods:
+    - create
+    - delete
+    - perform_update
+    create and delete are overridden to raise a PermissionDenied exception.
+    perform_update is overridden to check if any shared fields are being modified,
+    and raise a PermissionDenied exception if so.
+    '''
+    # create instead of perform_create because some of our views
+    # override create instead of perform_create
+    if hasattr(cls, 'create'):
+        cls.original_create = cls.create
+
+        @functools.wraps(cls.create)
+        def create_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_create(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
+
+        cls.create = create_wrapper
+
+    if hasattr(cls, 'delete'):
+        cls.original_delete = cls.delete
+
+        @functools.wraps(cls.delete)
+        def delete_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_delete(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
+
+        cls.delete = delete_wrapper
+
+    if hasattr(cls, 'perform_update'):
+        cls.original_perform_update = cls.perform_update
+
+        @functools.wraps(cls.perform_update)
+        def update_wrapper(*args, **kwargs):
+            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                view, serializer = args
+                instance = view.get_object()
+                if instance:
+                    if isinstance(instance, models.Organization):
+                        shared_fields = OrganizationType._declared_fields.keys()
+                    elif isinstance(instance, models.User):
+                        shared_fields = UserType._declared_fields.keys()
+                    elif isinstance(instance, models.Team):
+                        shared_fields = TeamType._declared_fields.keys()
+                    attrs = serializer.validated_data
+                    for field in shared_fields:
+                        if field in attrs and getattr(instance, field) != attrs[field]:
+                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
+            return cls.original_perform_update(*args, **kwargs)
+
+        cls.perform_update = update_wrapper
+
+    return cls
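The decorator mutates the view class in place, stashing each original method and swapping in a guard. The same wrapping pattern in miniature, as plain Python with no Django; the module-level flag stands in for the real setting:

    import functools

    ALLOW_LOCAL_RESOURCE_MANAGEMENT = False  # stand-in for the Django setting

    def guard_create(cls):
        # Keep the original method, replace it with a closure that checks a flag.
        cls.original_create = cls.create

        @functools.wraps(cls.create)
        def create_wrapper(*args, **kwargs):
            if ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_create(*args, **kwargs)
            raise PermissionError('create is disabled')

        cls.create = create_wrapper
        return cls

    @guard_create
    class TeamView:
        def create(self):
            return 'created'

    try:
        TeamView().create()
    except PermissionError as exc:
        print(exc)  # create is disabled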


+@immutablesharedfields
class TeamList(ListCreateAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer


+@immutablesharedfields
class TeamDetail(RetrieveUpdateDestroyAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer


+@immutablesharedfields
class TeamUsersList(BaseUsersList):
    model = models.User
    serializer_class = serializers.UserSerializer

@@ -696,6 +797,7 @@ class TeamUsersList(BaseUsersList):


class TeamRolesList(SubListAttachDetachAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializerWithParentAccess
    metadata_class = RoleMetadata

@@ -735,10 +837,12 @@ class TeamRolesList(SubListAttachDetachAPIView):


class TeamObjectRolesList(SubListAPIView):
+   deprecated = True

    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Team
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@@ -756,8 +860,15 @@ class TeamProjectsList(SubListAPIView):
        self.check_parent_access(team)
        model_ct = ContentType.objects.get_for_model(self.model)
        parent_ct = ContentType.objects.get_for_model(self.parent_model)
-       proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
-       return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])

+       rd = get_role_definition(team.member_role)
+       role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
+       if role is None:
+           # Team has no permissions, therefore team has no projects
+           return self.model.objects.none()
+       else:
+           project_qs = self.model.accessible_objects(self.request.user, 'read_role')
+           return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))


class TeamActivityStreamList(SubListAPIView):

@@ -772,10 +883,23 @@ class TeamActivityStreamList(SubListAPIView):
        self.check_parent_access(parent)

        qs = self.request.user.get_queryset(self.model)

        return qs.filter(
            Q(team=parent)
-           | Q(project__in=models.Project.accessible_objects(parent.member_role, 'read_role'))
-           | Q(credential__in=models.Credential.accessible_objects(parent.member_role, 'read_role'))
+           | Q(
+               project__in=RoleEvaluation.objects.filter(
+                   role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Project).id, codename='view_project'
+               )
+               .values_list('object_id')
+               .distinct()
+           )
+           | Q(
+               credential__in=RoleEvaluation.objects.filter(
+                   role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Credential).id, codename='view_credential'
+               )
+               .values_list('object_id')
+               .distinct()
+           )
        )

@@ -1027,10 +1151,12 @@ class ProjectAccessList(ResourceAccessList):


class ProjectObjectRolesList(SubListAPIView):
+   deprecated = True

    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Project
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@@ -1043,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
    copy_return_serializer_class = serializers.ProjectSerializer


+@immutablesharedfields
class UserList(ListCreateAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer

@@ -1188,6 +1315,7 @@ class UserTeamsList(SubListAPIView):


class UserRolesList(SubListAttachDetachAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializerWithParentAccess
    metadata_class = RoleMetadata

@@ -1212,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
        user = get_object_or_400(models.User, pk=self.kwargs['pk'])
        role = get_object_or_400(models.Role, pk=sub_id)

-       credential_content_type = ContentType.objects.get_for_model(models.Credential)
+       content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+       # Prevent a user from being associated with a team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
+       if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+           for model in [models.Organization, models.Team]:
+               ct = content_types[model]
+               if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                   data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                   return Response(data, status=status.HTTP_403_FORBIDDEN)

+       credential_content_type = content_types[models.Credential]
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))

@@ -1284,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
        return qs.filter(Q(actor=parent) | Q(user__in=[parent]))


+@immutablesharedfields
class UserDetail(RetrieveUpdateDestroyAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer

@@ -1462,10 +1600,12 @@ class CredentialAccessList(ResourceAccessList):


class CredentialObjectRolesList(SubListAPIView):
+   deprecated = True

    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Credential
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@@ -2252,12 +2392,13 @@ class JobTemplateList(ListCreateAPIView):
    serializer_class = serializers.JobTemplateSerializer
    always_allow_superuser = False

-   def post(self, request, *args, **kwargs):
-       ret = super(JobTemplateList, self).post(request, *args, **kwargs)
-       if ret.status_code == 201:
-           job_template = models.JobTemplate.objects.get(id=ret.data['id'])
-           job_template.admin_role.members.add(request.user)
-       return ret
+   def check_permissions(self, request):
+       if request.method == 'POST':
+           can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+           if not can_access:
+               self.permission_denied(request, message=messages)
+
+       super(JobTemplateList, self).check_permissions(request)


class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):

@@ -2638,12 +2779,7 @@ class JobTemplateCallback(GenericAPIView):
        host for the current request.
        """
        # Find the list of remote host names/IPs to check.
-       remote_hosts = set()
-       for header in settings.REMOTE_HOST_HEADERS:
-           for value in self.request.META.get(header, '').split(','):
-               value = value.strip()
-               if value:
-                   remote_hosts.add(value)
+       remote_hosts = set(get_remote_hosts(self.request))
        # Add the reverse lookup of IP addresses.
        for rh in list(remote_hosts):
            try:

@@ -2804,10 +2940,12 @@ class JobTemplateAccessList(ResourceAccessList):


class JobTemplateObjectRolesList(SubListAPIView):
+   deprecated = True

    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.JobTemplate
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@@ -2981,6 +3119,14 @@ class WorkflowJobTemplateList(ListCreateAPIView):
    serializer_class = serializers.WorkflowJobTemplateSerializer
    always_allow_superuser = False

+   def check_permissions(self, request):
+       if request.method == 'POST':
+           can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+           if not can_access:
+               self.permission_denied(request, message=messages)
+
+       super(WorkflowJobTemplateList, self).check_permissions(request)


class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    model = models.WorkflowJobTemplate

@@ -3190,10 +3336,12 @@ class WorkflowJobTemplateAccessList(ResourceAccessList):


class WorkflowJobTemplateObjectRolesList(SubListAPIView):
+   deprecated = True

    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.WorkflowJobTemplate
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@@ -4202,6 +4350,7 @@ class ActivityStreamDetail(RetrieveAPIView):


class RoleList(ListAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    permission_classes = (IsAuthenticated,)

@@ -4209,11 +4358,13 @@ class RoleList(ListAPIView):


class RoleDetail(RetrieveAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer


class RoleUsersList(SubListAttachDetachAPIView):
+   deprecated = True
    model = models.User
    serializer_class = serializers.UserSerializer
    parent_model = models.Role

@@ -4234,7 +4385,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
        user = get_object_or_400(models.User, pk=sub_id)
        role = self.get_parent_object()

-       credential_content_type = ContentType.objects.get_for_model(models.Credential)
+       content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+       if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+           for model in [models.Organization, models.Team]:
+               ct = content_types[model]
+               if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                   data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                   return Response(data, status=status.HTTP_403_FORBIDDEN)

+       credential_content_type = content_types[models.Credential]
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))

@@ -4248,6 +4407,7 @@ class RoleUsersList(SubListAttachDetachAPIView):


class RoleTeamsList(SubListAttachDetachAPIView):
+   deprecated = True
    model = models.Team
    serializer_class = serializers.TeamSerializer
    parent_model = models.Role

@@ -4292,10 +4452,12 @@ class RoleTeamsList(SubListAttachDetachAPIView):
            team.member_role.children.remove(role)
        else:
            team.member_role.children.add(role)

        return Response(status=status.HTTP_204_NO_CONTENT)


class RoleParentsList(SubListAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Role

@@ -4309,6 +4471,7 @@ class RoleParentsList(SubListAPIView):


class RoleChildrenList(SubListAPIView):
+   deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Role

@@ -48,23 +48,23 @@ class AnalyticsRootView(APIView):

    def get(self, request, format=None):
        data = OrderedDict()
-       data['authorized'] = reverse('api:analytics_authorized')
-       data['reports'] = reverse('api:analytics_reports_list')
-       data['report_options'] = reverse('api:analytics_report_options_list')
-       data['adoption_rate'] = reverse('api:analytics_adoption_rate')
-       data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
-       data['event_explorer'] = reverse('api:analytics_event_explorer')
-       data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
-       data['host_explorer'] = reverse('api:analytics_host_explorer')
-       data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
-       data['job_explorer'] = reverse('api:analytics_job_explorer')
-       data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
-       data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
-       data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
-       data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
-       data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
-       data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
-       data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
+       data['authorized'] = reverse('api:analytics_authorized', request=request)
+       data['reports'] = reverse('api:analytics_reports_list', request=request)
+       data['report_options'] = reverse('api:analytics_report_options_list', request=request)
+       data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
+       data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
+       data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
+       data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
+       data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
+       data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
+       data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
+       data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
+       data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
+       data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
+       data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
+       data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
+       data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
+       data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
        return Response(data)
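Passing request= to DRF's reverse is what this hunk is about: with a request, reverse builds a fully qualified URL from the request's scheme and host instead of a bare path. In isolation (the URLs shown are illustrative; request comes from the enclosing view):

    # DRF's reverse returns an absolute URI when given a request.
    from rest_framework.reverse import reverse

    reverse('api:analytics_authorized')                   # e.g. '/api/v2/analytics/authorized/'
    reverse('api:analytics_authorized', request=request)  # e.g. 'https://awx.example.org/api/v2/analytics/authorized/'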


@@ -152,6 +152,7 @@ class InventoryObjectRolesList(SubListAPIView):
    serializer_class = RoleSerializer
    parent_model = Inventory
    search_fields = ('role_field', 'content_type__model')
+   deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()

@@ -53,15 +53,18 @@ from awx.api.serializers import (
    CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
+from awx.api.views import immutablesharedfields

logger = logging.getLogger('awx.api.views.organization')


+@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
    model = Organization
    serializer_class = OrganizationSerializer


+@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    model = Organization
    serializer_class = OrganizationSerializer

@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
    relationship = 'inventories'


+@immutablesharedfields
class OrganizationUsersList(BaseUsersList):
    model = User
    serializer_class = UserSerializer

@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
    ordering = ('username',)


+@immutablesharedfields
class OrganizationAdminsList(BaseUsersList):
    model = User
    serializer_class = UserSerializer

@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
    parent_key = 'organization'


+@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
    model = Team
    serializer_class = TeamSerializer

@@ -226,6 +232,7 @@ class OrganizationObjectRolesList(SubListAPIView):
    serializer_class = RoleSerializer
    parent_model = Organization
    search_fields = ('role_field', 'content_type__model')
+   deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()

@@ -13,6 +13,7 @@ from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _
+from django.urls import reverse as django_reverse

from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response

@@ -27,7 +28,7 @@ from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest
-from awx.api.versioning import reverse, drf_reverse
+from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ

@@ -39,19 +40,19 @@ logger = logging.getLogger('awx.api.views.root')
class ApiRootView(APIView):
    permission_classes = (AllowAny,)
    name = _('REST API')
-   versioning_class = None
+   versioning_class = URLPathVersioning
    swagger_topic = 'Versioning'

    @method_decorator(ensure_csrf_cookie)
    def get(self, request, format=None):
        '''List supported API versions'''

-       v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
+       v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
        data = OrderedDict()
        data['description'] = _('AWX REST API')
        data['current_version'] = v2
        data['available_versions'] = dict(v2=v2)
-       data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
+       if not is_optional_api_urlpattern_prefix_request(request):
+           data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
        data['custom_logo'] = settings.CUSTOM_LOGO
        data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
        data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE

@@ -130,6 +131,10 @@ class ApiVersionRootView(APIView):
        data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
        data['bulk'] = reverse('api:bulk', request=request)
        data['analytics'] = reverse('api:analytics_root_view', request=request)
+       data['service_index'] = django_reverse('service-index-root')
+       data['role_definitions'] = django_reverse('roledefinition-list')
+       data['role_user_assignments'] = django_reverse('roleuserassignment-list')
+       data['role_team_assignments'] = django_reverse('roleteamassignment-list')
        return Response(data)


@@ -55,6 +55,7 @@ register(
    # Optional; category_slug will be slugified version of category if not
    # explicitly provided.
    category_slug='cows',
+   hidden=True,
)


@@ -61,6 +61,10 @@ class StringListBooleanField(ListField):

    def to_representation(self, value):
        try:
+           if isinstance(value, str):
+               # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
+               # DRF changed truthy and falsy lists to be capitalized
+               value = value.lower()
            if isinstance(value, (list, tuple)):
                return super(StringListBooleanField, self).to_representation(value)
            elif value in BooleanField.TRUE_VALUES:

@@ -78,6 +82,8 @@ class StringListBooleanField(ListField):

    def to_internal_value(self, data):
        try:
+           if isinstance(data, str):
+               data = data.lower()
            if isinstance(data, (list, tuple)):
                return super(StringListBooleanField, self).to_internal_value(data)
            elif data in BooleanField.TRUE_VALUES:
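Lower-casing the incoming string before the membership test keeps 'TRUE'/'True' working regardless of how DRF's canonical truthy lists are cased, since BooleanField.TRUE_VALUES contains the lowercase 'true'. A quick check:

    # BooleanField.TRUE_VALUES includes lowercase 'true', so normalizing the
    # input with .lower() makes 'TRUE', 'True' and 'true' all match.
    from rest_framework.fields import BooleanField

    for raw in ('TRUE', 'True', 'true'):
        print(raw, raw.lower() in BooleanField.TRUE_VALUES)  # all True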

@@ -127,6 +127,8 @@ class SettingsRegistry(object):
        encrypted = bool(field_kwargs.pop('encrypted', False))
        defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
        unit = field_kwargs.pop('unit', None)
+       hidden = field_kwargs.pop('hidden', False)
+       warning_text = field_kwargs.pop('warning_text', None)
        if getattr(field_kwargs.get('child', None), 'source', None) is not None:
            field_kwargs['child'].source = None
        field_instance = field_class(**field_kwargs)

@@ -134,12 +136,14 @@ class SettingsRegistry(object):
        field_instance.category = category
        field_instance.depends_on = depends_on
        field_instance.unit = unit
+       field_instance.hidden = hidden
        if placeholder is not empty:
            field_instance.placeholder = placeholder
        field_instance.defined_in_file = defined_in_file
        if field_instance.defined_in_file:
            field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
        field_instance.encrypted = encrypted
+       field_instance.warning_text = warning_text
        original_field_instance = field_instance
        if field_class != original_field_class:
            original_field_instance = original_field_class(**field_kwargs)

@@ -1,6 +1,7 @@
# Python
import contextlib
import logging
+import psycopg
import threading
import time
import os

@@ -13,7 +14,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.db import transaction, connection
-from django.db.utils import Error as DBError, ProgrammingError
+from django.db.utils import DatabaseError, ProgrammingError
from django.utils.functional import cached_property

# Django REST Framework

@@ -80,18 +81,26 @@ def _ctit_db_wrapper(trans_safe=False):
            logger.debug('Obtaining database settings in spite of broken transaction.')
            transaction.set_rollback(False)
        yield
-   except DBError as exc:
+   except ProgrammingError as e:
+       # Exception raised for programming errors
+       # Examples may be table not found or already exists,
+       # syntax error in the SQL statement, wrong number of parameters specified, etc.
        if trans_safe:
-           level = logger.warning
-           if isinstance(exc, ProgrammingError):
-               if 'relation' in str(exc) and 'does not exist' in str(exc):
-                   # this generally means we can't fetch Tower configuration
-                   # because the database hasn't actually finished migrating yet;
-                   # this is usually a sign that a service in a container (such as ws_broadcast)
-                   # has come up *before* the database has finished migrating, and
-                   # especially that the conf.settings table doesn't exist yet
-                   level = logger.debug
-           level(f'Database settings are not available, using defaults. error: {str(exc)}')
+           logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
        else:
            logger.exception('Error modifying something related to database settings.')
+   except DatabaseError as e:
+       if trans_safe:
+           cause = e.__cause__
+           if cause and hasattr(cause, 'sqlstate'):
+               sqlstate = cause.sqlstate
+               sqlstate_str = psycopg.errors.lookup(sqlstate)
+               logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
+       else:
+           logger.exception('Error modifying something related to database settings.')
    finally:
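psycopg 3 exposes the server's SQLSTATE on the exception, and psycopg.errors.lookup maps the five-character code back to the matching exception class, which is what the logging above reports. A standalone sketch (assumes psycopg 3 is installed; '42P01' is PostgreSQL's undefined_table code, used as an example):

    # Sketch: resolve a SQLSTATE code to its psycopg exception class.
    import psycopg

    exc_class = psycopg.errors.lookup('42P01')
    print(exc_class.__name__)  # UndefinedTable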

@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
-       assert settings.AWX_SOME_SETTING == 'DEFAULT'
-       assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
+   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
+   assert settings.AWX_SOME_SETTING == 'DEFAULT'
+   assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
||||
|
||||
|
||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
|
||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||
|
||||
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
|
||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
|
||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
|
||||
|
||||
|
||||
def test_empty_setting(settings, mocker):
|
||||
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
|
||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
||||
|
||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
||||
with pytest.raises(AttributeError):
|
||||
settings.AWX_SOME_SETTING
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||
with pytest.raises(AttributeError):
|
||||
settings.AWX_SOME_SETTING
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
|
||||
|
||||
|
||||
def test_setting_from_db(settings, mocker):
|
||||
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):
|
||||
|
||||
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
||||
assert settings.AWX_SOME_SETTING == 'FROM_DB'
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||
assert settings.AWX_SOME_SETTING == 'FROM_DB'
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
|
||||
|
||||
|
||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):
|
||||
|
||||
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
|
||||
settings.AWX_SOME_SETTING = 'NEW-VALUE'
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
|
||||
settings.AWX_SOME_SETTING = 'NEW-VALUE'
|
||||
|
||||
assert existing_setting.value == 'NEW-VALUE'
|
||||
existing_setting.save.assert_called_with(update_fields=['value'])
|
||||
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
|
||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
||||
|
||||
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
|
||||
del settings.AWX_SOME_SETTING
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
|
||||
del settings.AWX_SOME_SETTING
|
||||
|
||||
assert existing_setting.delete.call_count == 1
|
||||
|
||||
@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
|
||||
# use its primary key as part of the encryption key
|
||||
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
|
||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
||||
cache.set('AWX_ENCRYPTED', 'SECRET!')
|
||||
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
|
||||
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||
cache.set('AWX_ENCRYPTED', 'SECRET!')
|
||||
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
|
||||
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
|
||||
|
||||
|
||||
def test_readonly_sensitive_cache_data_is_encrypted(settings):
|
||||
|
||||
@@ -20,7 +20,10 @@ from rest_framework.exceptions import ParseError, PermissionDenied
 # Django OAuth Toolkit
 from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken

 # django-ansible-base
 from ansible_base.lib.utils.validation import to_python_boolean
+from ansible_base.rbac.models import RoleEvaluation
+from ansible_base.rbac import permission_registry

 # AWX
 from awx.main.utils import (
@@ -72,8 +75,6 @@ from awx.main.models import (
     WorkflowJobTemplateNode,
     WorkflowApproval,
     WorkflowApprovalTemplate,
-    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
-    ROLE_SINGLETON_SYSTEM_AUDITOR,
 )
 from awx.main.models.mixins import ResourceMixin

@@ -264,7 +265,11 @@ class BaseAccess(object):
         return self.can_change(obj, data)

     def can_delete(self, obj):
-        return self.user.is_superuser
+        if self.user.is_superuser:
+            return True
+        if obj._meta.model_name in [cls._meta.model_name for cls in permission_registry.all_registered_models]:
+            return self.user.has_obj_perm(obj, 'delete')
+        return False

     def can_copy(self, obj):
         return self.can_add({'reference_obj': obj})
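The new `can_delete` defers to django-ansible-base whenever the object's model is registered with the RBAC permission registry, and otherwise keeps the old superuser-only behavior. A rough sketch of what that gate does (the names come from the diff; the registry internals are assumptions, not shown here):

    # Illustrative only: mirrors the gate in BaseAccess.can_delete above.
    # registered_models stands in for permission_registry.all_registered_models;
    # user.has_obj_perm is the django-ansible-base object-level check used in the diff.
    def can_delete(user, obj, registered_models):
        if user.is_superuser:
            return True
        registered_names = {cls._meta.model_name for cls in registered_models}
        if obj._meta.model_name in registered_names:
            return user.has_obj_perm(obj, 'delete')  # object-level RBAC check
        return False  # unregistered models stay superuser-only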
@@ -593,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
     - a superuser
     - admin role on the Instance group
     I can add/delete Instance Groups:
-    - a superuser(system administrator)
+    - a superuser(system administrator), because these are not org-scoped
     I can use Instance Groups when I have:
     - use_role on the instance group
     """
@@ -622,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
     def can_delete(self, obj):
         if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
             return False
-        return self.user.is_superuser
+        return self.user.has_obj_perm(obj, 'delete')


 class UserAccess(BaseAccess):
@@ -639,7 +644,10 @@ class UserAccess(BaseAccess):
     """

     model = User
-    prefetch_related = ('profile',)
+    prefetch_related = (
+        'profile',
+        'resource',
+    )

     def filtered_queryset(self):
         if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
@@ -648,9 +656,7 @@ class UserAccess(BaseAccess):
             qs = (
                 User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
                 | User.objects.filter(pk=self.user.id)
-                | User.objects.filter(
-                    pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
-                )
+                | User.objects.filter(is_superuser=True)
             ).distinct()
             return qs

@@ -708,6 +714,15 @@ class UserAccess(BaseAccess):
             if not allow_orphans:
                 # in these cases only superusers can modify orphan users
                 return False
+            if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+                # Permission granted if the user has all permissions that the target user has
+                target_perms = set(
+                    RoleEvaluation.objects.filter(role__in=obj.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
+                )
+                user_perms = set(
+                    RoleEvaluation.objects.filter(role__in=self.user.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
+                )
+                return not (target_perms - user_perms)
             return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
         else:
             return self.is_all_org_admin(obj)
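The new branch grants permission over another user only when the acting user's permission set is a superset of the target's; `not (target_perms - user_perms)` is the usual set-difference idiom for that. For example:

    # Empty difference means: every (object_id, content_type_id, codename)
    # the target holds, the actor holds too.
    target_perms = {(1, 10, 'view_inventory'), (1, 10, 'change_inventory')}
    user_perms = {(1, 10, 'view_inventory'), (1, 10, 'change_inventory'), (2, 11, 'execute_jobtemplate')}
    assert not (target_perms - user_perms)  # actor covers the target: permission granted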
@@ -835,6 +850,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
     prefetch_related = (
         'created_by',
         'modified_by',
+        'resource',  # dab_resource_registry
     )
     # organization admin_role is not a parent of organization auditor_role
     notification_attach_roles = ['admin_role', 'auditor_role']
@@ -945,9 +961,6 @@ class InventoryAccess(BaseAccess):
     def can_update(self, obj):
         return self.user in obj.update_role

-    def can_delete(self, obj):
-        return self.can_admin(obj, None)
-
     def can_run_ad_hoc_commands(self, obj):
         return self.user in obj.adhoc_role

@@ -1303,6 +1316,7 @@ class TeamAccess(BaseAccess):
         'created_by',
         'modified_by',
         'organization',
+        'resource',  # dab_resource_registry
     )

     def filtered_queryset(self):
@@ -1373,12 +1387,11 @@ class TeamAccess(BaseAccess):
 class ExecutionEnvironmentAccess(BaseAccess):
     """
     I can see an execution environment when:
     - I'm a superuser
-    - I'm a member of the same organization
-    - it is a global ExecutionEnvironment
+    - I can see its organization
+    - It is a global ExecutionEnvironment
     I can create/change an execution environment when:
     - I'm a superuser
-    - I'm an admin for the organization(s)
+    - I have an organization or object role that gives access
     """

     model = ExecutionEnvironment
@@ -1400,13 +1413,13 @@ class ExecutionEnvironmentAccess(BaseAccess):
     def can_change(self, obj, data):
         if obj and obj.organization_id is None:
             raise PermissionDenied
-        if self.user not in obj.organization.execution_environment_admin_role:
-            raise PermissionDenied
-        if data and 'organization' in data:
-            new_org = get_object_from_data('organization', Organization, data, obj=obj)
-            if not new_org or self.user not in new_org.execution_environment_admin_role:
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            if not self.user.has_obj_perm(obj, 'change'):
+                return False
+            return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
+        else:
+            if self.user not in obj.organization.execution_environment_admin_role:
+                raise PermissionDenied
+            return self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role')

     def can_delete(self, obj):
         if obj.managed:
@@ -1578,6 +1591,8 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
         inventory = get_value(Inventory, 'inventory')
         if inventory:
             if self.user not in inventory.use_role:
+                if self.save_messages:
+                    self.messages['inventory'] = [_('You do not have use permission on Inventory')]
                 return False

         if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
@@ -1586,11 +1601,16 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAccess):
         project = get_value(Project, 'project')
         # If the user has admin access to the project (as an org admin), should
         # be able to proceed without additional checks.
-        if project:
-            return self.user in project.use_role
-        else:
+        if not project:
             return False
+
+        if self.user not in project.use_role:
+            if self.save_messages:
+                self.messages['project'] = [_('You do not have use permission on Project')]
+            return False
+
+        return True

     @check_superuser
     def can_copy_related(self, obj):
         """
@@ -2074,11 +2094,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if not data:  # So the browseable API will work
             return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

-        return bool(
-            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
-            and self.check_related('inventory', Inventory, data, role_field='use_role')
-            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
-        )
+        if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
+            if data.get('organization', None) is None:
+                if self.save_messages:
+                    self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
+            return False
+
+        if not self.check_related('inventory', Inventory, data, role_field='use_role'):
+            if self.save_messages:
+                self.messages['inventory'] = [_('You do not have use_role to the inventory')]
+            return False
+
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
+            if self.save_messages:
+                self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
+            return False
+
+        return True

     def can_copy(self, obj):
         if self.save_messages:
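The rewritten `can_add` trades the single boolean expression for per-field checks so that, when `save_messages` is set, the caller gets a field-keyed explanation instead of a bare denial. Based on the assignments visible above, the populated structure would look like:

    # Shape of the messages dict filled in by the checks above
    # (values are lists of translated strings, keyed by the failing field).
    messages = {
        'organization': ['An organization is required to create a workflow job template for normal user'],
        'inventory': ['You do not have use_role to the inventory'],
    }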
@@ -2587,6 +2619,8 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
         if not JobLaunchConfigAccess(self.user).can_add(data):
             return False
         if not data:
+            if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+                return self.user.has_roles.filter(permission_partials__codename__in=['execute_jobtemplate', 'update_project', 'update_inventory']).exists()
             return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()

         return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
@@ -2608,13 +2642,15 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):

 class NotificationTemplateAccess(BaseAccess):
     """
-    I can see/use a notification_template if I have permission to
+    Run standard logic from DAB RBAC
     """

     model = NotificationTemplate
     prefetch_related = ('created_by', 'modified_by', 'organization')

     def filtered_queryset(self):
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            return self.model.access_qs(self.user, 'view')
         return self.model.objects.filter(
             Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
         ).distinct()
@@ -2627,10 +2663,7 @@ class NotificationTemplateAccess(BaseAccess):

     @check_superuser
     def can_change(self, obj, data):
-        if obj.organization is None:
-            # only superusers are allowed to edit orphan notification templates
-            return False
-        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
+        return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')

     def can_admin(self, obj, data):
         return self.can_change(obj, data)
@@ -2640,9 +2673,7 @@ class NotificationTemplateAccess(BaseAccess):

     @check_superuser
     def can_start(self, obj, validate_license=True):
-        if obj.organization is None:
-            return False
-        return self.user in obj.organization.notification_admin_role
+        return self.can_change(obj, None)


 class NotificationAccess(BaseAccess):
@@ -2783,7 +2814,7 @@ class ActivityStreamAccess(BaseAccess):
                 | Q(notification_template__organization__in=auditing_orgs)
                 | Q(notification__notification_template__organization__in=auditing_orgs)
                 | Q(label__organization__in=auditing_orgs)
-                | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
+                | Q(role__in=Role.visible_roles(self.user) if auditing_orgs else [])
             )

         project_set = Project.accessible_pk_qs(self.user, 'read_role')
@@ -2840,13 +2871,10 @@ class RoleAccess(BaseAccess):

     def filtered_queryset(self):
         result = Role.visible_roles(self.user)
-        # Sanity check: is the requesting user an orphaned non-admin/auditor?
-        # if yes, make system admin/auditor mandatorily visible.
-        if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
-            mandatories = ('system_administrator', 'system_auditor')
-            super_qs = Role.objects.filter(singleton_name__in=mandatories)
-            result = result | super_qs
-        return result
+        # Make system admin/auditor mandatorily visible.
+        mandatories = ('system_administrator', 'system_auditor')
+        super_qs = Role.objects.filter(singleton_name__in=mandatories)
+        return result | super_qs

     def can_add(self, obj, data):
         # Unsupported for now
@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
     resolved_action,
     resolved_role,
     -- '-' operator listed here:
-    -- https://www.postgresql.org/docs/12/functions-json.html
+    -- https://www.postgresql.org/docs/15/functions-json.html
     -- note that operator is only supported by jsonb objects
     -- https://www.postgresql.org/docs/current/datatype-json.html
     (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
@@ -1,7 +1,40 @@
 from django.apps import AppConfig
 from django.utils.translation import gettext_lazy as _
+from awx.main.utils.named_url_graph import _customize_graph, generate_graph
+from awx.conf import register, fields


 class MainConfig(AppConfig):
     name = 'awx.main'
     verbose_name = _('Main')

+    def load_named_url_feature(self):
+        models = [m for m in self.get_models() if hasattr(m, 'get_absolute_url')]
+        generate_graph(models)
+        _customize_graph()
+        register(
+            'NAMED_URL_FORMATS',
+            field_class=fields.DictField,
+            read_only=True,
+            label=_('Formats of all available named urls'),
+            help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
+            category=_('Named URL'),
+            category_slug='named-url',
+        )
+        register(
+            'NAMED_URL_GRAPH_NODES',
+            field_class=fields.DictField,
+            read_only=True,
+            label=_('List of all named url graph nodes.'),
+            help_text=_(
+                'Read-only list of key-value pairs that exposes named URL graph topology.'
+                ' Use this list to programmatically generate named URLs for resources'
+            ),
+            category=_('Named URL'),
+            category_slug='named-url',
+        )
+
+    def ready(self):
+        super().ready()
+
+        self.load_named_url_feature()
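The named-URL graph setup that used to run in `URLModificationMiddleware.__init__` (removed further down in this diff) now runs once at app startup via `AppConfig.ready()`. The general Django pattern, for reference:

    # Generic Django pattern: one-time startup work belongs in AppConfig.ready(),
    # which runs after the app registry is fully populated.
    from django.apps import AppConfig

    class ExampleConfig(AppConfig):
        name = 'example'  # hypothetical app, not part of AWX

        def ready(self):
            super().ready()
            # safe place to build caches, register settings, connect signals, etc.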
@@ -2,6 +2,7 @@
 import logging

 # Django
+from django.core.checks import Error
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework
@@ -92,6 +93,7 @@ register(
     ),
     category=_('System'),
     category_slug='system',
+    required=False,
 )

 register(
@@ -774,6 +776,7 @@ register(
     allow_null=True,
     category=_('System'),
     category_slug='system',
+    required=False,
 )
 register(
     'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -815,6 +818,7 @@ register(
     help_text=_('Max jobs to allow bulk jobs to launch'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -825,6 +829,7 @@ register(
     help_text=_('Max number of hosts to allow to be created in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -835,6 +840,7 @@ register(
     help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -845,6 +851,7 @@ register(
     help_text=_('Enable preview of new user interface.'),
     category=_('System'),
     category_slug='system',
+    hidden=True,
 )

 register(
@@ -948,3 +955,27 @@ def logging_validate(serializer, attrs):


 register_validate('logging', logging_validate)
+
+
+def csrf_trusted_origins_validate(serializer, attrs):
+    if not serializer.instance or not hasattr(serializer.instance, 'CSRF_TRUSTED_ORIGINS'):
+        return attrs
+    if 'CSRF_TRUSTED_ORIGINS' not in attrs:
+        return attrs
+    errors = []
+    for origin in attrs['CSRF_TRUSTED_ORIGINS']:
+        if "://" not in origin:
+            errors.append(
+                Error(
+                    "As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
+                    "setting must start with a scheme (usually http:// or "
+                    "https://) but found %s. See the release notes for details." % origin,
+                )
+            )
+    if errors:
+        error_messages = [error.msg for error in errors]
+        raise serializers.ValidationError(_('\n'.join(error_messages)))
+    return attrs
+
+
+register_validate('system', csrf_trusted_origins_validate)
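The validator enforces Django 4.0's requirement that every entry in CSRF_TRUSTED_ORIGINS carries a scheme, surfacing the error through the settings API instead of at request time. For example (hostnames are placeholders):

    # Django 4.0+ requires a scheme on each trusted origin.
    CSRF_TRUSTED_ORIGINS = ['https://awx.example.com', 'http://localhost:8013']  # accepted
    # CSRF_TRUSTED_ORIGINS = ['awx.example.com']  # rejected by the validator above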
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -114,3 +114,28 @@ SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS = 'unique_managed_hosts'

 # Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
 HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
+
+# Data for RBAC compatibility layer
+role_name_to_perm_mapping = {
+    'adhoc_role': ['adhoc_'],
+    'approval_role': ['approve_'],
+    'auditor_role': ['audit_'],
+    'admin_role': ['change_', 'add_', 'delete_'],
+    'execute_role': ['execute_'],
+    'read_role': ['view_'],
+    'update_role': ['update_'],
+    'member_role': ['member_'],
+    'use_role': ['use_'],
+}
+
+org_role_to_permission = {
+    'notification_admin_role': 'add_notificationtemplate',
+    'project_admin_role': 'add_project',
+    'execute_role': 'execute_jobtemplate',
+    'inventory_admin_role': 'add_inventory',
+    'credential_admin_role': 'add_credential',
+    'workflow_admin_role': 'add_workflowjobtemplate',
+    'job_template_admin_role': 'change_jobtemplate',  # TODO: this doesn't really work, solution not clear
+    'execution_environment_admin_role': 'add_executionenvironment',
+    'auditor_role': 'view_project',  # TODO: also doesn't really work
+}
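These two tables let the compatibility layer translate old Role names into Django permission codenames: the first maps a role field to codename prefixes, the second maps organization-level roles to a representative permission. Reading them together:

    # Deriving a permission codename from the tables above.
    assert role_name_to_perm_mapping['use_role'] == ['use_']
    codename = role_name_to_perm_mapping['use_role'][0] + 'inventory'
    assert codename == 'use_inventory'  # matches the custom permission added in the migrations below
    assert org_role_to_permission['inventory_admin_role'] == 'add_inventory'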
@@ -1,9 +1,10 @@
+from azure.keyvault.secrets import SecretClient
+from azure.identity import ClientSecretCredential
+from msrestazure import azure_cloud

 from .plugin import CredentialPlugin

 from django.utils.translation import gettext_lazy as _
-from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
-from azure.common.credentials import ServicePrincipalCredentials
-from msrestazure import azure_cloud


 # https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
@@ -54,22 +55,9 @@ azure_keyvault_inputs = {


 def azure_keyvault_backend(**kwargs):
-    url = kwargs['url']
-    [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]
-
-    def auth_callback(server, resource, scope):
-        credentials = ServicePrincipalCredentials(
-            url=url,
-            client_id=kwargs['client'],
-            secret=kwargs['secret'],
-            tenant=kwargs['tenant'],
-            resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
-        )
-        token = credentials.token
-        return token['token_type'], token['access_token']
-
-    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
-    return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
+    csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
+    kv = SecretClient(credential=csc, vault_url=kwargs['url'])
+    return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value


 azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
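This swaps the retired track 1 Azure SDK (`azure.keyvault.KeyVaultClient` plus `ServicePrincipalCredentials`) for the current `azure-identity`/`azure-keyvault-secrets` packages, which acquire and refresh tokens internally, so the hand-rolled `auth_callback` disappears. Standalone usage of the new client looks roughly like this (all values are placeholders):

    from azure.identity import ClientSecretCredential
    from azure.keyvault.secrets import SecretClient

    credential = ClientSecretCredential(tenant_id='<tenant>', client_id='<client>', client_secret='<secret>')
    client = SecretClient(vault_url='https://example-vault.vault.azure.net/', credential=credential)
    print(client.get_secret('my-secret').value)  # version defaults to the latest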
@@ -1,6 +1,7 @@
 import os
 import psycopg
 import select
+from copy import deepcopy

 from contextlib import contextmanager

@@ -94,14 +95,15 @@ class PubSub(object):


 def create_listener_connection():
-    conf = settings.DATABASES['default'].copy()
-    conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
+    conf = deepcopy(settings.DATABASES['default'])
+    conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {}))
     # Modify the application name to distinguish from other connections the process might use
     conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')

     # Apply overrides specifically for the listener connection
     for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
-        conf[k] = v
+        if k != 'OPTIONS':
+            conf[k] = v
+    for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
+        conf['OPTIONS'][k] = v
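The switch from `.copy()` to `deepcopy` matters because the database settings dict can contain nested dicts beyond 'OPTIONS'; a shallow copy still aliases those, so mutating the listener's config would leak into the shared settings. Minimal illustration:

    # Why deepcopy: a shallow copy shares nested dicts with the original.
    from copy import deepcopy

    conf = {'NAME': 'awx', 'OPTIONS': {'sslmode': 'prefer'}}
    shallow = conf.copy()
    shallow['OPTIONS']['sslmode'] = 'require'
    assert conf['OPTIONS']['sslmode'] == 'require'  # original mutated through the alias

    deep = deepcopy(conf)
    deep['OPTIONS']['sslmode'] = 'disable'
    assert conf['OPTIONS']['sslmode'] == 'require'  # original untouched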
@@ -259,6 +259,12 @@ class AWXConsumerPG(AWXConsumerBase):
                     current_downtime = time.time() - self.pg_down_time
                     if current_downtime > self.pg_max_wait:
                         logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
+                        # Sending QUIT to multiprocess queue to signal workers to exit
+                        for worker in self.pool.workers:
+                            try:
+                                worker.quit()
+                            except Exception:
+                                logger.exception(f"Error sending QUIT to worker {worker}")
                         raise
                 # Wait for a second before next attempt, but still listen for any shutdown signals
                 for i in range(10):
@@ -270,6 +276,12 @@ class AWXConsumerPG(AWXConsumerBase):
         except Exception:
             # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
             logger.exception('Encountered unhandled error in dispatcher main loop')
+            # Sending QUIT to multiprocess queue to signal workers to exit
+            for worker in self.pool.workers:
+                try:
+                    worker.quit()
+                except Exception:
+                    logger.exception(f"Error sending QUIT to worker {worker}")
             raise
@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
         kwargs.setdefault('related_name', '+')
         kwargs.setdefault('null', 'True')
         kwargs.setdefault('editable', False)
-        kwargs.setdefault('on_delete', models.CASCADE)
+        kwargs.setdefault('on_delete', models.SET_NULL)
         super(ImplicitRoleField, self).__init__(*args, **kwargs)

     def deconstruct(self):
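`on_delete=SET_NULL` only makes sense on a nullable foreign key, which this field already guarantees via its `null` default; with SET_NULL, deleting the referenced Role clears the pointer instead of cascading the delete into the owning row. A generic Django illustration (not AWX models):

    from django.db import models

    class Owner(models.Model):
        # With SET_NULL, deleting the related Role row nulls this column;
        # with the old CASCADE default, it would have deleted the Owner row too.
        role = models.ForeignKey('Role', null=True, on_delete=models.SET_NULL, related_name='+')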
awx/main/management/commands/check_instance_ready.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+from django.core.management.base import BaseCommand, CommandError
+from awx.main.models.ha import Instance
+
+
+class Command(BaseCommand):
+    help = 'Check if the task manager instance is ready throw error if not ready, can be use as readiness probe for k8s.'
+
+    def handle(self, *args, **options):
+        if Instance.objects.me().node_state != Instance.States.READY:
+            raise CommandError('Instance is not ready')  # so that return code is not 0
+
+        return
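As the help text says, the command is intended as a Kubernetes readiness probe: it exits non-zero (via CommandError) until the instance reports READY. A hypothetical programmatic caller, mapping the result to a boolean:

    # Invoke the new command through Django's management API; when run via
    # awx-manage, CommandError translates to a non-zero exit code instead.
    from django.core.management import call_command
    from django.core.management.base import CommandError

    def instance_is_ready() -> bool:
        try:
            call_command('check_instance_ready')
            return True
        except CommandError:
            return False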
awx/main/management/commands/dump_auth_config.py (new file, 195 lines)
@@ -0,0 +1,195 @@
+import json
+import os
+import sys
+import re
+from typing import Any
+
+from django.core.management.base import BaseCommand
+from django.conf import settings
+
+from awx.conf import settings_registry
+
+
+class Command(BaseCommand):
+    help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'
+
+    DAB_SAML_AUTHENTICATOR_KEYS = {
+        "SP_ENTITY_ID": True,
+        "SP_PUBLIC_CERT": True,
+        "SP_PRIVATE_KEY": True,
+        "ORG_INFO": True,
+        "TECHNICAL_CONTACT": True,
+        "SUPPORT_CONTACT": True,
+        "SP_EXTRA": False,
+        "SECURITY_CONFIG": False,
+        "EXTRA_DATA": False,
+        "ENABLED_IDPS": True,
+        "CALLBACK_URL": False,
+    }
+
+    DAB_LDAP_AUTHENTICATOR_KEYS = {
+        "SERVER_URI": True,
+        "BIND_DN": False,
+        "BIND_PASSWORD": False,
+        "CONNECTION_OPTIONS": False,
+        "GROUP_TYPE": True,
+        "GROUP_TYPE_PARAMS": True,
+        "GROUP_SEARCH": False,
+        "START_TLS": False,
+        "USER_DN_TEMPLATE": True,
+        "USER_ATTR_MAP": True,
+        "USER_SEARCH": False,
+    }
+
+    def is_enabled(self, settings, keys):
+        missing_fields = []
+        for key, required in keys.items():
+            if required and not settings.get(key):
+                missing_fields.append(key)
+        if missing_fields:
+            return False, missing_fields
+        return True, None
+
+    def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
+        awx_ldap_settings = {}
+
+        for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
+            key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
+            value = getattr(settings, awx_ldap_setting, None)
+            awx_ldap_settings[key] = value
+
+        grouped_settings = {}
+
+        for key, value in awx_ldap_settings.items():
+            match = re.search(r'(\d+)', key)
+            index = int(match.group()) if match else 0
+            new_key = re.sub(r'\d+_', '', key)
+
+            if index not in grouped_settings:
+                grouped_settings[index] = {}
+
+            grouped_settings[index][new_key] = value
+            if new_key == "GROUP_TYPE" and value:
+                grouped_settings[index][new_key] = type(value).__name__
+
+            if new_key == "SERVER_URI" and value:
+                value = value.split(", ")
+                grouped_settings[index][new_key] = value
+
+            if type(value).__name__ == "LDAPSearch":
+                data = []
+                data.append(value.base_dn)
+                data.append("SCOPE_SUBTREE")
+                data.append(value.filterstr)
+                grouped_settings[index][new_key] = data
+
+        return grouped_settings
+
+    def get_awx_saml_settings(self) -> dict[str, Any]:
+        awx_saml_settings = {}
+        for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
+            awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)
+
+        return awx_saml_settings
+
+    def format_config_data(self, enabled, awx_settings, type, keys, name):
+        config = {
+            "type": f"ansible_base.authentication.authenticator_plugins.{type}",
+            "name": name,
+            "enabled": enabled,
+            "create_objects": True,
+            "users_unique": False,
+            "remove_users": True,
+            "configuration": {},
+        }
+        for k in keys:
+            v = awx_settings.get(k)
+            config["configuration"].update({k: v})
+
+        if type == "saml":
+            idp_to_key_mapping = {
+                "url": "IDP_URL",
+                "x509cert": "IDP_X509_CERT",
+                "entity_id": "IDP_ENTITY_ID",
+                "attr_email": "IDP_ATTR_EMAIL",
+                "attr_groups": "IDP_GROUPS",
+                "attr_username": "IDP_ATTR_USERNAME",
+                "attr_last_name": "IDP_ATTR_LAST_NAME",
+                "attr_first_name": "IDP_ATTR_FIRST_NAME",
+                "attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
+            }
+            for idp_name in awx_settings.get("ENABLED_IDPS", {}):
+                for key in idp_to_key_mapping:
+                    value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
+                    if value is not None:
+                        config["name"] = idp_name
+                        config["configuration"].update({idp_to_key_mapping[key]: value})
+
+        return config
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "output_file",
+            nargs="?",
+            type=str,
+            default=None,
+            help="Output JSON file path",
+        )
+
+    def handle(self, *args, **options):
+        try:
+            data = []
+
+            # dump SAML settings
+            awx_saml_settings = self.get_awx_saml_settings()
+            awx_saml_enabled, saml_missing_fields = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
+            if awx_saml_enabled:
+                awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
+                data.append(
+                    self.format_config_data(
+                        awx_saml_enabled,
+                        awx_saml_settings,
+                        "saml",
+                        self.DAB_SAML_AUTHENTICATOR_KEYS,
+                        awx_saml_name,
+                    )
+                )
+            else:
+                data.append({"SAML_missing_fields": saml_missing_fields})
+
+            # dump LDAP settings
+            awx_ldap_group_settings = self.get_awx_ldap_settings()
+            for awx_ldap_name, awx_ldap_settings in awx_ldap_group_settings.items():
+                awx_ldap_enabled, ldap_missing_fields = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
+                if awx_ldap_enabled:
+                    data.append(
+                        self.format_config_data(
+                            awx_ldap_enabled,
+                            awx_ldap_settings,
+                            "ldap",
+                            self.DAB_LDAP_AUTHENTICATOR_KEYS,
+                            f"LDAP_{awx_ldap_name}",
+                        )
+                    )
+                else:
+                    data.append({f"LDAP_{awx_ldap_name}_missing_fields": ldap_missing_fields})
+
+            # write to file if requested
+            if options["output_file"]:
+                # Define the path for the output JSON file
+                output_file = options["output_file"]
+
+                # Ensure the directory exists
+                os.makedirs(os.path.dirname(output_file), exist_ok=True)
+
+                # Write data to the JSON file
+                with open(output_file, "w") as f:
+                    json.dump(data, f, indent=4)
+
+                self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
+            else:
+                self.stdout.write(json.dumps(data, indent=4))
+
+        except Exception as e:
+            self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
+            sys.exit(1)
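The command emits a JSON list of authenticator definitions (or `*_missing_fields` entries for incomplete ones), either to stdout or to the optional positional file path. A hypothetical invocation sketch:

    # The positional output_file argument is optional; without it, the JSON
    # document is written to stdout.
    from django.core.management import call_command

    call_command('dump_auth_config')                           # print JSON to stdout
    call_command('dump_auth_config', '/tmp/auth_config.json')  # write JSON to a file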
@@ -92,8 +92,6 @@ class Command(BaseCommand):
         return host_stats

     def handle(self, *arg, **options):
-        WebsocketsMetricsServer().start()
-
         # it's necessary to delay this import in case
         # database migrations are still running
         from awx.main.models.ha import Instance
@@ -103,8 +101,9 @@ class Command(BaseCommand):
             migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
             connection.close()  # Because of async nature, main loop will use new connection, so close this
         except Exception as exc:
-            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
-            time.sleep(10)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
+            # sleeping before logging because logging relies on settings which require a database connection...
+            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
             return

         # In containerized deployments, migrations happen in the task container,
@@ -123,13 +122,14 @@ class Command(BaseCommand):
             return

         try:
-            my_hostname = Instance.objects.my_hostname()
+            my_hostname = Instance.objects.my_hostname()  # This relies on settings.CLUSTER_HOST_ID which requires database connection
            logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
         except RuntimeError as e:
             # the CLUSTER_HOST_ID in the task, and web instance must match and
             # ensure network connectivity between the task and web instance
-            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
-            time.sleep(5)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
+            # sleeping before logging because logging relies on settings which require a database connection...
+            logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
             return

         if options.get('status'):
@@ -166,8 +166,16 @@ class Command(BaseCommand):

             return

+        WebsocketsMetricsServer().start()
+
         try:
             logger.info('Starting Websocket Relayer...')
             websocket_relay_manager = WebSocketRelayManager()
             asyncio.run(websocket_relay_manager.run())
         except KeyboardInterrupt:
             logger.info('Terminating Websocket Relayer')
+        except BaseException as e:  # BaseException is used to catch all exceptions including asyncio.CancelledError
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
+            # sleeping before logging because logging relies on settings which require a database connection...
+            logger.warning(f"Encountered an error while running Websocket Relayer {e}, slept for 10s...")
+            return
@@ -1,25 +1,25 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.

+import functools
 import logging
 import threading
 import time
 import urllib.parse
+from pathlib import Path, PurePosixPath

 from django.conf import settings
 from django.contrib.auth import logout
-from django.contrib.auth.models import User
-from django.db.migrations.executor import MigrationExecutor
+from django.db.migrations.recorder import MigrationRecorder
 from django.db import connection
 from django.shortcuts import redirect
-from django.apps import apps
 from django.utils.deprecation import MiddlewareMixin
-from django.utils.translation import gettext_lazy as _
 from django.urls import reverse, resolve

-from awx.main.utils.named_url_graph import generate_graph, GraphNode
-from awx.conf import fields, register
+from awx.main import migrations
 from awx.main.utils.profiling import AWXProfiler
+from awx.main.utils.common import memoize
+from awx.urls import get_urlpatterns


 logger = logging.getLogger('awx.main.middleware')
@@ -97,49 +97,7 @@ class DisableLocalAuthMiddleware(MiddlewareMixin):
                 logout(request)


-def _customize_graph():
-    from awx.main.models import Instance, Schedule, UnifiedJobTemplate
-
-    for model in [Schedule, UnifiedJobTemplate]:
-        if model in settings.NAMED_URL_GRAPH:
-            settings.NAMED_URL_GRAPH[model].remove_bindings()
-            settings.NAMED_URL_GRAPH.pop(model)
-    if User not in settings.NAMED_URL_GRAPH:
-        settings.NAMED_URL_GRAPH[User] = GraphNode(User, ['username'], [])
-        settings.NAMED_URL_GRAPH[User].add_bindings()
-    if Instance not in settings.NAMED_URL_GRAPH:
-        settings.NAMED_URL_GRAPH[Instance] = GraphNode(Instance, ['hostname'], [])
-        settings.NAMED_URL_GRAPH[Instance].add_bindings()
-
-
 class URLModificationMiddleware(MiddlewareMixin):
     def __init__(self, get_response):
-        models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
-        generate_graph(models)
-        _customize_graph()
-        register(
-            'NAMED_URL_FORMATS',
-            field_class=fields.DictField,
-            read_only=True,
-            label=_('Formats of all available named urls'),
-            help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
-            category=_('Named URL'),
-            category_slug='named-url',
-        )
-        register(
-            'NAMED_URL_GRAPH_NODES',
-            field_class=fields.DictField,
-            read_only=True,
-            label=_('List of all named url graph nodes.'),
-            help_text=_(
-                'Read-only list of key-value pairs that exposes named URL graph topology.'
-                ' Use this list to programmatically generate named URLs for resources'
-            ),
-            category=_('Named URL'),
-            category_slug='named-url',
-        )
         super().__init__(get_response)

     @staticmethod
     def _hijack_for_old_jt_name(node, kwargs, named_url):
         try:
@@ -180,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):

     @classmethod
     def _convert_named_url(cls, url_path):
-        url_units = url_path.split('/')
-        # If the identifier is an empty string, it is always invalid.
-        if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
-            return url_path
-        resource = url_units[3]
+        default_prefix = PurePosixPath('/api/v2/')
+        optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
+
+        url_path_original = url_path
+        url_path = PurePosixPath(url_path)
+
+        if set(optional_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = optional_prefix
+        elif set(default_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = default_prefix
+        else:
+            return url_path_original
+
+        # Remove prefix
+        url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
+        try:
+            resource_path = PurePosixPath(url_path.parts[0])
+            name = url_path.parts[1]
+            url_suffix = PurePosixPath(*url_path.parts[2:])  # remove name and resource
+        except IndexError:
+            return url_path_original
+
+        resource = resource_path.parts[0]
         if resource in settings.NAMED_URL_MAPPINGS:
-            url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
-        return '/'.join(url_units)
+            pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
+        else:
+            return url_path_original
+
+        parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
+        return PurePosixPath(*parts).as_posix() + '/'

     def process_request(self, request):
         old_path = request.path_info
@@ -198,9 +178,46 @@ class URLModificationMiddleware(MiddlewareMixin):
             request.path_info = new_path


+@memoize(ttl=20)
+def is_migrating():
+    latest_number = 0
+    latest_name = ''
+    for migration_path in Path(migrations.__path__[0]).glob('[0-9]*.py'):
+        try:
+            migration_number = int(migration_path.name.split('_', 1)[0])
+        except ValueError:
+            continue
+        if migration_number > latest_number:
+            latest_number = migration_number
+            latest_name = migration_path.name[: -len('.py')]
+    return not MigrationRecorder(connection).migration_qs.filter(app='main', name=latest_name).exists()
+
+
 class MigrationRanCheckMiddleware(MiddlewareMixin):
     def process_request(self, request):
-        executor = MigrationExecutor(connection)
-        plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
-        if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
+        if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
             return redirect(reverse("ui:migrations_notran"))
+
+
+class OptionalURLPrefixPath(MiddlewareMixin):
+    @functools.lru_cache
+    def _url_optional(self, prefix):
+        # Relevant Django code path https://github.com/django/django/blob/stable/4.2.x/django/core/handlers/base.py#L300
+        #
+        # resolve_request(request)
+        #   get_resolver(request.urlconf)
+        #     _get_cached_resolver(request.urlconf) <-- cached via @functools.cache
+        #
+        # Django will attempt to cache the value(s) of request.urlconf
+        # Being hashable is a prerequisite for being cacheable.
+        # tuple() is hashable; list() is not.
+        # Hence the tuple(list()) wrap.
+        return tuple(get_urlpatterns(prefix=prefix))
+
+    def process_request(self, request):
+        prefix = settings.OPTIONAL_API_URLPATTERN_PREFIX
+
+        if request.path.startswith(f"/api/{prefix}"):
+            request.urlconf = self._url_optional(prefix)
+        else:
+            request.urlconf = 'awx.urls'
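The `_convert_named_url` rewrite leans on `PurePosixPath.parts` to recognize either API prefix and to splice the primary key back into the URL without manual string slicing. The core mechanics, isolated (the job template name and pk are made-up):

    from pathlib import PurePosixPath

    prefix = PurePosixPath('/api/v2/')
    url = PurePosixPath('/api/v2/job_templates/Demo Job Template/schedules/')
    assert set(prefix.parts).issubset(set(url.parts))

    remainder = PurePosixPath(*url.parts[len(prefix.parts):])
    resource, name = remainder.parts[0], remainder.parts[1]  # name would be resolved to a pk
    suffix = PurePosixPath(*remainder.parts[2:])
    rebuilt = PurePosixPath(*(prefix.parts + (resource, '42') + suffix.parts))
    assert rebuilt.as_posix() + '/' == '/api/v2/job_templates/42/schedules/'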
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='job_template_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
            model_name='organization',
            name='credential_admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
        ),
        migrations.AddField(
            model_name='organization',
            name='inventory_admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
        ),
        migrations.AddField(
            model_name='organization',
            name='project_admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
        ),
        migrations.AddField(
            model_name='organization',
            name='workflow_admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
        ),
        migrations.AddField(
            model_name='organization',
            name='notification_admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
        ),
        migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
                related_name='+',
                to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
            model_name='inventory',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
            ),
        ),
        migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
                related_name='+',
                to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
                related_name='+',
                to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
            name='execute_role',
            field=awx.main.fields.ImplicitRoleField(
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['admin_role', 'organization.execute_role'],
                related_name='+',
                to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
                related_name='+',
                to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
            name='execute_role',
            field=awx.main.fields.ImplicitRoleField(
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
                related_name='+',
                to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=[
                    'admin_role',
                    'execute_role',

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
            model_name='organization',
            name='member_role',
            field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
            ),
        ),
        migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=[
                    'member_role',
                    'auditor_role',
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
            model_name='organization',
            name='approval_role',
            field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
            preserve_default='True',
        ),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['organization.approval_role', 'admin_role'],
                related_name='+',
                to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=[
                    'member_role',
                    'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
                related_name='+',
                to='main.Role',

@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['organization.job_template_admin_role'],
                related_name='+',
                to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['admin_role', 'organization.execute_role'],
                related_name='+',
                to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
                related_name='+',
                to='main.Role',

@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
            model_name='organization',
            name='execution_environment_admin_role',
            field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
            ),
            preserve_default='True',
        ),

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=[
                    'member_role',
                    'auditor_role',

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['singleton:system_administrator'],
                related_name='+',
                to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
            field=awx.main.fields.ImplicitRoleField(
                editable=False,
                null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
                related_name='+',
                to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
            model_name='instancegroup',
            name='use_role',
            field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
            ),
            preserve_default='True',
        ),
@@ -0,0 +1,58 @@
+# Generated by Django 4.2.6 on 2024-02-15 20:51
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0189_inbound_hop_nodes'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+    ]
awx/main/migrations/0191_add_django_permissions.py (new file, 85 lines)
@@ -0,0 +1,85 @@
+# Generated by Django 4.2.6 on 2023-11-13 20:10
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0190_alter_inventorysource_source_and_more'),
+        ('dab_rbac', '__first__'),
+    ]
+
+    operations = [
+        # Add custom permissions for all special actions, like update, use, adhoc, and so on
+        migrations.AlterModelOptions(
+            name='credential',
+            options={'ordering': ('name',), 'permissions': [('use_credential', 'Can use credential in a job or related resource')]},
+        ),
+        migrations.AlterModelOptions(
+            name='instancegroup',
+            options={'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')]},
+        ),
+        migrations.AlterModelOptions(
+            name='inventory',
+            options={
+                'ordering': ('name',),
+                'permissions': [
+                    ('use_inventory', 'Can use inventory in a job template'),
+                    ('adhoc_inventory', 'Can run ad hoc commands'),
+                    ('update_inventory', 'Can update inventory sources in inventory'),
+                ],
+                'verbose_name_plural': 'inventories',
+            },
+        ),
+        migrations.AlterModelOptions(
+            name='jobtemplate',
+            options={'ordering': ('name',), 'permissions': [('execute_jobtemplate', 'Can run this job template')]},
+        ),
+        migrations.AlterModelOptions(
+            name='project',
+            options={
+                'ordering': ('id',),
+                'permissions': [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')],
+            },
+        ),
+        migrations.AlterModelOptions(
+            name='workflowjobtemplate',
+            options={
+                'permissions': [
+                    ('execute_workflowjobtemplate', 'Can run this workflow job template'),
+                    ('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
+                ]
+            },
+        ),
+        migrations.AlterModelOptions(
+            name='organization',
+            options={
+                'default_permissions': ('change', 'delete', 'view'),
+                'ordering': ('name',),
+                'permissions': [
+                    ('member_organization', 'Basic participation permissions for organization'),
+                    ('audit_organization', 'Audit everything inside the organization'),
+                ],
+            },
+        ),
+        migrations.AlterModelOptions(
+            name='team',
+            options={'ordering': ('organization__name', 'name'), 'permissions': [('member_team', 'Inherit all roles assigned to this team')]},
+        ),
+        # Remove the 'add' default permission for a few models
+        migrations.AlterModelOptions(
+            name='jobtemplate',
+            options={
+                'default_permissions': ('change', 'delete', 'view'),
+                'ordering': ('name',),
+                'permissions': [('execute_jobtemplate', 'Can run this job template')],
+            },
+        ),
+        migrations.AlterModelOptions(
+            name='instancegroup',
+            options={
+                'default_permissions': ('change', 'delete', 'view'),
+                'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')],
+            },
+        ),
+    ]
20
awx/main/migrations/0192_custom_roles.py
Normal file
@@ -0,0 +1,20 @@
# Generated by Django 4.2.6 on 2023-11-21 02:06

from django.db import migrations

from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permissions_as_operation, setup_managed_role_definitions


class Migration(migrations.Migration):
    dependencies = [
        ('main', '0191_add_django_permissions'),
        ('dab_rbac', '__first__'),
    ]

    operations = [
        # make sure permissions and content types have been created by now
        # these normally run in a post_migrate signal but we need them for our logic
        migrations.RunPython(create_permissions_as_operation, migrations.RunPython.noop),
        migrations.RunPython(setup_managed_role_definitions, migrations.RunPython.noop),
        migrations.RunPython(migrate_to_new_rbac, migrations.RunPython.noop),
    ]
51
awx/main/migrations/0193_alter_notification_notification_type_and_more.py
Normal file
@@ -0,0 +1,51 @@
# Generated by Django 4.2.6 on 2024-05-08 07:29

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0192_custom_roles'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notification',
            name='notification_type',
            field=models.CharField(
                choices=[
                    ('awssns', 'AWS SNS'),
                    ('email', 'Email'),
                    ('grafana', 'Grafana'),
                    ('irc', 'IRC'),
                    ('mattermost', 'Mattermost'),
                    ('pagerduty', 'Pagerduty'),
                    ('rocketchat', 'Rocket.Chat'),
                    ('slack', 'Slack'),
                    ('twilio', 'Twilio'),
                    ('webhook', 'Webhook'),
                ],
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='notificationtemplate',
            name='notification_type',
            field=models.CharField(
                choices=[
                    ('awssns', 'AWS SNS'),
                    ('email', 'Email'),
                    ('grafana', 'Grafana'),
                    ('irc', 'IRC'),
                    ('mattermost', 'Mattermost'),
                    ('pagerduty', 'Pagerduty'),
                    ('rocketchat', 'Rocket.Chat'),
                    ('slack', 'Slack'),
                    ('twilio', 'Twilio'),
                    ('webhook', 'Webhook'),
                ],
                max_length=32,
            ),
        ),
    ]
61
awx/main/migrations/0194_alter_inventorysource_source_and_more.py
Normal file
@@ -0,0 +1,61 @@
# Generated by Django 4.2.10 on 2024-06-12 19:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0193_alter_notification_notification_type_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                    ('openshift_virtualization', 'OpenShift Virtualization'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                    ('openshift_virtualization', 'OpenShift Virtualization'),
                ],
                default=None,
                max_length=32,
            ),
        ),
    ]
26
awx/main/migrations/0195_EE_permissions.py
Normal file
@@ -0,0 +1,26 @@
# Generated by Django 4.2.6 on 2024-06-20 15:55

from django.db import migrations


def delete_execution_environment_read_role(apps, schema_editor):
    permission_classes = [apps.get_model('auth', 'Permission'), apps.get_model('dab_rbac', 'DABPermission')]
    for permission_cls in permission_classes:
        ee_read_perm = permission_cls.objects.filter(codename='view_executionenvironment').first()
        if ee_read_perm:
            ee_read_perm.delete()


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0194_alter_inventorysource_source_and_more'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='executionenvironment',
            options={'default_permissions': ('add', 'change', 'delete'), 'ordering': ('-created',)},
        ),
        migrations.RunPython(delete_execution_environment_read_role, migrations.RunPython.noop),
    ]
402
awx/main/migrations/_dab_rbac.py
Normal file
@@ -0,0 +1,402 @@
import json
import logging

from django.apps import apps as global_apps
from django.db.models import ForeignKey
from django.conf import settings
from ansible_base.rbac.migrations._utils import give_permissions
from ansible_base.rbac.management import create_dab_permissions

from awx.main.fields import ImplicitRoleField
from awx.main.constants import role_name_to_perm_mapping

from ansible_base.rbac.permission_registry import permission_registry


logger = logging.getLogger('awx.main.migrations._dab_rbac')


def create_permissions_as_operation(apps, schema_editor):
    create_dab_permissions(global_apps.get_app_config("main"), apps=apps)


"""
Data structures and methods for the migration of the old Role model to ObjectRole
"""

system_admin = ImplicitRoleField(name='system_administrator')
system_auditor = ImplicitRoleField(name='system_auditor')
system_admin.model = None
system_auditor.model = None


def resolve_parent_role(f, role_path):
    """
    Given a field and a path declared in parent_role from the field definition, like
        execute_role = ImplicitRoleField(parent_role='admin_role')
    This expects to be passed in (execute_role object, "admin_role")
    It should return the admin_role from that object
    """
    if role_path == 'singleton:system_administrator':
        return system_admin
    elif role_path == 'singleton:system_auditor':
        return system_auditor
    else:
        related_field = f
        current_model = f.model
        for related_field_name in role_path.split('.'):
            related_field = current_model._meta.get_field(related_field_name)
            if isinstance(related_field, ForeignKey) and not isinstance(related_field, ImplicitRoleField):
                current_model = related_field.related_model
        return related_field
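
# Example (hypothetical field, for illustration): for a field defined as
#     admin_role = ImplicitRoleField(parent_role='organization.admin_role')
# resolve_parent_role(admin_role, 'organization.admin_role') follows the 'organization'
# ForeignKey to the related model and returns that model's admin_role field, while the
# 'singleton:*' paths short-circuit to the module-level sentinel fields defined above.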


def build_role_map(apps):
    """
    For the old Role model, this builds and returns dictionaries (children, parents)
    which give a global mapping of the ImplicitRoleField instances according to the graph
    """
    models = set(apps.get_app_config('main').get_models())

    all_fields = set()
    parents = {}
    children = {}

    all_fields.add(system_admin)
    all_fields.add(system_auditor)

    for cls in models:
        for f in cls._meta.get_fields():
            if isinstance(f, ImplicitRoleField):
                all_fields.add(f)

    for f in all_fields:
        if f.parent_role is not None:
            if isinstance(f.parent_role, str):
                parent_roles = [f.parent_role]
            else:
                parent_roles = f.parent_role

            # SPECIAL CASE: organization auditor_role is not a child of admin_role
            # this makes no practical sense and conflicts with expected managed role
            # so we put it in as a hack here
            if f.name == 'auditor_role' and f.model._meta.model_name == 'organization':
                parent_roles.append('admin_role')

            parent_list = []
            for rel_name in parent_roles:
                parent_list.append(resolve_parent_role(f, rel_name))

            parents[f] = parent_list

    # build children lookup from parents lookup
    for child_field, parent_list in parents.items():
        for parent_field in parent_list:
            children.setdefault(parent_field, [])
            children[parent_field].append(child_field)

    return (parents, children)


def get_descendents(f, children_map):
    """
    Given ImplicitRoleField F and the children mapping, returns all descendents
    of that field, as a set of other fields, including itself
    """
    ret = {f}
    if f in children_map:
        for child_field in children_map[f]:
            ret.update(get_descendents(child_field, children_map))
    return ret
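
# Example (hypothetical graph, for illustration): if children maps
# admin_role -> [execute_role] and execute_role -> [read_role], then
# get_descendents(admin_role, children) returns {admin_role, execute_role, read_role},
# so get_permissions_for_role below grants a role the permissions of every role
# beneath it in the graph.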


def get_permissions_for_role(role_field, children_map, apps):
    Permission = apps.get_model('dab_rbac', 'DABPermission')
    ContentType = apps.get_model('contenttypes', 'ContentType')

    perm_list = []
    for child_field in get_descendents(role_field, children_map):
        if child_field.name in role_name_to_perm_mapping:
            for perm_name in role_name_to_perm_mapping[child_field.name]:
                if perm_name == 'add_' and role_field.model._meta.model_name != 'organization':
                    continue  # only organizations can contain add permissions
                perm = Permission.objects.filter(content_type=ContentType.objects.get_for_model(child_field.model), codename__startswith=perm_name).first()
                if perm is not None and perm not in perm_list:
                    perm_list.append(perm)

    # special case for two models that have object roles but no organization roles in the old system
    if role_field.name == 'notification_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
        ct = ContentType.objects.get_for_model(apps.get_model('main', 'NotificationTemplate'))
        perm_list.extend(list(Permission.objects.filter(content_type=ct)))
    if role_field.name == 'execution_environment_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
        ct = ContentType.objects.get_for_model(apps.get_model('main', 'ExecutionEnvironment'))
        perm_list.extend(list(Permission.objects.filter(content_type=ct)))

    # more special cases for those same above special org-level roles
    if role_field.name == 'auditor_role':
        perm_list.append(Permission.objects.get(codename='view_notificationtemplate'))

    return perm_list


def model_class(ct, apps):
    """
    You cannot use model methods in migrations, so this duplicates
    what ContentType.model_class does, using current apps
    """
    try:
        return apps.get_model(ct.app_label, ct.model)
    except LookupError:
        return None


def migrate_to_new_rbac(apps, schema_editor):
    """
    This method moves the assigned permissions from the old rbac.py models
    to the new RoleDefinition and ObjectRole models
    """
    Role = apps.get_model('main', 'Role')
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
    Permission = apps.get_model('dab_rbac', 'DABPermission')

    # remove add permissions that are not valid for migrations from old versions
    for perm_str in ('add_organization', 'add_jobtemplate'):
        perm = Permission.objects.filter(codename=perm_str).first()
        if perm:
            perm.delete()

    managed_definitions = dict()
    for role_definition in RoleDefinition.objects.filter(managed=True):
        permissions = frozenset(role_definition.permissions.values_list('id', flat=True))
        managed_definitions[permissions] = role_definition

    # Build map of old role model
    parents, children = build_role_map(apps)

    # NOTE: this import is expected to break at some point, and then just move the data here
    from awx.main.models.rbac import role_descriptions

    for role in Role.objects.prefetch_related('members', 'parents').iterator():
        if role.singleton_name:
            continue  # only bothering to migrate object roles

        team_roles = []
        for parent in role.parents.all():
            if parent.id not in json.loads(role.implicit_parents):
                team_roles.append(parent)

        # we will not create any roles that do not have any users or teams
        if not (role.members.all() or team_roles):
            logger.debug(f'Skipping role {role.role_field} for {role.content_type.model}-{role.object_id} due to no members')
            continue

        # get a list of permissions that the old role would grant
        object_cls = apps.get_model(f'main.{role.content_type.model}')
        object = object_cls.objects.get(pk=role.object_id)  # WORKAROUND, role.content_object does not work in migrations
        f = object._meta.get_field(role.role_field)  # should be ImplicitRoleField
        perm_list = get_permissions_for_role(f, children, apps)

        permissions = frozenset(perm.id for perm in perm_list)

        # With the needed permissions established, obtain the RoleDefinition this will need, priorities:
        # 1. If it exists as a managed RoleDefinition then obviously use that
        # 2. If we already created this for a prior role, use that
        # 3. Create a new RoleDefinition that lists those permissions
        if permissions in managed_definitions:
            role_definition = managed_definitions[permissions]
        else:
            action = role.role_field.rsplit('_', 1)[0]  # remove the trailing '_role' from the field name
            role_definition_name = f'{model_class(role.content_type, apps).__name__} {action.title()}'
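            # Example (for illustration): an old 'execute_role' on a JobTemplate yields
            # action == 'execute' and the name 'JobTemplate Execute'; this naming is
            # reversed by get_role_from_object_role() in awx/main/models/rbac.py.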

            description = role_descriptions[role.role_field]
            if type(description) == dict:
                if role.content_type.model in description:
                    description = description.get(role.content_type.model)
                else:
                    description = description.get('default')
            if '%s' in description:
                description = description % role.content_type.model

            role_definition, created = RoleDefinition.objects.get_or_create(
                name=role_definition_name,
                defaults={'description': description, 'content_type_id': role.content_type_id},
            )

            if created:
                logger.info(f'Created custom Role Definition {role_definition_name}, pk={role_definition.pk}')
                role_definition.permissions.set(perm_list)

        # Create the object role and add users to it
        give_permissions(
            apps,
            role_definition,
            users=role.members.all(),
            teams=[tr.object_id for tr in team_roles],
            object_id=role.object_id,
            content_type_id=role.content_type_id,
        )

    # Create new replacement system auditor role
    new_system_auditor, created = RoleDefinition.objects.get_or_create(
        name='System Auditor',
        defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
    )
    new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))

    # migrate is_system_auditor flag, because it is no longer handled by a system role
    old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first()
    if old_system_auditor:
        # if the system auditor role is not present, this is a new install and no users should exist
        ct = 0
        for user in old_system_auditor.members.all():
            RoleUserAssignment.objects.create(user=user, role_definition=new_system_auditor)
            ct += 1
        if ct:
            logger.info(f'Migrated {ct} users to new system auditor flag')


def get_or_create_managed(name, description, ct, permissions, RoleDefinition):
    role_definition, created = RoleDefinition.objects.get_or_create(name=name, defaults={'managed': True, 'description': description, 'content_type': ct})
    role_definition.permissions.set(list(permissions))

    if not role_definition.managed:
        role_definition.managed = True
        role_definition.save(update_fields=['managed'])

    if created:
        logger.info(f'Created RoleDefinition {role_definition.name} pk={role_definition.pk} with {len(permissions)} permissions')

    return role_definition


def setup_managed_role_definitions(apps, schema_editor):
    """
    Idempotent method to create or sync the managed role definitions
    """
    to_create = {
        'object_admin': '{cls.__name__} Admin',
        'org_admin': 'Organization Admin',
        'org_children': 'Organization {cls.__name__} Admin',
        'special': '{cls.__name__} {action}',
    }

    ContentType = apps.get_model('contenttypes', 'ContentType')
    Permission = apps.get_model('dab_rbac', 'DABPermission')
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    Organization = apps.get_model(settings.ANSIBLE_BASE_ORGANIZATION_MODEL)
    org_ct = ContentType.objects.get_for_model(Organization)
    managed_role_definitions = []

    org_perms = set()
    for cls in permission_registry.all_registered_models:
        ct = ContentType.objects.get_for_model(cls)
        cls_name = cls._meta.model_name
        object_perms = set(Permission.objects.filter(content_type=ct))
        # Special case for InstanceGroup which has an organization field, but is not an organization child object
        if cls_name != 'instancegroup':
            org_perms.update(object_perms)

        if 'object_admin' in to_create and cls_name != 'organization':
            indiv_perms = object_perms.copy()
            add_perms = [perm for perm in indiv_perms if perm.codename.startswith('add_')]
            if add_perms:
                for perm in add_perms:
                    indiv_perms.remove(perm)

            managed_role_definitions.append(
                get_or_create_managed(
                    to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
                )
            )

        if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
            org_child_perms = object_perms.copy()
            org_child_perms.add(Permission.objects.get(codename='view_organization'))

            managed_role_definitions.append(
                get_or_create_managed(
                    to_create['org_children'].format(cls=cls),
                    f'Has all permissions to {cls._meta.verbose_name_plural} within an organization',
                    org_ct,
                    org_child_perms,
                    RoleDefinition,
                )
            )

        if 'special' in to_create:
            special_perms = []
            for perm in object_perms:
                # Organization auditor is handled separately
                if perm.codename.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit'):
                    special_perms.append(perm)
            for perm in special_perms:
                action = perm.codename.split('_')[0]
                view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
                perm_list = [perm, view_perm]
                # Handle special-case where adhoc role also listed use permission
                if action == 'adhoc':
                    for other_perm in object_perms:
                        if other_perm.codename == 'use_inventory':
                            perm_list.append(other_perm)
                            break
                managed_role_definitions.append(
                    get_or_create_managed(
                        to_create['special'].format(cls=cls, action=action.title()),
                        f'Has {action} permissions to a single {cls._meta.verbose_name}',
                        ct,
                        perm_list,
                        RoleDefinition,
                    )
                )

    if 'org_admin' in to_create:
        managed_role_definitions.append(
            get_or_create_managed(
                to_create['org_admin'].format(cls=Organization),
                'Has all permissions to a single organization and all objects inside of it',
                org_ct,
                org_perms,
                RoleDefinition,
            )
        )

    # Special "organization action" roles
    audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
    audit_permissions.append(Permission.objects.get(codename='audit_organization'))
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Audit',
            'Has permission to view all objects inside of a single organization',
            org_ct,
            audit_permissions,
            RoleDefinition,
        )
    )

    org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Execute',
            'Has permission to execute all runnable objects in the organization',
            org_ct,
            [perm for perm in org_perms if perm.codename in org_execute_permissions],
            RoleDefinition,
        )
    )

    org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Approval',
            'Has permission to approve any workflow steps within a single organization',
            org_ct,
            [perm for perm in org_perms if perm.codename in org_approval_permissions],
            RoleDefinition,
        )
    )

    unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
    for role_definition in unexpected_role_definitions:
        logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
        role_definition.delete()
@@ -1,12 +1,19 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

import json

# Django
from django.conf import settings  # noqa
from django.db import connection
from django.db.models.signals import pre_delete  # noqa

# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
from ansible_base.rbac import permission_registry
from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment
from ansible_base.lib.utils.models import prevent_search
from ansible_base.lib.utils.models import user_summary_fields

# AWX
from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES  # noqa

@@ -99,6 +106,8 @@ from awx.main.access import get_user_queryset, check_user_access, check_user_acc
User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))
User.add_to_class('summary_fields', user_summary_fields)


def convert_jsonfields():

@@ -167,17 +176,17 @@ pre_delete.connect(cleanup_created_modified_by, sender=User)

@property
def user_get_organizations(user):
-    return Organization.objects.filter(member_role__members=user)
+    return Organization.access_qs(user, 'member')


@property
def user_get_admin_of_organizations(user):
-    return Organization.objects.filter(admin_role__members=user)
+    return Organization.access_qs(user, 'change')


@property
def user_get_auditor_of_organizations(user):
-    return Organization.objects.filter(auditor_role__members=user)
+    return Organization.access_qs(user, 'audit')


@property
@@ -191,11 +200,21 @@ User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
User.add_to_class('created', created)


def get_system_auditor_role():
    rd, created = RoleDefinition.objects.get_or_create(
        name='System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
    )
    if created:
        rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view')))
    return rd


@property
def user_is_system_auditor(user):
    if not hasattr(user, '_is_system_auditor'):
        if user.pk:
-            user._is_system_auditor = user.roles.filter(singleton_name='system_auditor', role_field='system_auditor').exists()
+            rd = get_system_auditor_role()
+            user._is_system_auditor = RoleUserAssignment.objects.filter(user=user, role_definition=rd).exists()
        else:
            # Odd case where user is unsaved, this should never be relied on
            return False

@@ -209,17 +228,17 @@ def user_is_system_auditor(user, tf):
    # time they've logged in, and we've just created the new User in this
    # request), we need one to set up the system auditor role
    user.save()
-    if tf:
-        role = Role.singleton('system_auditor')
-        # must check if member to not duplicate activity stream
-        if user not in role.members.all():
-            role.members.add(user)
-            user._is_system_auditor = True
-    else:
-        role = Role.singleton('system_auditor')
-        if user in role.members.all():
-            role.members.remove(user)
-            user._is_system_auditor = False
+    rd = get_system_auditor_role()
+    assignment = RoleUserAssignment.objects.filter(user=user, role_definition=rd).first()
+    prior_value = bool(assignment)
+    if prior_value != bool(tf):
+        if assignment:
+            assignment.delete()
+        else:
+            rd.give_global_permission(user)
+        user._is_system_auditor = bool(tf)
+        entry = ActivityStream.objects.create(changes=json.dumps({"is_system_auditor": [prior_value, bool(tf)]}), object1='user', operation='update')
+        entry.user.add(user)


User.add_to_class('is_system_auditor', user_is_system_auditor)

@@ -287,6 +306,10 @@ activity_stream_registrar.connect(WorkflowApprovalTemplate)
activity_stream_registrar.connect(OAuth2Application)
activity_stream_registrar.connect(OAuth2AccessToken)

# Register models
permission_registry.register(Project, Team, WorkflowJobTemplate, JobTemplate, Inventory, Organization, Credential, NotificationTemplate, ExecutionEnvironment)
permission_registry.register(InstanceGroup, parent_field_name=None)  # Not part of an organization

# prevent API filtering on certain Django-supplied sensitive fields
prevent_search(User._meta.get_field('password'))
prevent_search(OAuth2AccessToken._meta.get_field('token'))
@@ -7,6 +7,9 @@ from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.utils.translation import gettext_lazy as _
from django.utils.timezone import now

# django-ansible-base
from ansible_base.lib.utils.models import get_type_for_model

# Django-CRUM
from crum import get_current_user

@@ -139,6 +142,23 @@ class BaseModel(models.Model):
        self.save(update_fields=update_fields)
        return update_fields

    def summary_fields(self):
        """
        This exists for use by django-ansible-base, which has standard patterns
        that differ from AWX's. We enable views from DAB, and for those views to
        list summary_fields for AWX models, the models need to provide this method.
        """
        from awx.api.serializers import SUMMARIZABLE_FK_FIELDS

        model_name = get_type_for_model(self)
        related_fields = SUMMARIZABLE_FK_FIELDS.get(model_name, {})
        summary_data = {}
        for field_name in related_fields:
            fval = getattr(self, field_name, None)
            if fval is not None:
                summary_data[field_name] = fval
        return summary_data


class CreatedModifiedModel(BaseModel):
    """
@@ -21,6 +21,10 @@ from django.conf import settings
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.contrib.auth.models import User

# DRF
from rest_framework.serializers import ValidationError as DRFValidationError

# AWX
from awx.api.versioning import reverse

@@ -41,6 +45,7 @@ from awx.main.models.rbac import (
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.models import Team, Organization
from awx.main.utils import encrypt_field
from . import injectors as builtin_injectors

@@ -83,6 +88,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
        app_label = 'main'
        ordering = ('name',)
        unique_together = ('organization', 'name', 'credential_type')
        permissions = [('use_credential', 'Can use credential in a job or related resource')]

    PASSWORD_FIELDS = ['inputs']
    FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']

@@ -314,6 +320,16 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
        else:
            raise ValueError('{} is not a dynamic input field'.format(field_name))

    def validate_role_assignment(self, actor, role_definition):
        if self.organization:
            if isinstance(actor, User):
                if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
                    return
            if isinstance(actor, Team):
                if actor.organization == self.organization:
                    return
            raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})


class CredentialType(CommonModelNameNotUnique):
    """

@@ -1231,6 +1247,14 @@ ManagedCredentialType(
                'multiline': True,
                'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'),
            },
            {
                'id': 'gce_credentials',
                'label': gettext_noop('Google Cloud Platform account credentials'),
                'type': 'string',
                'secret': True,
                'multiline': True,
                'help_text': gettext_noop('Google Cloud Platform account credentials in JSON format.'),
            },
        ],
        'required': ['configuration'],
    },

@@ -130,3 +130,10 @@ def terraform(cred, env, private_data_dir):
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        f.write(cred.get_input('configuration'))
    env['TF_BACKEND_CONFIG_FILE'] = to_container_path(path, private_data_dir)
    # Handle env variables for GCP account credentials
    if 'gce_credentials' in cred.inputs:
        handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
        with os.fdopen(handle, 'w') as f:
            os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
            f.write(cred.get_input('gce_credentials'))
        env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(path, private_data_dir)
@@ -4,11 +4,12 @@ import datetime
from datetime import timezone
import logging
from collections import defaultdict
+import itertools
import time

from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, DatabaseError
+from django.db import models, DatabaseError, transaction
from django.db.models.functions import Cast
from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator

@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
    def _update_host_metrics(updated_hosts_list):
        from awx.main.models import HostMetric  # circular import

-        # bulk-create
        current_time = now()
-        HostMetric.objects.bulk_create(
-            [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
-        )
-        # bulk-update
-        batch_start, batch_size = 0, 1000
-        while batch_start <= len(updated_hosts_list):
-            batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
-            HostMetric.objects.filter(hostname__in=batched_host_list).update(
-                last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
-            )
-            batch_start += batch_size

+        # FUTURE:
+        # - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
+        # - Ability to do ORM upserts *may* have been introduced in Django 5.0.
+        #   See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
+        #   Hopefully this will be fully ready for batch use by 5.2 LTS.
+
+        args = [iter(updated_hosts_list)] * 500
+        for hosts in itertools.zip_longest(*args):
+            with transaction.atomic():
+                HostMetric.objects.bulk_create(
+                    [HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
+                )
+                HostMetric.objects.filter(hostname__in=hosts).update(
+                    last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
+                )
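        # Note on the batching idiom above (for illustration): [iter(updated_hosts_list)] * 500
        # gives 500 references to a single iterator, so zip_longest(*args) yields successive
        # 500-item chunks, padding the final chunk with None; the bulk_create list
        # comprehension therefore filters out the None fill values.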

    @property
    def job_verbosity(self):
@@ -1,6 +1,8 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from rest_framework.exceptions import ValidationError

from awx.api.versioning import reverse
from awx.main.models.base import CommonModel
from awx.main.validators import validate_container_image_name

@@ -12,6 +14,8 @@ __all__ = ['ExecutionEnvironment']

class ExecutionEnvironment(CommonModel):
    class Meta:
        ordering = ('-created',)
        # Remove view permission, as a temporary solution, defer to organization read permission
        default_permissions = ('add', 'change', 'delete')

    PULL_CHOICES = [
        ('always', _("Always pull container before running.")),

@@ -53,3 +57,16 @@
    def get_absolute_url(self, request=None):
        return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)

    def validate_role_assignment(self, actor, role_definition):
        if self.managed:
            raise ValidationError({'object_id': _('Can not assign object roles to managed Execution Environments')})
        if self.organization_id is None:
            raise ValidationError({'object_id': _('Can not assign object roles to global Execution Environments')})

        if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
            raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})
        if actor._meta.model_name == 'team':
            organization_cls = self._meta.get_field('organization').related_model
            if self.organization not in organization_cls.access_qs(actor, 'view'):
                raise ValidationError({'team': _('Team must have view permission to Execution Environment organization')})
@@ -485,6 +485,9 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin, ResourceMi

    class Meta:
        app_label = 'main'
        permissions = [('use_instancegroup', 'Can use instance group in a preference list of a resource')]
        # Since this has no direct organization field only superuser can add, so remove add permission
        default_permissions = ('change', 'delete', 'view')

    def set_default_policy_fields(self):
        self.policy_instance_list = []
@@ -11,6 +11,8 @@ import os.path
from urllib.parse import urljoin

import yaml
import tempfile
import stat

# Django
from django.conf import settings

@@ -89,6 +91,11 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
        verbose_name_plural = _('inventories')
        unique_together = [('name', 'organization')]
        ordering = ('name',)
        permissions = [
            ('use_inventory', 'Can use inventory in a job template'),
            ('adhoc_inventory', 'Can run ad hoc commands'),
            ('update_inventory', 'Can update inventory sources in inventory'),
        ]

    organization = models.ForeignKey(
        'Organization',

@@ -925,6 +932,8 @@ class InventorySourceOptions(BaseModel):
        ('rhv', _('Red Hat Virtualization')),
        ('controller', _('Red Hat Ansible Automation Platform')),
        ('insights', _('Red Hat Insights')),
        ('terraform', _('Terraform State')),
        ('openshift_virtualization', _('OpenShift Virtualization')),
    ]

    # From the options of the Django management base command

@@ -1034,7 +1043,7 @@ class InventorySourceOptions(BaseModel):
    def cloud_credential_validation(source, cred):
        if not source:
            return None
-        if cred and source not in ('custom', 'scm'):
+        if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
            # If a credential was provided, it's important that it matches
            # the actual inventory source being used (Amazon requires Amazon
            # credentials; Rackspace requires Rackspace credentials; etc...)

@@ -1043,12 +1052,14 @@ class InventorySourceOptions(BaseModel):
        # Allow an EC2 source to omit the credential. If Tower is running on
        # an EC2 instance with an IAM Role assigned, boto will use credentials
        # from the instance metadata instead of those explicitly provided.
-        elif source in CLOUD_PROVIDERS and source != 'ec2':
+        elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
            return _('Credential is required for a cloud source.')
        elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
            return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
        elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
            return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
+        elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
+            return _('A credential of type kubernetes is required for openshift_virtualization inventory sources.')
        return None

    def get_cloud_credential(self):

@@ -1399,7 +1410,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
        return selected_groups


-class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
+class CustomInventoryScript(CommonModelNameNotUnique):
    class Meta:
        app_label = 'main'
        ordering = ('name',)

@@ -1630,6 +1641,42 @@ class satellite6(PluginFileInjector):
        return ret


class terraform(PluginFileInjector):
    plugin_name = 'terraform_state'
    namespace = 'cloud'
    collection = 'terraform'
    use_fqcn = True

    def inventory_as_dict(self, inventory_update, private_data_dir):
        ret = super().inventory_as_dict(inventory_update, private_data_dir)
        credential = inventory_update.get_cloud_credential()
        config_cred = credential.get_input('configuration')
        if config_cred:
            handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
            with os.fdopen(handle, 'w') as f:
                os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
                f.write(config_cred)
            ret['backend_config_files'] = to_container_path(path, private_data_dir)
        return ret

    def build_plugin_private_data(self, inventory_update, private_data_dir):
        credential = inventory_update.get_cloud_credential()

        private_data = {'credentials': {}}
        gce_cred = credential.get_input('gce_credentials', default=None)
        if gce_cred:
            private_data['credentials'][credential] = gce_cred
        return private_data

    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
        credential = inventory_update.get_cloud_credential()
        cred_data = private_data_files['credentials']
        if credential in cred_data:
            env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
        return env
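
    # Summary (for illustration): inventory_as_dict() stages the 'configuration' input in a
    # private file referenced by backend_config_files, while an optional 'gce_credentials'
    # input is staged by build_plugin_private_data() and exposed through the
    # GOOGLE_BACKEND_CREDENTIALS environment variable, presumably for a GCS state backend.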


class controller(PluginFileInjector):
    plugin_name = 'tower'  # TODO: relying on routing for now, update after EEs pick up revised collection
    base_injector = 'template'

@@ -1649,6 +1696,16 @@ class insights(PluginFileInjector):
    use_fqcn = True


class openshift_virtualization(PluginFileInjector):
    plugin_name = 'kubevirt'
    base_injector = 'template'
    namespace = 'kubevirt'
    collection = 'core'
    downstream_namespace = 'redhat'
    downstream_collection = 'openshift_virtualization'
    use_fqcn = True


class constructed(PluginFileInjector):
    plugin_name = 'constructed'
    namespace = 'ansible'

@@ -205,6 +205,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
    class Meta:
        app_label = 'main'
        ordering = ('name',)
        permissions = [('execute_jobtemplate', 'Can run this job template')]
        # Remove add permission, ability to add comes from use permission for inventory, project, credentials
        default_permissions = ('change', 'delete', 'view')

    job_type = models.CharField(
        max_length=64,
@@ -19,13 +19,14 @@ from django.utils.translation import gettext_lazy as _
from ansible_base.lib.utils.models import prevent_search

# AWX
-from awx.main.models.rbac import Role, RoleAncestorEntry
+from awx.main.models.rbac import Role, RoleAncestorEntry, to_permissions
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
from awx.main.utils.execution_environments import get_default_execution_environment
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
from awx.main.fields import AskForField
-from awx.main.constants import ACTIVE_STATES
+from awx.main.constants import ACTIVE_STATES, org_role_to_permission


logger = logging.getLogger('awx.main.models.mixins')

@@ -64,6 +65,18 @@ class ResourceMixin(models.Model):

    @staticmethod
    def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            if cls._meta.model_name == 'organization' and role_field in org_role_to_permission:
+                # Organization roles cannot use the DAB RBAC shortcuts;
+                # something like Organization.access_qs(user, 'change_jobtemplate') is needed,
+                # not just Organization.access_qs(user, 'change')
+                if accessor.is_superuser:
+                    return cls.objects.values_list('id')
+
+                codename = org_role_to_permission[role_field]
+
+                return cls.access_ids_qs(accessor, codename, content_types=content_types)
+            return cls.access_ids_qs(accessor, to_permissions[role_field], content_types=content_types)
        if accessor._meta.model_name == 'user':
            ancestor_roles = accessor.roles.all()
        elif type(accessor) == Role:
@@ -31,6 +31,7 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.notifications.awssns_backend import AWSSNSBackend


logger = logging.getLogger('awx.main.models.notifications')

@@ -40,6 +41,7 @@ __all__ = ['NotificationTemplate', 'Notification']

class NotificationTemplate(CommonModelNameNotUnique):
    NOTIFICATION_TYPES = [
        ('awssns', _('AWS SNS'), AWSSNSBackend),
        ('email', _('Email'), CustomEmailBackend),
        ('slack', _('Slack'), SlackBackend),
        ('twilio', _('Twilio'), TwilioBackend),

@@ -498,7 +500,7 @@ class JobNotificationMixin(object):
        # Body should have at least 2 CRLF, some clients will interpret
        # the email incorrectly with blank body. So we will check that

-        if len(body.strip().splitlines()) <= 2:
+        if len(body.strip().splitlines()) < 1:
            # blank body
            body = '\r\n'.join(
                [
@@ -10,6 +10,8 @@ from django.contrib.sessions.models import Session
from django.utils.timezone import now as tz_now
from django.utils.translation import gettext_lazy as _

# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField

# AWX
from awx.api.versioning import reverse

@@ -33,6 +35,12 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
    class Meta:
        app_label = 'main'
        ordering = ('name',)
        permissions = [
            ('member_organization', 'Basic participation permissions for organization'),
            ('audit_organization', 'Audit everything inside the organization'),
        ]
        # Remove add permission, only superuser can add
        default_permissions = ('change', 'delete', 'view')

    instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
    galaxy_credentials = OrderedManyToManyField(

@@ -103,6 +111,7 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
    approval_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    resource = AnsibleResourceField(primary_key_field="id")

    def get_absolute_url(self, request=None):
        return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)

@@ -134,6 +143,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
        app_label = 'main'
        unique_together = [('organization', 'name')]
        ordering = ('organization__name', 'name')
        permissions = [('member_team', 'Inherit all roles assigned to this team')]

    organization = models.ForeignKey(
        'Organization',

@@ -151,6 +161,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
    read_role = ImplicitRoleField(
        parent_role=['organization.auditor_role', 'member_role'],
    )
    resource = AnsibleResourceField(primary_key_field="id")

    def get_absolute_url(self, request=None):
        return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)

@@ -259,6 +259,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
    class Meta:
        app_label = 'main'
        ordering = ('id',)
        permissions = [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')]

    default_environment = models.ForeignKey(
        'ExecutionEnvironment',
@@ -7,14 +7,30 @@ import threading
import contextlib
import re

# django-rest-framework
from rest_framework.serializers import ValidationError

# crum to impersonate users
from crum import impersonate

# Django
from django.db import models, transaction, connection
from django.db.models.signals import m2m_changed
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.translation import gettext_lazy as _
from django.apps import apps
from django.conf import settings

# Ansible_base app
from ansible_base.rbac.models import RoleDefinition
from ansible_base.lib.utils.models import get_type_for_model

# AWX
from awx.api.versioning import reverse
from awx.main.migrations._dab_rbac import build_role_map, get_permissions_for_role
from awx.main.constants import role_name_to_perm_mapping, org_role_to_permission

__all__ = [
    'Role',

@@ -75,6 +91,11 @@ role_descriptions = {
}


to_permissions = {}
for k, v in role_name_to_perm_mapping.items():
    to_permissions[k] = v[0].strip('_')
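
# Example (assumed mapping entry, for illustration): if role_name_to_perm_mapping contains
# 'execute_role': ['execute_'], then to_permissions['execute_role'] == 'execute', i.e. each
# old role field name is reduced to its primary permission action.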
|
||||
|
||||
|
||||
tls = threading.local() # thread local storage
|
||||
|
||||
|
||||
@@ -86,10 +107,8 @@ def check_singleton(func):
|
||||
"""
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
sys_admin = Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
|
||||
sys_audit = Role.singleton(ROLE_SINGLETON_SYSTEM_AUDITOR)
|
||||
user = args[0]
|
||||
if user in sys_admin or user in sys_audit:
|
||||
if user.is_superuser or user.is_system_auditor:
|
||||
if len(args) == 2:
|
||||
return args[1]
|
||||
return Role.objects.all()
|
||||
@@ -169,6 +188,24 @@ class Role(models.Model):
|
||||
|
||||
def __contains__(self, accessor):
|
||||
if accessor._meta.model_name == 'user':
|
||||
if accessor.is_superuser:
|
||||
return True
|
||||
if self.role_field == 'system_administrator':
|
||||
return accessor.is_superuser
|
||||
elif self.role_field == 'system_auditor':
|
||||
return accessor.is_system_auditor
|
||||
elif self.role_field in ('read_role', 'auditor_role') and accessor.is_system_auditor:
|
||||
return True
|
||||
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
if self.content_object and self.content_object._meta.model_name == 'organization' and self.role_field in org_role_to_permission:
|
||||
codename = org_role_to_permission[self.role_field]
|
||||
|
||||
return accessor.has_obj_perm(self.content_object, codename)
|
||||
|
||||
if self.role_field not in to_permissions:
|
||||
raise Exception(f'{self.role_field} evaluated but not a translatable permission')
|
||||
return accessor.has_obj_perm(self.content_object, to_permissions[self.role_field])
|
||||
return self.ancestors.filter(members=accessor).exists()
|
||||
else:
|
||||
raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')
|
||||
@@ -280,6 +317,9 @@ class Role(models.Model):
|
||||
#
|
||||
#
|
||||
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
return
|
||||
|
||||
if len(additions) == 0 and len(removals) == 0:
|
||||
return
|
||||
|
||||
@@ -412,6 +452,12 @@ class Role(models.Model):
|
||||
in their organization, but some of those roles descend from
|
||||
organization admin_role, but not auditor_role.
|
||||
"""
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
from ansible_base.rbac.models import RoleEvaluation
|
||||
|
||||
q = RoleEvaluation.objects.filter(role__in=user.has_roles.all()).values_list('object_id', 'content_type_id').query
|
||||
return roles_qs.extra(where=[f'(object_id,content_type_id) in ({q})'])
|
||||
|
||||
return roles_qs.filter(
|
||||
id__in=RoleAncestorEntry.objects.filter(
|
||||
descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
|
||||
@@ -434,6 +480,13 @@ class Role(models.Model):
|
||||
return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]
|
||||
|
||||
|
||||
class AncestorManager(models.Manager):
|
||||
def get_queryset(self):
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
raise RuntimeError('The old RBAC system has been disabled, this should never be called')
|
||||
return super(AncestorManager, self).get_queryset()
|
||||
|
||||
|
||||
class RoleAncestorEntry(models.Model):
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
@@ -451,6 +504,8 @@ class RoleAncestorEntry(models.Model):
|
||||
content_type_id = models.PositiveIntegerField(null=False)
|
||||
object_id = models.PositiveIntegerField(null=False)
|
||||
|
||||
objects = AncestorManager()
|
||||
|
||||
|
||||
def role_summary_fields_generator(content_object, role_field):
|
||||
global role_descriptions
|
||||
@@ -479,3 +534,185 @@ def role_summary_fields_generator(content_object, role_field):
|
||||
summary['name'] = role_names[role_field]
|
||||
summary['id'] = getattr(content_object, '{}_id'.format(role_field))
|
||||
return summary
|
||||
|
||||
|
||||
# ----------------- Custom Role Compatibility -------------------------
|
||||
# The following are methods to connect this (old) RBAC system to the new
|
||||
# system which allows custom roles
|
||||
# this follows the ORM interface layer documented in docs/rbac.md
|
||||
def get_role_codenames(role):
|
||||
obj = role.content_object
|
||||
if obj is None:
|
||||
return
|
||||
f = obj._meta.get_field(role.role_field)
|
||||
parents, children = build_role_map(apps)
|
||||
return [perm.codename for perm in get_permissions_for_role(f, children, apps)]
|
||||
|
||||
|
||||
def get_role_definition(role):
|
||||
"""Given a old-style role, this gives a role definition in the new RBAC system for it"""
|
||||
obj = role.content_object
|
||||
if obj is None:
|
||||
return
|
||||
f = obj._meta.get_field(role.role_field)
|
||||
action_name = f.name.rsplit("_", 1)[0]
|
||||
model_print = type(obj).__name__
|
||||
rd_name = f'{model_print} {action_name.title()} Compat'
|
||||
perm_list = get_role_codenames(role)
|
||||
defaults = {
|
||||
'content_type_id': role.content_type_id,
|
||||
'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
|
||||
}
|
||||
with impersonate(None):
|
||||
try:
|
||||
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
|
||||
except ValidationError:
|
||||
# This is a tricky case - practically speaking, users should not be allowed to create team roles
|
||||
# or roles that include the team member permission.
|
||||
# If we need to create this for compatibility purposes then we will create it as a managed non-editable role
|
||||
defaults['managed'] = True
|
||||
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
|
||||
return rd
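For reference, the compat name is derived purely from the model class name and the role field. A minimal standalone sketch of that naming rule (illustrative only, not part of the change):

    def compat_rd_name(model_print: str, role_field: str) -> str:
        # Hypothetical helper mirroring the rd_name construction above:
        # 'admin_role' on Project yields 'Project Admin Compat'.
        action_name = role_field.rsplit('_', 1)[0]
        return f'{model_print} {action_name.title()} Compat'

    assert compat_rd_name('Project', 'admin_role') == 'Project Admin Compat'
    assert compat_rd_name('JobTemplate', 'execute_role') == 'JobTemplate Execute Compat'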
|
||||
|
||||
|
||||
def get_role_from_object_role(object_role):
|
||||
"""
|
||||
Given an object role from the new system, return the corresponding role from the old system
|
||||
This reverses the naming from get_role_definition and the ANSIBLE_BASE_ROLE_PRECREATE setting.
|
||||
"""
|
||||
rd = object_role.role_definition
|
||||
if rd.name.endswith(' Compat'):
|
||||
model_name, role_name, _ = rd.name.split()
|
||||
role_name = role_name.lower()
|
||||
role_name += '_role'
|
||||
elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2:
|
||||
# cases like "Organization Project Admin"
|
||||
model_name, target_model_name, role_name = rd.name.split()
|
||||
role_name = role_name.lower()
|
||||
model_cls = apps.get_model('main', target_model_name)
|
||||
target_model_name = get_type_for_model(model_cls)
|
||||
|
||||
# exception cases completely specific to one model naming convention
|
||||
if target_model_name == 'notification_template':
|
||||
target_model_name = 'notification'
|
||||
elif target_model_name == 'workflow_job_template':
|
||||
target_model_name = 'workflow'
|
||||
|
||||
role_name = f'{target_model_name}_admin_role'
|
||||
elif rd.name.endswith(' Admin'):
|
||||
# cases like "project-admin"
|
||||
role_name = 'admin_role'
|
||||
elif rd.name == 'Organization Audit':
|
||||
role_name = 'auditor_role'
|
||||
else:
|
||||
model_name, role_name = rd.name.split()
|
||||
role_name = role_name.lower()
|
||||
role_name += '_role'
|
||||
return getattr(object_role.content_object, role_name)
|
||||
|
||||
|
||||
def give_or_remove_permission(role, actor, giving=True):
|
||||
obj = role.content_object
|
||||
if obj is None:
|
||||
return
|
||||
rd = get_role_definition(role)
|
||||
rd.give_or_remove_permission(actor, obj, giving=giving)
|
||||
|
||||
|
||||
class SyncEnabled(threading.local):
|
||||
def __init__(self):
|
||||
self.enabled = True
|
||||
|
||||
|
||||
rbac_sync_enabled = SyncEnabled()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def disable_rbac_sync():
|
||||
try:
|
||||
previous_value = rbac_sync_enabled.enabled
|
||||
rbac_sync_enabled.enabled = False
|
||||
yield
|
||||
finally:
|
||||
rbac_sync_enabled.enabled = previous_value
|
||||
|
||||
|
||||
def give_creator_permissions(user, obj):
|
||||
assignment = RoleDefinition.objects.give_creator_permissions(user, obj)
|
||||
if assignment:
|
||||
with disable_rbac_sync():
|
||||
old_role = get_role_from_object_role(assignment.object_role)
|
||||
old_role.members.add(user)
|
||||
|
||||
|
||||
def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
|
||||
if action.startswith('pre_'):
|
||||
return
|
||||
if not rbac_sync_enabled.enabled:
|
||||
return
|
||||
|
||||
if action == 'post_add':
|
||||
is_giving = True
|
||||
elif action == 'post_remove':
|
||||
is_giving = False
|
||||
elif action == 'post_clear':
|
||||
raise RuntimeError('Clearing of role members not supported')
|
||||
|
||||
if reverse:
|
||||
user = instance
|
||||
else:
|
||||
role = instance
|
||||
|
||||
for user_or_role_id in pk_set:
|
||||
if reverse:
|
||||
role = Role.objects.get(pk=user_or_role_id)
|
||||
else:
|
||||
user = get_user_model().objects.get(pk=user_or_role_id)
|
||||
give_or_remove_permission(role, user, giving=is_giving)
|
||||
|
||||
|
||||
def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
|
||||
if action.startswith('pre_'):
|
||||
return
|
||||
|
||||
if action == 'post_add':
|
||||
is_giving = True
|
||||
elif action == 'post_remove':
|
||||
is_giving = False
|
||||
elif action == 'post_clear':
|
||||
raise RuntimeError('Clearing of role members not supported')
|
||||
|
||||
if reverse:
|
||||
parent_role = instance
|
||||
else:
|
||||
child_role = instance
|
||||
|
||||
for role_id in pk_set:
|
||||
if reverse:
|
||||
try:
|
||||
child_role = Role.objects.get(id=role_id)
|
||||
except Role.DoesNotExist:
|
||||
continue
|
||||
else:
|
||||
try:
|
||||
parent_role = Role.objects.get(id=role_id)
|
||||
except Role.DoesNotExist:
|
||||
continue
|
||||
|
||||
# To be safe, we want to avoid running this if triggered from implicit_parents management
|
||||
# we only want to do anything if we know for sure this is a non-implicit team role
|
||||
if parent_role.role_field == 'member_role' and parent_role.content_type.model == 'team':
|
||||
# Team internal parents are member_role->read_role and admin_role->member_role
|
||||
# for the same object, this parenting will also be implicit_parents management
|
||||
# do nothing for internal parents, but OTHER teams may still be assigned permissions to a team
|
||||
if (child_role.content_type_id == parent_role.content_type_id) and (child_role.object_id == parent_role.object_id):
|
||||
return
|
||||
|
||||
from awx.main.models.organization import Team
|
||||
|
||||
team = Team.objects.get(pk=parent_role.object_id)
|
||||
give_or_remove_permission(child_role, team, giving=is_giving)
|
||||
|
||||
|
||||
m2m_changed.connect(sync_members_to_new_rbac, Role.members.through)
|
||||
m2m_changed.connect(sync_parents_to_new_rbac, Role.parents.through)
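With these receivers connected, ordinary membership operations on the old Role model drive the new system. A hedged sketch of the triggering calls, assuming an existing role and user (illustrative, not part of the change):

    # role.members is the m2m whose through model is connected above, so:
    role.members.add(user)     # fires post_add    -> give_or_remove_permission(role, user, giving=True)
    role.members.remove(user)  # fires post_remove -> give_or_remove_permission(role, user, giving=False)
    role.members.clear()       # fires post_clear  -> RuntimeError('Clearing of role members not supported')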
|
||||
|
||||
@@ -17,7 +17,7 @@ from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.db import models, connection
|
||||
from django.db import models, connection, transaction
|
||||
from django.core.exceptions import NON_FIELD_ERRORS
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.timezone import now
|
||||
@@ -31,13 +31,15 @@ from rest_framework.exceptions import ParseError
|
||||
from polymorphic.models import PolymorphicModel
|
||||
|
||||
from ansible_base.lib.utils.models import prevent_search, get_type_for_model
|
||||
from ansible_base.rbac import permission_registry
|
||||
|
||||
# AWX
|
||||
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
|
||||
from awx.main.dispatch import get_task_queuename
|
||||
from awx.main.dispatch.control import Control as ControlDispatcher
|
||||
from awx.main.registrar import activity_stream_registrar
|
||||
from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
|
||||
from awx.main.models.mixins import TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
|
||||
from awx.main.models.rbac import to_permissions
|
||||
from awx.main.utils.common import (
|
||||
camelcase_to_underscore,
|
||||
get_model_for_type,
|
||||
@@ -196,9 +198,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
||||
|
||||
@classmethod
|
||||
def _submodels_with_roles(cls):
|
||||
ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
|
||||
ct_dict = ContentType.objects.get_for_models(*ujt_classes)
|
||||
return [ct.id for ct in ct_dict.values()]
|
||||
return [c for c in cls.__subclasses__() if permission_registry.is_registered(c)]
|
||||
|
||||
@classmethod
|
||||
def accessible_pk_qs(cls, accessor, role_field):
|
||||
@@ -210,7 +210,23 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
||||
# do not use this if in a subclass
|
||||
if cls != UnifiedJobTemplate:
|
||||
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
|
||||
return ResourceMixin._accessible_pk_qs(cls, accessor, role_field, content_types=cls._submodels_with_roles())
|
||||
from ansible_base.rbac.models import RoleEvaluation
|
||||
|
||||
action = to_permissions[role_field]
|
||||
|
||||
# Special condition for super auditor
|
||||
role_subclasses = cls._submodels_with_roles()
|
||||
role_cts = ContentType.objects.get_for_models(*role_subclasses).values()
|
||||
all_codenames = {f'{action}_{cls._meta.model_name}' for cls in role_subclasses}
|
||||
if not (all_codenames - accessor.singleton_permissions()):
|
||||
qs = cls.objects.filter(polymorphic_ctype__in=role_cts)
|
||||
return qs.values_list('id', flat=True)
|
||||
|
||||
return (
|
||||
RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__in=all_codenames, content_type_id__in=[ct.id for ct in role_cts])
|
||||
.values_list('object_id')
|
||||
.distinct()
|
||||
)
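The codenames checked here are plain '<action>_<model_name>' strings. A small standalone illustration of the super-auditor short circuit (the action mapping, model names, and singleton set are assumed examples):

    action = 'view'  # e.g. to_permissions['read_role'], assumed mapping
    submodel_names = ['jobtemplate', 'project', 'workflowjobtemplate']  # example subclasses
    all_codenames = {f'{action}_{name}' for name in submodel_names}
    # If the accessor's system-wide (singleton) permissions cover every codename,
    # the per-object RoleEvaluation query is skipped and all matching rows returned.
    singleton_permissions = {f'view_{name}' for name in submodel_names}
    assert not (all_codenames - singleton_permissions)  # short-circuit branch taken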
|
||||
|
||||
def _perform_unique_checks(self, unique_checks):
|
||||
# Handle the list of unique fields returned above. Replace with an
|
||||
@@ -264,7 +280,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
||||
if new_next_schedule:
|
||||
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
|
||||
return # no-op, common for infrequent schedules
|
||||
self.next_schedule = new_next_schedule
|
||||
|
||||
# If in a transaction, use select_for_update to lock the next schedule row, which
|
||||
# prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
|
||||
if transaction.get_autocommit():
|
||||
self.next_schedule = related_schedules.first()
|
||||
else:
|
||||
self.next_schedule = related_schedules.select_for_update().first()
|
||||
|
||||
self.next_job_run = new_next_schedule.next_run
|
||||
self.save(update_fields=['next_schedule', 'next_job_run'])
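The branch above only takes a row lock when a transaction is already open, because evaluating select_for_update() outside a transaction raises. A compact restatement of the decision (function and queryset are hypothetical):

    from django.db import transaction

    def pick_next_schedule(related_schedules):
        # Illustrative only: lock the row iff we are inside a transaction.
        if transaction.get_autocommit():  # no transaction in progress
            return related_schedules.first()
        return related_schedules.select_for_update().first()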
|
||||
|
||||
@@ -814,7 +837,7 @@ class UnifiedJob(
|
||||
update_fields.append(key)
|
||||
|
||||
if parent_instance:
|
||||
if self.status in ('pending', 'waiting', 'running'):
|
||||
if self.status in ('pending', 'running'):
|
||||
if parent_instance.current_job != self:
|
||||
parent_instance_set('current_job', self)
|
||||
# Update parent with all the 'good' states of its child
|
||||
@@ -851,7 +874,7 @@ class UnifiedJob(
|
||||
# If this job already exists in the database, retrieve a copy of
|
||||
# the job in its prior state.
|
||||
# If update_fields are given without status, then that indicates no change
|
||||
if self.pk and ((not update_fields) or ('status' in update_fields)):
|
||||
if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
|
||||
self_before = self.__class__.objects.get(pk=self.pk)
|
||||
if self_before.status != self.status:
|
||||
status_before = self_before.status
|
||||
@@ -893,7 +916,8 @@ class UnifiedJob(
|
||||
update_fields.append('elapsed')
|
||||
|
||||
# Ensure that the job template information is current.
|
||||
if self.unified_job_template != self._get_parent_instance():
|
||||
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||
if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
|
||||
self.unified_job_template = self._get_parent_instance()
|
||||
if 'unified_job_template' not in update_fields:
|
||||
update_fields.append('unified_job_template')
|
||||
@@ -906,8 +930,9 @@ class UnifiedJob(
|
||||
# Okay; we're done. Perform the actual save.
|
||||
result = super(UnifiedJob, self).save(*args, **kwargs)
|
||||
|
||||
# If status changed, update the parent instance.
|
||||
if self.status != status_before:
|
||||
# If status changed, update the parent instance
|
||||
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||
if self.status != status_before and self.status != 'waiting':
|
||||
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
|
||||
# This dodges lock contention at the expense of the foreign key not being
|
||||
# completely correct.
|
||||
@@ -1599,7 +1624,8 @@ class UnifiedJob(
|
||||
extra["controller_node"] = self.controller_node or "NOT_SET"
|
||||
elif state == "execution_node_chosen":
|
||||
extra["execution_node"] = self.execution_node or "NOT_SET"
|
||||
logger_job_lifecycle.info(msg, extra=extra)
|
||||
|
||||
logger_job_lifecycle.info(f"{msg} {json.dumps(extra)}")
|
||||
|
||||
@property
|
||||
def launched_by(self):
|
||||
|
||||
@@ -467,6 +467,10 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
permissions = [
|
||||
('execute_workflowjobtemplate', 'Can run this workflow job template'),
|
||||
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
|
||||
]
|
||||
|
||||
notification_templates_approvals = models.ManyToManyField(
|
||||
"NotificationTemplate",
|
||||
|
||||
awx/main/notifications/awssns_backend.py (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
import json
|
||||
import logging
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.awssns_backend')
|
||||
WEBSOCKET_TIMEOUT = 30
|
||||
|
||||
|
||||
class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
init_parameters = {
|
||||
"aws_region": {"label": "AWS Region", "type": "string", "default": ""},
|
||||
"aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
|
||||
"aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
|
||||
"aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
|
||||
"sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
|
||||
}
|
||||
recipient_parameter = "sns_topic_arn"
|
||||
sender_parameter = None
|
||||
|
||||
DEFAULT_BODY = "{{ job_metadata }}"
|
||||
default_messages = CustomNotificationBase.job_metadata_messages
|
||||
|
||||
def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
|
||||
session = boto3.session.Session()
|
||||
client_config = {"service_name": 'sns'}
|
||||
if aws_region:
|
||||
client_config["region_name"] = aws_region
|
||||
if aws_secret_access_key:
|
||||
client_config["aws_secret_access_key"] = aws_secret_access_key
|
||||
if aws_access_key_id:
|
||||
client_config["aws_access_key_id"] = aws_access_key_id
|
||||
if aws_session_token:
|
||||
client_config["aws_session_token"] = aws_session_token
|
||||
self.client = session.client(**client_config)
|
||||
super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
|
||||
|
||||
def _sns_publish(self, topic_arn, message):
|
||||
self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
|
||||
|
||||
def format_body(self, body):
|
||||
if isinstance(body, str):
|
||||
try:
|
||||
body = json.loads(body)
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
|
||||
if isinstance(body, dict):
|
||||
body = json.dumps(body)
|
||||
# convert dict body to json string
|
||||
return body
|
||||
|
||||
def send_messages(self, messages):
|
||||
sent_messages = 0
|
||||
for message in messages:
|
||||
sns_topic_arn = str(message.recipients()[0])
|
||||
try:
|
||||
self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
|
||||
sent_messages += 1
|
||||
except ClientError as error:
|
||||
if not self.fail_silently:
|
||||
raise error
|
||||
|
||||
return sent_messages
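A hedged usage sketch for the new backend; the region and topic ARN below are placeholders, and EmailMessage stands in for however the notification framework constructs messages:

    from django.core.mail.message import EmailMessage

    # Illustrative only; all values are placeholders.
    backend = AWSSNSBackend(
        aws_region='us-east-1',
        aws_access_key_id=None,       # falls back to the ambient AWS credential chain
        aws_secret_access_key=None,
        aws_session_token=None,
    )
    message = EmailMessage(
        body='{"status": "success"}',
        to=['arn:aws:sns:us-east-1:123456789012:awx-notifications'],  # recipient is the topic ARN
    )
    backend.send_messages([message])  # returns the count of successfully published messages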
|
||||
@@ -1,5 +1,6 @@
|
||||
# Copyright (c) 2019 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
# -*-coding:utf-8-*-
|
||||
|
||||
|
||||
class CustomNotificationBase(object):
|
||||
@@ -31,3 +32,15 @@ class CustomNotificationBase(object):
|
||||
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
|
||||
},
|
||||
}
|
||||
|
||||
job_metadata_messages = {
|
||||
"started": {"body": "{{ job_metadata }}"},
|
||||
"success": {"body": "{{ job_metadata }}"},
|
||||
"error": {"body": "{{ job_metadata }}"},
|
||||
"workflow_approval": {
|
||||
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
||||
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
||||
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
||||
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
||||
},
|
||||
}
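Each approval body above is a template that must render to a valid JSON document. A quick check of that property, with placeholder values substituted for the template variables:

    import json

    # Illustrative rendering only; the node name and URL are placeholders.
    rendered = '{"body": "The approval node \\"Deploy gate\\" needs review. This node can be viewed at: https://awx.example.org/#/jobs/workflow/42"}'
    payload = json.loads(rendered)
    assert payload['body'].startswith('The approval node')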
|
||||
|
||||
@@ -27,17 +27,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
sender_parameter = None
|
||||
|
||||
DEFAULT_BODY = "{{ job_metadata }}"
|
||||
default_messages = {
|
||||
"started": {"body": DEFAULT_BODY},
|
||||
"success": {"body": DEFAULT_BODY},
|
||||
"error": {"body": DEFAULT_BODY},
|
||||
"workflow_approval": {
|
||||
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
||||
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
||||
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
||||
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
||||
},
|
||||
}
|
||||
default_messages = CustomNotificationBase.job_metadata_messages
|
||||
|
||||
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
|
||||
self.http_method = http_method
|
||||
|
||||
@@ -12,6 +12,7 @@ from . import consumers
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.routing')
|
||||
_application = None
|
||||
|
||||
|
||||
class AWXProtocolTypeRouter(ProtocolTypeRouter):
|
||||
@@ -62,15 +63,60 @@ websocket_urlpatterns = [
|
||||
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
|
||||
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
|
||||
]
|
||||
|
||||
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
|
||||
websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
|
||||
|
||||
websocket_relay_urlpatterns = [
|
||||
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
||||
]
|
||||
|
||||
application = AWXProtocolTypeRouter(
|
||||
{
|
||||
'websocket': MultipleURLRouterAdapter(
|
||||
URLRouter(websocket_relay_urlpatterns),
|
||||
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
def application_func(cls=AWXProtocolTypeRouter) -> ProtocolTypeRouter:
|
||||
return cls(
|
||||
{
|
||||
'websocket': MultipleURLRouterAdapter(
|
||||
URLRouter(websocket_relay_urlpatterns),
|
||||
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def __getattr__(name: str) -> ProtocolTypeRouter:
|
||||
"""
|
||||
Defer instantiating application.
|
||||
For testing, we just need it to NOT run on import.
|
||||
|
||||
https://peps.python.org/pep-0562/#specification
|
||||
|
||||
Normally, someone would get application from this module via:
|
||||
from awx.main.routing import application
|
||||
|
||||
and do something with the application:
|
||||
application.do_something()
|
||||
|
||||
What does the callstack look like when the import runs?
|
||||
...
|
||||
awx.main.routing.__getattribute__(...) # <-- we don't define this so NOOP as far as we are concerned
|
||||
if '__getattr__' in awx.main.routing.__dict__: # <-- this triggers the function we are in
|
||||
return awx.main.routing.__dict__.__getattr__("application")
|
||||
|
||||
Why isn't this function simply implemented as:
|
||||
def __getattr__(name):
|
||||
if not _application:
|
||||
_application = application_func()
|
||||
return _application
|
||||
|
||||
It could. I manually tested it and it passes test_routing.py.
|
||||
|
||||
But my understanding after reading the PEP-0562 specification link above is that
|
||||
performance would be a bit worse due to the extra __getattribute__ calls when
|
||||
we reference non-global variables.
|
||||
"""
|
||||
if name == "application":
|
||||
globs = globals()
|
||||
if not globs['_application']:
|
||||
globs['_application'] = application_func()
|
||||
return globs['_application']
|
||||
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
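The pattern is plain PEP 562: a module-level __getattr__ that builds the object on first access. A minimal self-contained illustration of the same deferral trick (module and names are hypothetical):

    # lazy_mod.py -- hypothetical module showing the same idea
    _cached = None

    def _build():
        # stands in for application_func(); imagine heavy import-time work here
        return object()

    def __getattr__(name):
        global _cached
        if name == 'expensive':
            if _cached is None:
                _cached = _build()
            return _cached
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

    # `from lazy_mod import expensive` triggers _build() at first attribute
    # access, not when lazy_mod itself is imported.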
|
||||
|
||||
@@ -138,7 +138,8 @@ class TaskBase:
|
||||
|
||||
# Lock
|
||||
with task_manager_bulk_reschedule():
|
||||
with advisory_lock(f"{self.prefix}_lock", wait=False) as acquired:
|
||||
lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000 # convert to milliseconds
|
||||
with advisory_lock(f"{self.prefix}_lock", lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
|
||||
with transaction.atomic():
|
||||
if acquired is False:
|
||||
logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
|
||||
|
||||
@@ -126,6 +126,8 @@ def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwarg
|
||||
|
||||
def sync_superuser_status_to_rbac(instance, **kwargs):
|
||||
'When the is_superuser flag is changed on a user, reflect that in the membership of the System Administrator role'
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
return
|
||||
update_fields = kwargs.get('update_fields', None)
|
||||
if update_fields and 'is_superuser' not in update_fields:
|
||||
return
|
||||
@@ -137,6 +139,8 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
|
||||
|
||||
def sync_rbac_to_superuser_status(instance, sender, **kwargs):
|
||||
'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
return
|
||||
if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
|
||||
new_status_value = bool(kwargs['action'] == 'post_add')
|
||||
if hasattr(instance, 'singleton_name'): # duck typing, role.members.add() vs user.roles.add()
|
||||
|
||||
@@ -49,6 +49,70 @@ class ReceptorConnectionType(Enum):
|
||||
STREAMTLS = 2
|
||||
|
||||
|
||||
"""
|
||||
Translate receptorctl messages that come in over stdout into
|
||||
structured messages. Currently, these are error messages.
|
||||
"""
|
||||
|
||||
|
||||
class ReceptorErrorBase:
|
||||
_MESSAGE = 'Receptor Error'
|
||||
|
||||
def __init__(self, node: str = 'N/A', state_name: str = 'N/A'):
|
||||
self.node = node
|
||||
self.state_name = state_name
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.__class__.__name__} '{self._MESSAGE}' on node '{self.node}' with state '{self.state_name}'"
|
||||
|
||||
|
||||
class WorkUnitError(ReceptorErrorBase):
|
||||
_MESSAGE = 'unknown work unit '
|
||||
|
||||
def __init__(self, work_unit_id: str, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.work_unit_id = work_unit_id
|
||||
|
||||
def __str__(self):
|
||||
return f"{super().__str__()} work unit id '{self.work_unit_id}'"
|
||||
|
||||
|
||||
class WorkUnitCancelError(WorkUnitError):
|
||||
_MESSAGE = 'error cancelling remote unit: unknown work unit '
|
||||
|
||||
|
||||
class WorkUnitResultsError(WorkUnitError):
|
||||
_MESSAGE = 'Failed to get results: unknown work unit '
|
||||
|
||||
|
||||
class UnknownError(ReceptorErrorBase):
|
||||
_MESSAGE = 'Unknown receptor ctl error'
|
||||
|
||||
def __init__(self, msg, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._MESSAGE = msg
|
||||
|
||||
|
||||
class FuzzyError:
|
||||
def __new__(self, e: RuntimeError, node: str, state_name: str):
|
||||
"""
|
||||
At the time of writing this comment, all of the sub-class detection
|
||||
is centralized in this parent class. It's like a Router().
|
||||
Someone may find it better to push down the error detection logic into
|
||||
each sub-class.
|
||||
"""
|
||||
msg = e.args[0]
|
||||
|
||||
common_startswith = (WorkUnitCancelError, WorkUnitResultsError, WorkUnitError)
|
||||
|
||||
for klass in common_startswith:
|
||||
if msg.startswith(klass._MESSAGE):
|
||||
work_unit_id = msg[len(klass._MESSAGE) :]
|
||||
return klass(work_unit_id, node=node, state_name=state_name)
|
||||
|
||||
return UnknownError(msg, node=node, state_name=state_name)
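In practice the router keys purely off the message prefix. A small hedged sketch using the prefixes defined above (node name and unit id are made up):

    err = FuzzyError(
        RuntimeError('error cancelling remote unit: unknown work unit abc123'),
        node='exec-node-1',
        state_name='Failed',
    )
    assert isinstance(err, WorkUnitCancelError)
    assert err.work_unit_id == 'abc123'
    # str(err) includes the class name, node, state, and the work unit id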
|
||||
|
||||
|
||||
def read_receptor_config():
|
||||
# for K8S deployments, getting a lock is necessary as another process
|
||||
# may be re-writing the config at this time
|
||||
@@ -185,6 +249,7 @@ def run_until_complete(node, timing_data=None, **kwargs):
|
||||
timing_data['transmit_timing'] = run_start - transmit_start
|
||||
run_timing = 0.0
|
||||
stdout = ''
|
||||
state_name = 'local var never set'
|
||||
|
||||
try:
|
||||
resultfile = receptor_ctl.get_work_results(unit_id)
|
||||
@@ -205,13 +270,33 @@ def run_until_complete(node, timing_data=None, **kwargs):
|
||||
stdout = resultfile.read()
|
||||
stdout = str(stdout, encoding='utf-8')
|
||||
|
||||
except RuntimeError as e:
|
||||
receptor_e = FuzzyError(e, node, state_name)
|
||||
if type(receptor_e) in (
|
||||
WorkUnitError,
|
||||
WorkUnitResultsError,
|
||||
):
|
||||
logger.warning(f'While consuming job results: {receptor_e}')
|
||||
else:
|
||||
raise
|
||||
finally:
|
||||
if settings.RECEPTOR_RELEASE_WORK:
|
||||
res = receptor_ctl.simple_command(f"work release {unit_id}")
|
||||
if res != {'released': unit_id}:
|
||||
logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
|
||||
try:
|
||||
res = receptor_ctl.simple_command(f"work release {unit_id}")
|
||||
|
||||
receptor_ctl.close()
|
||||
if res != {'released': unit_id}:
|
||||
logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
|
||||
|
||||
receptor_ctl.close()
|
||||
except RuntimeError as e:
|
||||
receptor_e = FuzzyError(e, node, state_name)
|
||||
if type(receptor_e) in (
|
||||
WorkUnitError,
|
||||
WorkUnitCancelError,
|
||||
):
|
||||
logger.warning(f"While releasing work: {receptor_e}")
|
||||
else:
|
||||
logger.error(f"While releasing work: {receptor_e}")
|
||||
|
||||
if state_name.lower() == 'failed':
|
||||
work_detail = status.get('Detail', '')
|
||||
@@ -275,7 +360,7 @@ def _convert_args_to_cli(vargs):
|
||||
args = ['cleanup']
|
||||
for option in ('exclude_strings', 'remove_images'):
|
||||
if vargs.get(option):
|
||||
args.append('--{}={}'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
|
||||
args.append('--{}="{}"'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
|
||||
for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
|
||||
if vargs.get(option) is True:
|
||||
args.append('--{}'.format(option.replace('_', '-')))
|
||||
|
||||
@@ -6,6 +6,7 @@ import itertools
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import psycopg
|
||||
from io import StringIO
|
||||
from contextlib import redirect_stdout
|
||||
import shutil
|
||||
@@ -35,6 +36,9 @@ import ansible_runner.cleanup
|
||||
# dateutil
|
||||
from dateutil.parser import parse as parse_date
|
||||
|
||||
# django-ansible-base
|
||||
from ansible_base.resource_registry.tasks.sync import SyncExecutor
|
||||
|
||||
# AWX
|
||||
from awx import __version__ as awx_application_version
|
||||
from awx.main.access import access_registry
|
||||
@@ -416,7 +420,7 @@ def handle_removed_image(remove_images=None):
|
||||
|
||||
@task(queue=get_task_queuename)
|
||||
def cleanup_images_and_files():
|
||||
_cleanup_images_and_files()
|
||||
_cleanup_images_and_files(image_prune=True)
|
||||
|
||||
|
||||
@task(queue=get_task_queuename)
|
||||
@@ -630,10 +634,18 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
|
||||
logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))
|
||||
|
||||
except DatabaseError as e:
|
||||
if 'did not affect any rows' in str(e):
|
||||
logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
|
||||
cause = e.__cause__
|
||||
if cause and hasattr(cause, 'sqlstate'):
|
||||
sqlstate = cause.sqlstate
|
||||
sqlstate_str = psycopg.errors.lookup(sqlstate)
|
||||
logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
|
||||
|
||||
if sqlstate == psycopg.errors.NoData:
|
||||
logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
|
||||
else:
|
||||
logger.exception("Error marking {} as lost.".format(other_inst.hostname))
|
||||
else:
|
||||
logger.exception('Error marking {} as lost'.format(other_inst.hostname))
|
||||
logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))
|
||||
|
||||
# Run local reaper
|
||||
if worker_tasks is not None:
|
||||
@@ -703,7 +715,8 @@ def awx_k8s_reaper():
|
||||
|
||||
@task(queue=get_task_queuename)
|
||||
def awx_periodic_scheduler():
|
||||
with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
|
||||
lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
|
||||
with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
|
||||
if acquired is False:
|
||||
logger.debug("Not running periodic scheduler, another task holds lock")
|
||||
return
|
||||
@@ -788,10 +801,19 @@ def update_inventory_computed_fields(inventory_id):
|
||||
try:
|
||||
i.update_computed_fields()
|
||||
except DatabaseError as e:
|
||||
if 'did not affect any rows' in str(e):
|
||||
logger.debug('Exiting duplicate update_inventory_computed_fields task.')
|
||||
return
|
||||
raise
|
||||
# https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
|
||||
# django raises DatabaseError("Forced update did not affect any rows.")
|
||||
|
||||
# if sqlstate is set then there was a real database error, so log it and re-raise
|
||||
cause = e.__cause__
|
||||
if cause and hasattr(cause, 'sqlstate'):
|
||||
sqlstate = cause.sqlstate
|
||||
sqlstate_str = psycopg.errors.lookup(sqlstate)
|
||||
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
|
||||
raise
|
||||
|
||||
# otherwise
|
||||
logger.debug('Exiting duplicate update_inventory_computed_fields task.')
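Both the heartbeat and computed-fields handlers use the same pattern for digging the SQLSTATE out of a psycopg-backed DatabaseError. A standalone sketch of that inspection (the function name is hypothetical):

    import psycopg
    from django.db import DatabaseError

    def describe_db_error(e: DatabaseError) -> str:
        # Django chains the driver exception via `raise ... from ...`
        cause = e.__cause__
        if cause is not None and hasattr(cause, 'sqlstate'):
            return f'{cause.sqlstate} - {psycopg.errors.lookup(cause.sqlstate)}'
        return 'No SQL state available'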
|
||||
|
||||
|
||||
def update_smart_memberships_for_inventory(smart_inventory):
|
||||
@@ -946,3 +968,27 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
|
||||
permission_check_func(creater, copy_mapping.values())
|
||||
if isinstance(new_obj, Inventory):
|
||||
update_inventory_computed_fields.delay(new_obj.id)
|
||||
|
||||
|
||||
@task(queue=get_task_queuename)
|
||||
def periodic_resource_sync():
|
||||
if not getattr(settings, 'RESOURCE_SERVER', None):
|
||||
logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
|
||||
return
|
||||
|
||||
with advisory_lock('periodic_resource_sync', wait=False) as acquired:
|
||||
if acquired is False:
|
||||
logger.debug("Not running periodic_resource_sync, another task holds lock")
|
||||
return
|
||||
logger.debug("Running periodic resource sync")
|
||||
|
||||
executor = SyncExecutor()
|
||||
executor.run()
|
||||
for key, item_list in executor.results.items():
|
||||
if not item_list or key == 'noop':
|
||||
continue
|
||||
# Log creations and conflicts
|
||||
if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
|
||||
logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
|
||||
else:
|
||||
logger.info(f'Periodic resource sync {key}:\n{item_list}')
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
hosts: all
|
||||
tasks:
|
||||
- name: Hello Message
|
||||
debug:
|
||||
ansible.builtin.debug:
|
||||
msg: "Hello World!"
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"K8S_AUTH_HOST": "https://foo.invalid",
|
||||
"K8S_AUTH_API_KEY": "fooo",
|
||||
"K8S_AUTH_VERIFY_SSL": "False"
|
||||
}
|
||||
awx/main/tests/data/inventory/plugins/terraform/env.json (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"GOOGLE_BACKEND_CREDENTIALS": "{{ file_reference }}"
|
||||
}
|
||||
@@ -1,13 +1,8 @@
|
||||
from awx.main.tests.functional.conftest import * # noqa
|
||||
import os
|
||||
import pytest
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")
|
||||
|
||||
|
||||
def pytest_generate_tests(metafunc):
|
||||
# This is called for every test. Only get/set command line arguments
|
||||
# if the argument is specified in the list of test "fixturenames".
|
||||
option_value = metafunc.config.option.release
|
||||
if 'release' in metafunc.fixturenames and option_value is not None:
|
||||
metafunc.parametrize("release", [option_value])
|
||||
@pytest.fixture()
|
||||
def release():
|
||||
return os.environ.get('VERSION_TARGET', '')
|
||||
|
||||
@@ -99,7 +99,7 @@ class TestSwaggerGeneration:
|
||||
# The number of API endpoints changes over time, but let's just check
|
||||
# for a reasonable number here; if this test starts failing, raise/lower the bounds
|
||||
paths = JSON['paths']
|
||||
assert 250 < len(paths) < 375
|
||||
assert 250 < len(paths) < 400
|
||||
assert set(list(paths['/api/'].keys())) == set(['get', 'parameters'])
|
||||
assert set(list(paths['/api/v2/'].keys())) == set(['get', 'parameters'])
|
||||
assert set(list(sorted(paths['/api/v2/credentials/'].keys()))) == set(['get', 'post', 'parameters'])
|
||||
|
||||
@@ -4,7 +4,6 @@ from prometheus_client.parser import text_string_to_metric_families
|
||||
from awx.main import models
|
||||
from awx.main.analytics.metrics import metrics
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models.rbac import Role
|
||||
|
||||
EXPECTED_VALUES = {
|
||||
'awx_system_info': 1.0,
|
||||
@@ -66,7 +65,6 @@ def test_metrics_permissions(get, admin, org_admin, alice, bob, organization):
|
||||
organization.auditor_role.members.add(bob)
|
||||
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
|
||||
|
||||
Role.singleton('system_auditor').members.add(bob)
|
||||
bob.is_system_auditor = True
|
||||
assert get(get_metrics_view_db_only(), user=bob).status_code == 200
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ from django.test import Client
|
||||
from rest_framework.test import APIRequestFactory
|
||||
|
||||
from awx.api.generics import LoggedLoginView
|
||||
from awx.api.versioning import drf_reverse
|
||||
from rest_framework.reverse import reverse as drf_reverse
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
|
||||
role_pk = inventory.admin_role.pk
|
||||
data = {"id": role_pk}
|
||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
||||
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
||||
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
|
||||
|
||||
|
||||
@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
|
||||
role_pk = inventory.admin_role.pk
|
||||
data = {"id": role_pk}
|
||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
||||
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
||||
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||
|
||||
|
||||
@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
|
||||
role_pk = inventory.admin_role.pk
|
||||
data = {"id": team.pk}
|
||||
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
|
||||
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
|
||||
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
||||
mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
|
||||
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
||||
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
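These conversions rely on pytest-mock undoing every mocker.patch at test teardown, which is why the with-blocks were unnecessary. A minimal illustration (test name is hypothetical):

    # Illustrative only: mocker.patch registers an automatic teardown.
    import json

    def test_patch_lifetime(mocker):
        m = mocker.patch('json.dumps', return_value='{}')
        assert json.dumps({'a': 1}) == '{}'
        assert m.called
    # after the test returns, pytest-mock restores json.dumps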
|
||||
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ def test_idempotent_credential_type_setup():
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh):
|
||||
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
|
||||
params = {
|
||||
'credential_type': 1,
|
||||
'inputs': {'username': 'someusername'},
|
||||
@@ -81,7 +81,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh):
|
||||
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
|
||||
params = {
|
||||
'credential_type': 1,
|
||||
'inputs': {'username': 'someusername'},
|
||||
@@ -385,10 +385,9 @@ def test_list_created_org_credentials(post, get, organization, org_admin, org_me
|
||||
@pytest.mark.django_db
|
||||
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by):
|
||||
for i, password in enumerate(('abc', 'def', 'xyz')):
|
||||
response = post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin)
|
||||
post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin, expect=400)
|
||||
|
||||
response = get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, status=400)
|
||||
assert response.status_code == 400
|
||||
get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, expect=400)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -399,8 +398,7 @@ def test_inputs_cannot_contain_extra_fields(get, post, organization, admin, cred
|
||||
'credential_type': credentialtype_ssh.pk,
|
||||
'inputs': {'invalid_field': 'foo'},
|
||||
}
|
||||
response = post(reverse('api:credential_list'), params, admin)
|
||||
assert response.status_code == 400
|
||||
response = post(reverse('api:credential_list'), params, admin, expect=400)
|
||||
assert "'invalid_field' was unexpected" in response.data['inputs'][0]
|
||||
|
||||
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
import pytest
|
||||
from unittest import mock
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
|
||||
from django.test.utils import override_settings
|
||||
|
||||
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
|
||||
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
|
||||
|
||||
|
||||
class HeaderTrackingMiddleware(object):
|
||||
def __init__(self):
|
||||
self.environ = {}
|
||||
|
||||
def process_request(self, request):
|
||||
pass
|
||||
|
||||
def process_response(self, request, response):
|
||||
self.environ = request.environ
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_proxy_ip_allowed(get, patch, admin):
|
||||
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
||||
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
||||
|
||||
class HeaderTrackingMiddleware(object):
|
||||
environ = {}
|
||||
|
||||
def process_request(self, request):
|
||||
pass
|
||||
|
||||
def process_response(self, request, response):
|
||||
self.environ = request.environ
|
||||
|
||||
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
|
||||
# should just pass through
|
||||
middleware = HeaderTrackingMiddleware()
|
||||
@@ -45,6 +53,51 @@ def test_proxy_ip_allowed(get, patch, admin):
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestTrustedProxyAllowListIntegration:
|
||||
@pytest.fixture
|
||||
def url(self, patch, admin):
|
||||
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
||||
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
||||
patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
|
||||
return url
|
||||
|
||||
@pytest.fixture
|
||||
def middleware(self):
|
||||
return HeaderTrackingMiddleware()
|
||||
|
||||
def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware): # noqa: F811
|
||||
# Headers should NOT get deleted
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||
}
|
||||
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
|
||||
get(url, user=admin, middleware=middleware, **headers)
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
|
||||
# Headers should NOT get deleted
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||
}
|
||||
with override_settings(PROXY_IP_ALLOWED_LIST=[]):
|
||||
get(url, user=admin, middleware=middleware, **headers)
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
|
||||
# A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
|
||||
'REMOTE_ADDR': 'my.proxy.example.org',
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||
}
|
||||
get(url, user=admin, middleware=middleware, **headers)
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestDeleteViews:
|
||||
def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
|
||||
|
||||
awx/main/tests/functional/api/test_immutablesharedfields.py (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
import pytest
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models import Organization
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestImmutableSharedFields:
|
||||
@pytest.fixture(autouse=True)
|
||||
def configure_settings(self, settings):
|
||||
settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False
|
||||
|
||||
def test_create_raises_permission_denied(self, admin_user, post):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
resp = post(
|
||||
url=reverse('api:team_list'),
|
||||
data={'name': 'teamA', 'organization': orgA.id},
|
||||
user=admin_user,
|
||||
expect=403,
|
||||
)
|
||||
assert "Creation of this resource is not allowed" in resp.data['detail']
|
||||
|
||||
def test_perform_delete_raises_permission_denied(self, admin_user, delete):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
team = orgA.teams.create(name='teamA')
|
||||
resp = delete(
|
||||
url=reverse('api:team_detail', kwargs={'pk': team.id}),
|
||||
user=admin_user,
|
||||
expect=403,
|
||||
)
|
||||
assert "Deletion of this resource is not allowed" in resp.data['detail']
|
||||
|
||||
def test_perform_update(self, admin_user, patch):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
team = orgA.teams.create(name='teamA')
|
||||
# allow patching non-shared fields
|
||||
patch(
|
||||
url=reverse('api:team_detail', kwargs={'pk': team.id}),
|
||||
data={"description": "can change this field"},
|
||||
user=admin_user,
|
||||
expect=200,
|
||||
)
|
||||
orgB = Organization.objects.create(name='orgB')
|
||||
# prevent patching shared fields
|
||||
resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
|
||||
assert "Cannot change shared field" in resp.data['organization']
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'role',
|
||||
['admin_role', 'member_role'],
|
||||
)
|
||||
@pytest.mark.parametrize('resource', ['organization', 'team'])
|
||||
def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
if resource == 'organization':
|
||||
role = getattr(orgA, role)
|
||||
elif resource == 'team':
|
||||
teamA = orgA.teams.create(name='teamA')
|
||||
role = getattr(teamA, role)
|
||||
resp = post(
|
||||
url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
|
||||
data={'id': role.id},
|
||||
user=admin_user,
|
||||
expect=403,
|
||||
)
|
||||
assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
|
||||
@@ -32,13 +32,6 @@ def node_type_instance():
|
||||
return fn
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def instance_group(job_factory):
|
||||
ig = InstanceGroup(name="east")
|
||||
ig.save()
|
||||
return ig
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def containerized_instance_group(instance_group, kube_credential):
|
||||
ig = InstanceGroup(name="container")
|
||||
|
||||
@@ -131,11 +131,11 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ()
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ()
|
||||
|
||||
# Check that job is serialized correctly
|
||||
job_id = response.data['job']
|
||||
@@ -167,12 +167,12 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
called_with = data_to_internal(runtime_data)
|
||||
JobTemplate.create_unified_job.assert_called_with(**called_with)
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
called_with = data_to_internal(runtime_data)
|
||||
JobTemplate.create_unified_job.assert_called_with(**called_with)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
@@ -187,11 +187,11 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
|
||||
|
||||
mock_job.signal_start.assert_called_once()
|
||||
|
||||
@@ -203,14 +203,14 @@ def test_slice_timeout_forks_need_int(job_template_prompts, post, admin_user, mo
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
|
||||
)
|
||||
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
|
||||
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
|
||||
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
|
||||
)
|
||||
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
|
||||
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
|
||||
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -244,12 +244,12 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
expected_call = data_to_internal(runtime_data)
|
||||
assert JobTemplate.create_unified_job.call_args == (expected_call,)
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
expected_call = data_to_internal(runtime_data)
|
||||
assert JobTemplate.create_unified_job.call_args == (expected_call,)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
@@ -641,18 +641,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
|
||||
job_template.survey_spec = survey_spec_factory('survey_var')
|
||||
job_template.save()
|
||||
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
||||
admin_user,
|
||||
expect=201,
|
||||
)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
|
||||
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
||||
admin_user,
|
||||
expect=201,
|
||||
)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
@@ -670,22 +670,22 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
     job_template.survey_spec = survey_spec_factory('survey_var')
     job_template.save()
 
-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
-        with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
-            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
-                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
-                    post(
-                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
-                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
-                        admin_user,
-                        expect=201,
-                        format='json',
-                    )
-                    assert UnifiedJobTemplate.create_unified_job.called
-                    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
-                    call_args.pop('_eager_fields', None)  # internal purposes
-                    assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
+    mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
+    mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
+    post(
+        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
+        admin_user,
+        expect=201,
+        format='json',
+    )
+    assert UnifiedJobTemplate.create_unified_job.called
+    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
+    call_args.pop('_eager_fields', None)  # internal purposes
+    assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
 
     mock_job.signal_start.assert_called_once()
 
@@ -697,22 +697,22 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
     job_template.host_config_key = "foo"
     job_template.save()
 
-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
-        with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
-            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
-                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
-                    post(
-                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
-                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
-                        admin_user,
-                        expect=201,
-                        format='json',
-                    )
-                    assert UnifiedJobTemplate.create_unified_job.called
-                    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
-                    call_args.pop('_eager_fields', None)  # internal purposes
-                    assert call_args == {'limit': 'single-host'}
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
+    mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
+    mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
+    post(
+        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
+        admin_user,
+        expect=201,
+        format='json',
+    )
+    assert UnifiedJobTemplate.create_unified_job.called
+    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
+    call_args.pop('_eager_fields', None)  # internal purposes
+    assert call_args == {'limit': 'single-host'}
 
     mock_job.signal_start.assert_called_once()
 
@@ -725,9 +725,9 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
     job_template.save()
     host_with_alias = Host(name='localhost', inventory=job_template.inventory)
     host_with_alias.save()
-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
-        assert tuple(r.data['matching_hosts']) == ('localhost',)
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
+    assert tuple(r.data['matching_hosts']) == ('localhost',)
 
 
 @pytest.mark.django_db
@@ -738,6 +738,6 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
     job_template.save()
     host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
     host_with_alias.save()
-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
-        assert not r.data['matching_hosts']
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
+    assert not r.data['matching_hosts']
@@ -1,4 +1,5 @@
 import pytest
+from unittest import mock
 
 # AWX
 from awx.api.serializers import JobTemplateSerializer
@@ -8,10 +9,15 @@ from awx.main.migrations import _save_password_keys as save_password_keys
 
 # Django
 from django.apps import apps
+from django.test.utils import override_settings
 
 # DRF
 from rest_framework.exceptions import ValidationError
 
+# DAB
+from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
+from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import
+
 
 @pytest.mark.django_db
 @pytest.mark.parametrize(
@@ -369,3 +375,113 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
     )
     assert r.status_code == 400
     assert "Cannot start automatically, an inventory is required." in str(r.data)
+
+
+@pytest.mark.django_db
+class TestJobTemplateCallbackProxyIntegration:
+    """
+    Test the interaction of provision job template callback feature and:
+        settings.PROXY_IP_ALLOWED_LIST
+        x-trusted-proxy http header
+    """
+
+    @pytest.fixture
+    def job_template(self, inventory, project):
+        jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
+        return jt
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
+            'REMOTE_HOST': 'baz',
+            'REMOTE_ADDR': 'baz',
+        }
+        r = post(
+            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
+        )
+        assert r.data['msg'] == 'No matching host could be found!'
+
+    @pytest.mark.parametrize(
+        'headers, expected',
+        (
+            pytest.param(
+                {
+                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+                    'REMOTE_HOST': 'my.proxy.example.org',
+                },
+                201,
+            ),
+            pytest.param(
+                {
+                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+                    'REMOTE_HOST': 'not-my-proxy.org',
+                },
+                400,
+            ),
+        ),
+    )
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected):  # noqa: F811
+        job_template.inventory.hosts.create(name='my.proxy.example.org')
+
+        post(
+            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+            data={'host_config_key': 'abcd'},
+            user=admin_user,
+            expect=expected,
+            **headers
+        )
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
+    def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+            'REMOTE_ADDR': 'bar',
+            'REMOTE_HOST': 'baz',
+        }
+        post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
+        }
+
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
+                post(
+                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+                    data={'host_config_key': 'abcd'},
+                    user=admin_user,
+                    expect=201,
+                    **headers
+                )
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
+            'REMOTE_ADDR': 'bar',
+            'REMOTE_HOST': 'baz',
+        }
+
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
+                post(
+                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+                    data={'host_config_key': 'abcd'},
+                    user=admin_user,
+                    expect=400,
+                    **headers
+                )
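
The class added above pins down how the callback view decides which hostnames to trust: forwarded headers such as X-From-The-Load-Balancer only count when the request arrives through an allow-listed proxy (PROXY_IP_ALLOWED_LIST) or carries an x-trusted-proxy header whose signature verifies against ANSIBLE_BASE_JWT_KEY; with an empty allow list every header is trusted, and otherwise only REMOTE_ADDR/REMOTE_HOST are used. A rough sketch of that candidate-selection logic, illustrative only and not the AWX implementation:

    def candidate_remote_hosts(meta, remote_host_headers, proxy_allowed, proxy_verified):
        # meta maps WSGI-style keys to values, like the `headers` dicts in the tests.
        # Forwarded headers are believable only behind a proxy we trust: a verified
        # x-trusted-proxy signature, an empty allow list, or a connecting address
        # that is on the allow list.
        trusted = proxy_verified or not proxy_allowed or any(
            meta.get(k) in proxy_allowed for k in ('REMOTE_ADDR', 'REMOTE_HOST')
        )
        hosts = set()
        for header in remote_host_headers:
            value = meta.get(header)
            if not value:
                continue
            if header not in ('REMOTE_ADDR', 'REMOTE_HOST') and not trusted:
                continue
            # A forwarded header may carry a comma-separated proxy chain.
            hosts.update(part.strip() for part in value.split(','))
        return hosts

Under those assumptions the sketch reproduces each case above: with an empty allow list, 'foobar' is taken from the load-balancer header (201); with a verified trusted-proxy header, the chain 'baz, my.proxy.example.org' yields 'baz', which matches no host (400).
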
@@ -8,8 +8,10 @@ from django.db import connection
 from django.test.utils import override_settings
 from django.utils.encoding import smart_str, smart_bytes
 
+from rest_framework.reverse import reverse as drf_reverse
+
 from awx.main.utils.encryption import decrypt_value, get_encryption_key
-from awx.api.versioning import reverse, drf_reverse
+from awx.api.versioning import reverse
 from awx.main.models.oauth import OAuth2Application as Application, OAuth2AccessToken as AccessToken
 from awx.main.tests.functional import immediate_on_commit
 from awx.sso.models import UserEnterpriseAuth
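
The import hunk above stops pulling `drf_reverse` through `awx.api.versioning` and takes it directly from DRF. Unlike Django's `reverse`, DRF's variant can build a fully qualified URL when handed a request; a small sketch, with an illustrative view name:

    from rest_framework.reverse import reverse as drf_reverse

    def absolute_url_for(request):
        # With request=..., DRF prefixes scheme and host; without it, the
        # result degrades to a plain path, like Django's reverse.
        return drf_reverse('api:user_me_list', request=request)
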
@@ -165,8 +165,8 @@ class TestAccessListCapabilities:
     def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
         inventory.admin_role.members.add(rando)
 
-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
 
         mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
         self._assert_one_in_list(response.data)
@@ -174,8 +174,8 @@
         assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
 
     def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
 
         mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
         self._assert_one_in_list(response.data, sublist='indirect_access')
@@ -185,8 +185,8 @@
     def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
         team.member_role.children.add(inventory.admin_role)
 
-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
 
         mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
         self._assert_one_in_list(response.data)
@@ -198,8 +198,8 @@
 def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
     team.member_role.children.add(inventory.admin_role)
 
-    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-        response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
+    mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+    response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
 
     # Did we assess whether team_member can remove team's permission to the inventory?
     mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
@@ -212,8 +212,8 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
     organization.member_role.members.add(alice)
     organization.member_role.members.add(bob)
 
-    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-        response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
+    mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+    response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
 
     # Did we assess whether bob can remove alice's permission to the inventory?
     mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
@@ -3,17 +3,6 @@ import pytest
 from awx.api.versioning import reverse
 
 
-@pytest.mark.django_db
-def test_admin_visible_to_orphaned_users(get, alice):
-    names = set()
-
-    response = get(reverse('api:role_list'), user=alice)
-    for item in response.data['results']:
-        names.add(item['name'])
-    assert 'System Auditor' in names
-    assert 'System Administrator' in names
-
-
 @pytest.mark.django_db
 @pytest.mark.parametrize('role,code', [('member_role', 400), ('admin_role', 400), ('inventory_admin_role', 204)])
 @pytest.mark.parametrize('reversed', [True, False])
Some files were not shown because too many files have changed in this diff.