mirror of
https://github.com/ansible/awx.git
synced 2026-04-07 02:59:21 -02:30
Compare commits
310 Commits
23.8.1
...
poc-inv-cr
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
21d7ccc3ea | ||
|
|
75b9df3f33 | ||
|
|
6d0c47fdd0 | ||
|
|
54b4acbdfc | ||
|
|
a41766090e | ||
|
|
34fa897dda | ||
|
|
32df114e41 | ||
|
|
018f235a64 | ||
|
|
7e77235d5e | ||
|
|
139d8f0ae2 | ||
|
|
7691365aea | ||
|
|
59f61517d4 | ||
|
|
fa670e2d7f | ||
|
|
a87a044d64 | ||
|
|
381ade1148 | ||
|
|
864a30e3d4 | ||
|
|
5f42db67e6 | ||
|
|
ddf4f288d4 | ||
|
|
e75bc8bc1e | ||
|
|
bb533287b8 | ||
|
|
9979fc659e | ||
|
|
9e5babc093 | ||
|
|
c71e2524ed | ||
|
|
48b4c62186 | ||
|
|
853730acb9 | ||
|
|
f1448fced1 | ||
|
|
7697b6a69b | ||
|
|
22a491c32c | ||
|
|
cbd9dce940 | ||
|
|
a4fdcc1cca | ||
|
|
df95439008 | ||
|
|
acd834df8b | ||
|
|
587f0ecf98 | ||
|
|
5a2091f7bf | ||
|
|
fa7423819a | ||
|
|
fde8af9f11 | ||
|
|
209e7e27b1 | ||
|
|
6c7d29a982 | ||
|
|
282ba36839 | ||
|
|
b727d2c3b3 | ||
|
|
7fc3d5c7c7 | ||
|
|
4e055f46c4 | ||
|
|
f595985b7c | ||
|
|
ea232315bf | ||
|
|
ee251812b5 | ||
|
|
00ba1ea569 | ||
|
|
d91af132c1 | ||
|
|
94e5795dfc | ||
|
|
c4688d6298 | ||
|
|
6763badea3 | ||
|
|
2c4ad6ef0f | ||
|
|
37f44d7214 | ||
|
|
98bbc836a6 | ||
|
|
b59aff50dc | ||
|
|
a70b0c1ddc | ||
|
|
db72c9d5b8 | ||
|
|
4e0d19914f | ||
|
|
6f2307f50e | ||
|
|
dbc2215bb6 | ||
|
|
7c08b29827 | ||
|
|
407194d320 | ||
|
|
853af295d9 | ||
|
|
4738c8333a | ||
|
|
13dcea0afd | ||
|
|
bc2d339981 | ||
|
|
bef9ef10bb | ||
|
|
8645fe5c57 | ||
|
|
b93aa20362 | ||
|
|
4bbfc8a946 | ||
|
|
2c8eef413b | ||
|
|
d5bad1a533 | ||
|
|
f6c0effcb2 | ||
|
|
31a086b11a | ||
|
|
d94f766fcb | ||
|
|
a7113549eb | ||
|
|
bfd811f408 | ||
|
|
030704a9e1 | ||
|
|
c312d9bce3 | ||
|
|
aadcc217eb | ||
|
|
345c1c11e9 | ||
|
|
2c3a7fafc5 | ||
|
|
dbcd32a1d9 | ||
|
|
d45e258a78 | ||
|
|
d16b69a102 | ||
|
|
8b4efbc973 | ||
|
|
4cb061e7db | ||
|
|
31db6a1447 | ||
|
|
ad9d5904d8 | ||
|
|
b837d549ff | ||
|
|
9e22865d2e | ||
|
|
ee3e3e1516 | ||
|
|
4a8f6e45f8 | ||
|
|
6a317cca1b | ||
|
|
d67af79451 | ||
|
|
fe77fda7b2 | ||
|
|
f613b76baa | ||
|
|
054cbe69d7 | ||
|
|
87e9dcb6d7 | ||
|
|
c8829b057e | ||
|
|
a0b376a6ca | ||
|
|
d675207f99 | ||
|
|
20504042c9 | ||
|
|
0e87e97820 | ||
|
|
1f154742df | ||
|
|
85fc81aab1 | ||
|
|
5cfeeb3e87 | ||
|
|
a8c07b06d8 | ||
|
|
53c5feaf6b | ||
|
|
6f57aaa8f5 | ||
|
|
bea74a401d | ||
|
|
54e85813c8 | ||
|
|
b69ed08fe5 | ||
|
|
de25408a23 | ||
|
|
b17f0a188b | ||
|
|
fb860d76ce | ||
|
|
451f20ce0f | ||
|
|
c1dc0c7b86 | ||
|
|
d65ea2a3d5 | ||
|
|
8827ae7554 | ||
|
|
4915262af1 | ||
|
|
d43c91e1a5 | ||
|
|
b470ca32af | ||
|
|
793777bec7 | ||
|
|
6dc4a4508d | ||
|
|
cf09a4220d | ||
|
|
659c3b64de | ||
|
|
37ad690d09 | ||
|
|
7845ec7e01 | ||
|
|
a15bcf1d55 | ||
|
|
7b3fb2c2a8 | ||
|
|
6df47c8449 | ||
|
|
cae42653bf | ||
|
|
da46a29f40 | ||
|
|
0eb465531c | ||
|
|
d0fe0ed796 | ||
|
|
ceafa14c9d | ||
|
|
08e1454098 | ||
|
|
776b661fb3 | ||
|
|
af6ccdbde5 | ||
|
|
559ab3564b | ||
|
|
208ef0ce25 | ||
|
|
c3d9aa54d8 | ||
|
|
66efe7198a | ||
|
|
adf930ee42 | ||
|
|
892410477a | ||
|
|
0d4f653794 | ||
|
|
8de8f6dce2 | ||
|
|
fc9064e27f | ||
|
|
7de350dc3e | ||
|
|
d4bdaad4d8 | ||
|
|
a9b2ffa3e9 | ||
|
|
1b8d409043 | ||
|
|
da2bccf5a8 | ||
|
|
a2f083bd8e | ||
|
|
4d641b6cf5 | ||
|
|
439c3f0c23 | ||
|
|
946bbe3560 | ||
|
|
20f054d600 | ||
|
|
918d5b3565 | ||
|
|
158314af50 | ||
|
|
4754819a09 | ||
|
|
78fc23138a | ||
|
|
014534bfa5 | ||
|
|
2502e7c7d8 | ||
|
|
fb237e3834 | ||
|
|
e4646ae611 | ||
|
|
7dc77546f4 | ||
|
|
f5f85666c8 | ||
|
|
47a061eb39 | ||
|
|
c760577855 | ||
|
|
814ceb0d06 | ||
|
|
f178c84728 | ||
|
|
c0f71801f6 | ||
|
|
4e8e1398d7 | ||
|
|
3d6a8fd4ef | ||
|
|
e873bb1304 | ||
|
|
672f1eb745 | ||
|
|
199507c6f1 | ||
|
|
a176c04c14 | ||
|
|
e3af658f82 | ||
|
|
e8a3b96482 | ||
|
|
c015e8413e | ||
|
|
390c2d8907 | ||
|
|
97605c5f19 | ||
|
|
818c326160 | ||
|
|
c98727d83e | ||
|
|
a138a92e67 | ||
|
|
7aed19ffda | ||
|
|
3bb559dd09 | ||
|
|
389a729b75 | ||
|
|
2f3c9122fd | ||
|
|
733478ee19 | ||
|
|
41c6337fc1 | ||
|
|
7446da1c2f | ||
|
|
c79fca5ceb | ||
|
|
dc5f43927a | ||
|
|
35a5a81e19 | ||
|
|
9dcc11d54c | ||
|
|
74ce21fa54 | ||
|
|
eb93660b36 | ||
|
|
f50e597548 | ||
|
|
817c3b36b9 | ||
|
|
1859a6ae69 | ||
|
|
0645d342dd | ||
|
|
61ec03e540 | ||
|
|
09f0a366bf | ||
|
|
778961d31e | ||
|
|
f962c88df3 | ||
|
|
8db3ffe719 | ||
|
|
cc5d4dd119 | ||
|
|
86204cf23b | ||
|
|
468949b899 | ||
|
|
f1d9966224 | ||
|
|
b022b50966 | ||
|
|
e2f4213839 | ||
|
|
ae1235b223 | ||
|
|
c061f59f1c | ||
|
|
3edaaebba2 | ||
|
|
7cdf1c7f96 | ||
|
|
d558204192 | ||
|
|
d06ce8f911 | ||
|
|
4b6f7e0ebe | ||
|
|
370c567be1 | ||
|
|
9be64f3de5 | ||
|
|
30500e5a95 | ||
|
|
bb323c5710 | ||
|
|
7571df49d5 | ||
|
|
1559c21033 | ||
|
|
d9b81731e9 | ||
|
|
2034cca3a9 | ||
|
|
0b5e59d9cb | ||
|
|
f48b2d1ae5 | ||
|
|
b44bb98c7e | ||
|
|
8cafdf0400 | ||
|
|
3f566c8737 | ||
|
|
c8021a25bf | ||
|
|
934646a0f6 | ||
|
|
9bb97dd658 | ||
|
|
7150f5edc6 | ||
|
|
93da15c0ee | ||
|
|
ab593bda45 | ||
|
|
065bd3ae2a | ||
|
|
8ff7260bc6 | ||
|
|
a635445082 | ||
|
|
949e7efab1 | ||
|
|
615f09226f | ||
|
|
d903c524f5 | ||
|
|
393d9c39c6 | ||
|
|
dfab342bb4 | ||
|
|
12843eccf7 | ||
|
|
dd9160135d | ||
|
|
ad96a92fa7 | ||
|
|
ca8085fe7e | ||
|
|
b076cb00a9 | ||
|
|
ee9eac15dc | ||
|
|
3f2f7b75a6 | ||
|
|
b71645f3b1 | ||
|
|
eb300252b8 | ||
|
|
2e2cd7f2de | ||
|
|
727278aaa3 | ||
|
|
81825ab755 | ||
|
|
7f2a1b6b03 | ||
|
|
1b56d94d30 | ||
|
|
e1e32c971c | ||
|
|
a4a2fabc01 | ||
|
|
b7b7bfa520 | ||
|
|
887604317e | ||
|
|
d35d8b6ed7 | ||
|
|
ec28eff7f7 | ||
|
|
a5d17539c6 | ||
|
|
a49d894cf1 | ||
|
|
b3466d4449 | ||
|
|
237adc6150 | ||
|
|
09b028ee3c | ||
|
|
fb83bfbc31 | ||
|
|
88e406e121 | ||
|
|
59d0bcc63f | ||
|
|
3fb3125bc3 | ||
|
|
d70c6b9474 | ||
|
|
5549516a37 | ||
|
|
14ac91a8a2 | ||
|
|
d5753818a0 | ||
|
|
33010a2e02 | ||
|
|
14454cc670 | ||
|
|
7ab2bca16e | ||
|
|
f0f655f2c3 | ||
|
|
4286d411a7 | ||
|
|
06ad32ed8e | ||
|
|
1ebff23232 | ||
|
|
700de14c76 | ||
|
|
8605e339df | ||
|
|
e50954ce40 | ||
|
|
7caca60308 | ||
|
|
f4e13af056 | ||
|
|
decdb56288 | ||
|
|
bcd4c2e8ef | ||
|
|
d663066ac5 | ||
|
|
1ceebb275c | ||
|
|
f78ba282a6 | ||
|
|
81d88df757 | ||
|
|
0bdb01a9e9 | ||
|
|
cd91fbf59f | ||
|
|
f240e640e5 | ||
|
|
46f489185e | ||
|
|
dbb80fb7e3 | ||
|
|
cb3d357ce1 | ||
|
|
dfa4db9266 | ||
|
|
6906a88dc9 | ||
|
|
1f7be9258c | ||
|
|
dcce024424 |
10
.github/actions/awx_devel_image/action.yml
vendored
10
.github/actions/awx_devel_image/action.yml
vendored
@@ -11,6 +11,12 @@ runs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Set lower case owner name
|
||||||
|
shell: bash
|
||||||
|
run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
|
||||||
|
env:
|
||||||
|
OWNER: '${{ github.repository_owner }}'
|
||||||
|
|
||||||
- name: Log in to registry
|
- name: Log in to registry
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
@@ -18,11 +24,11 @@ runs:
|
|||||||
|
|
||||||
- name: Pre-pull latest devel image to warm cache
|
- name: Pre-pull latest devel image to warm cache
|
||||||
shell: bash
|
shell: bash
|
||||||
run: docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
|
run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
|
||||||
|
|
||||||
- name: Build image for current source checkout
|
- name: Build image for current source checkout
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
|
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
|
||||||
COMPOSE_TAG=${{ github.base_ref }} \
|
COMPOSE_TAG=${{ github.base_ref }} \
|
||||||
make docker-compose-build
|
make docker-compose-build
|
||||||
|
|||||||
14
.github/actions/run_awx_devel/action.yml
vendored
14
.github/actions/run_awx_devel/action.yml
vendored
@@ -35,7 +35,7 @@ runs:
|
|||||||
- name: Start AWX
|
- name: Start AWX
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
|
DEV_DOCKER_OWNER=${{ github.repository_owner }} \
|
||||||
COMPOSE_TAG=${{ github.base_ref }} \
|
COMPOSE_TAG=${{ github.base_ref }} \
|
||||||
COMPOSE_UP_OPTS="-d" \
|
COMPOSE_UP_OPTS="-d" \
|
||||||
make docker-compose
|
make docker-compose
|
||||||
@@ -57,21 +57,11 @@ runs:
|
|||||||
awx-manage update_password --username=admin --password=password
|
awx-manage update_password --username=admin --password=password
|
||||||
EOSH
|
EOSH
|
||||||
|
|
||||||
- name: Build UI
|
|
||||||
# This must be a string comparison in composite actions:
|
|
||||||
# https://github.com/actions/runner/issues/2238
|
|
||||||
if: ${{ inputs.build-ui == 'true' }}
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
docker exec -i tools_awx_1 sh <<-EOSH
|
|
||||||
make ui-devel
|
|
||||||
EOSH
|
|
||||||
|
|
||||||
- name: Get instance data
|
- name: Get instance data
|
||||||
id: data
|
id: data
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks._sources_awx.IPAddress}}' tools_awx_1)
|
AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
|
||||||
ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
|
ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
|
||||||
echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
|
echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
|
||||||
echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
|
echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
|
||||||
|
|||||||
3
.github/pr_labeler.yml
vendored
3
.github/pr_labeler.yml
vendored
@@ -15,5 +15,4 @@
|
|||||||
|
|
||||||
"dependencies":
|
"dependencies":
|
||||||
- any: ["awx/ui/package.json"]
|
- any: ["awx/ui/package.json"]
|
||||||
- any: ["requirements/*.txt"]
|
- any: ["requirements/*"]
|
||||||
- any: ["requirements/requirements.in"]
|
|
||||||
|
|||||||
2
.github/triage_replies.md
vendored
2
.github/triage_replies.md
vendored
@@ -1,7 +1,7 @@
|
|||||||
## General
|
## General
|
||||||
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
|
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
|
||||||
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
|
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
|
||||||
- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
|
- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
55
.github/workflows/ci.yml
vendored
55
.github/workflows/ci.yml
vendored
@@ -38,7 +38,9 @@ jobs:
|
|||||||
- name: ui-test-general
|
- name: ui-test-general
|
||||||
command: make ui-test-general
|
command: make ui-test-general
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Build awx_devel image for running checks
|
- name: Build awx_devel image for running checks
|
||||||
uses: ./.github/actions/awx_devel_image
|
uses: ./.github/actions/awx_devel_image
|
||||||
@@ -52,7 +54,9 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: ./.github/actions/run_awx_devel
|
- uses: ./.github/actions/run_awx_devel
|
||||||
id: awx
|
id: awx
|
||||||
@@ -66,15 +70,19 @@ jobs:
|
|||||||
awx-operator:
|
awx-operator:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
|
env:
|
||||||
|
DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout awx
|
- name: Checkout awx
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false
|
||||||
path: awx
|
path: awx
|
||||||
|
|
||||||
- name: Checkout awx-operator
|
- name: Checkout awx-operator
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false\
|
||||||
repository: ansible/awx-operator
|
repository: ansible/awx-operator
|
||||||
path: awx-operator
|
path: awx-operator
|
||||||
|
|
||||||
@@ -94,11 +102,11 @@ jobs:
|
|||||||
- name: Build AWX image
|
- name: Build AWX image
|
||||||
working-directory: awx
|
working-directory: awx
|
||||||
run: |
|
run: |
|
||||||
ansible-playbook -v tools/ansible/build.yml \
|
VERSION=`make version-for-buildyml` make awx-kube-build
|
||||||
-e headless=yes \
|
env:
|
||||||
-e awx_image=awx \
|
COMPOSE_TAG: ci
|
||||||
-e awx_image_tag=ci \
|
DEV_DOCKER_TAG_BASE: local
|
||||||
-e ansible_python_interpreter=$(which python3)
|
HEADLESS: yes
|
||||||
|
|
||||||
- name: Run test deployment with awx-operator
|
- name: Run test deployment with awx-operator
|
||||||
working-directory: awx-operator
|
working-directory: awx-operator
|
||||||
@@ -107,10 +115,19 @@ jobs:
|
|||||||
ansible-galaxy collection install -r molecule/requirements.yml
|
ansible-galaxy collection install -r molecule/requirements.yml
|
||||||
sudo rm -f $(which kustomize)
|
sudo rm -f $(which kustomize)
|
||||||
make kustomize
|
make kustomize
|
||||||
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
|
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
|
||||||
env:
|
env:
|
||||||
AWX_TEST_IMAGE: awx
|
AWX_TEST_IMAGE: local/awx
|
||||||
AWX_TEST_VERSION: ci
|
AWX_TEST_VERSION: ci
|
||||||
|
AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
|
||||||
|
STORE_DEBUG_OUTPUT: true
|
||||||
|
|
||||||
|
- name: Upload debug output
|
||||||
|
if: failure()
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: awx-operator-debug-output
|
||||||
|
path: ${{ env.DEBUG_OUTPUT_DIR }}
|
||||||
|
|
||||||
collection-sanity:
|
collection-sanity:
|
||||||
name: awx_collection sanity
|
name: awx_collection sanity
|
||||||
@@ -119,7 +136,9 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
|
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
|
||||||
- name: Upgrade ansible-core
|
- name: Upgrade ansible-core
|
||||||
@@ -127,10 +146,6 @@ jobs:
|
|||||||
|
|
||||||
- name: Run sanity tests
|
- name: Run sanity tests
|
||||||
run: make test_collection_sanity
|
run: make test_collection_sanity
|
||||||
env:
|
|
||||||
# needed due to cgroupsv2. This is fixed, but a stable release
|
|
||||||
# with the fix has not been made yet.
|
|
||||||
ANSIBLE_TEST_PREFER_PODMAN: 1
|
|
||||||
|
|
||||||
collection-integration:
|
collection-integration:
|
||||||
name: awx_collection integration
|
name: awx_collection integration
|
||||||
@@ -147,7 +162,9 @@ jobs:
|
|||||||
- name: r-z0-9
|
- name: r-z0-9
|
||||||
regex: ^[r-z0-9]
|
regex: ^[r-z0-9]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: ./.github/actions/run_awx_devel
|
- uses: ./.github/actions/run_awx_devel
|
||||||
id: awx
|
id: awx
|
||||||
@@ -193,7 +210,9 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Upgrade ansible-core
|
- name: Upgrade ansible-core
|
||||||
run: python3 -m pip install --upgrade ansible-core
|
run: python3 -m pip install --upgrade ansible-core
|
||||||
|
|||||||
57
.github/workflows/dab-release.yml
vendored
Normal file
57
.github/workflows/dab-release.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
name: django-ansible-base requirements update
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 6 * * *' # once an day @ 6 AM
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
contents: write
|
||||||
|
jobs:
|
||||||
|
dab-pin-newest:
|
||||||
|
if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- id: dab-release
|
||||||
|
name: Get current django-ansible-base release version
|
||||||
|
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
|
||||||
|
with:
|
||||||
|
owner: ansible
|
||||||
|
repo: django-ansible-base
|
||||||
|
excludes: prerelease, draft
|
||||||
|
|
||||||
|
- name: Check out respository code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- id: dab-pinned
|
||||||
|
name: Get current django-ansible-base pinned version
|
||||||
|
run:
|
||||||
|
echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Update django-ansible-base pinned version to upstream release
|
||||||
|
run:
|
||||||
|
requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
|
||||||
|
with:
|
||||||
|
base: devel
|
||||||
|
branch: bump-django-ansible-base
|
||||||
|
title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
|
||||||
|
body: |
|
||||||
|
##### SUMMARY
|
||||||
|
Automated .github/workflows/dab-release.yml
|
||||||
|
|
||||||
|
django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
|
||||||
|
requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
|
||||||
|
|
||||||
|
##### ISSUE TYPE
|
||||||
|
- Bug, Docs Fix or other nominal change
|
||||||
|
|
||||||
|
##### COMPONENT NAME
|
||||||
|
- API
|
||||||
|
|
||||||
|
commit-message: |
|
||||||
|
Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
|
||||||
|
add-paths:
|
||||||
|
requirements/requirements_git.txt
|
||||||
69
.github/workflows/devel_images.yml
vendored
69
.github/workflows/devel_images.yml
vendored
@@ -2,29 +2,54 @@
|
|||||||
name: Build/Push Development Images
|
name: Build/Push Development Images
|
||||||
env:
|
env:
|
||||||
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
||||||
|
DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
|
||||||
on:
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- devel
|
- devel
|
||||||
- release_*
|
- release_*
|
||||||
- feature_*
|
- feature_*
|
||||||
jobs:
|
jobs:
|
||||||
push:
|
push-development-images:
|
||||||
if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 60
|
timeout-minutes: 120
|
||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
build-targets:
|
||||||
|
- image-name: awx_devel
|
||||||
|
make-target: docker-compose-buildx
|
||||||
|
- image-name: awx_kube_devel
|
||||||
|
make-target: awx-kube-dev-buildx
|
||||||
|
- image-name: awx
|
||||||
|
make-target: awx-kube-buildx
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Skipping build of awx image for non-awx repository
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Set lower case owner name
|
|
||||||
run: |
|
run: |
|
||||||
echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
|
echo "Skipping build of awx image for non-awx repository"
|
||||||
|
exit 0
|
||||||
|
if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
|
||||||
|
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Set GITHUB_ENV variables
|
||||||
|
run: |
|
||||||
|
echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
|
||||||
|
echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
|
||||||
|
echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
env:
|
env:
|
||||||
OWNER: '${{ github.repository_owner }}'
|
OWNER: '${{ github.repository_owner }}'
|
||||||
|
|
||||||
@@ -37,23 +62,17 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
|
|
||||||
- name: Pre-pull image to warm build cache
|
- name: Setup node and npm for the new UI build
|
||||||
run: |
|
uses: actions/setup-node@v2
|
||||||
docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
|
with:
|
||||||
docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
|
node-version: '18'
|
||||||
docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
|
if: matrix.build-targets.image-name == 'awx'
|
||||||
|
|
||||||
- name: Build images
|
- name: Prebuild new UI for awx image (to speed up build process)
|
||||||
run: |
|
run: |
|
||||||
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
|
make ui-next
|
||||||
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
|
if: matrix.build-targets.image-name == 'awx'
|
||||||
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
|
|
||||||
|
|
||||||
- name: Push development images
|
- name: Build and push AWX devel images
|
||||||
run: |
|
run: |
|
||||||
docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
|
make ${{ matrix.build-targets.make-target }}
|
||||||
docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
|
|
||||||
|
|
||||||
- name: Push AWX k8s image, only for upstream and feature branches
|
|
||||||
run: docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
|
|
||||||
if: endsWith(github.repository, '/awx')
|
|
||||||
|
|||||||
4
.github/workflows/docs.yml
vendored
4
.github/workflows/docs.yml
vendored
@@ -8,7 +8,9 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: install tox
|
- name: install tox
|
||||||
run: pip install tox
|
run: pip install tox
|
||||||
|
|||||||
75
.github/workflows/e2e_test.yml
vendored
75
.github/workflows/e2e_test.yml
vendored
@@ -1,75 +0,0 @@
|
|||||||
---
|
|
||||||
name: E2E Tests
|
|
||||||
env:
|
|
||||||
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types: [labeled]
|
|
||||||
jobs:
|
|
||||||
e2e-test:
|
|
||||||
if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 40
|
|
||||||
permissions:
|
|
||||||
packages: write
|
|
||||||
contents: read
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- uses: ./.github/actions/run_awx_devel
|
|
||||||
id: awx
|
|
||||||
with:
|
|
||||||
build-ui: true
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Pull awx_cypress_base image
|
|
||||||
run: |
|
|
||||||
docker pull quay.io/awx/awx_cypress_base:latest
|
|
||||||
|
|
||||||
- name: Checkout test project
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
repository: ${{ github.repository_owner }}/tower-qa
|
|
||||||
ssh-key: ${{ secrets.QA_REPO_KEY }}
|
|
||||||
path: tower-qa
|
|
||||||
ref: devel
|
|
||||||
|
|
||||||
- name: Build cypress
|
|
||||||
run: |
|
|
||||||
cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
|
|
||||||
docker build -t awx-pf-tests .
|
|
||||||
|
|
||||||
- name: Run E2E tests
|
|
||||||
env:
|
|
||||||
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
|
|
||||||
run: |
|
|
||||||
export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
|
|
||||||
export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
|
|
||||||
export COMMIT_INFO_SHA=$GITHUB_SHA
|
|
||||||
export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
|
|
||||||
cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
|
|
||||||
AWX_IP=${{ steps.awx.outputs.ip }}
|
|
||||||
printenv > .env
|
|
||||||
echo "Executing tests:"
|
|
||||||
docker run \
|
|
||||||
--network '_sources_default' \
|
|
||||||
--ipc=host \
|
|
||||||
--env-file=.env \
|
|
||||||
-e CYPRESS_baseUrl="https://$AWX_IP:8043" \
|
|
||||||
-e CYPRESS_AWX_E2E_USERNAME=admin \
|
|
||||||
-e CYPRESS_AWX_E2E_PASSWORD='password' \
|
|
||||||
-e COMMAND="npm run cypress-concurrently-gha" \
|
|
||||||
-v /dev/shm:/dev/shm \
|
|
||||||
-v $PWD:/e2e \
|
|
||||||
-w /e2e \
|
|
||||||
awx-pf-tests run --project .
|
|
||||||
|
|
||||||
- uses: ./.github/actions/upload_awx_devel_logs
|
|
||||||
if: always()
|
|
||||||
with:
|
|
||||||
log-filename: e2e-${{ matrix.job }}.log
|
|
||||||
5
.github/workflows/label_issue.yml
vendored
5
.github/workflows/label_issue.yml
vendored
@@ -30,7 +30,10 @@ jobs:
|
|||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
name: Label Issue - Community
|
name: Label Issue - Community
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v4
|
||||||
- name: Install python requests
|
- name: Install python requests
|
||||||
run: pip install requests
|
run: pip install requests
|
||||||
|
|||||||
5
.github/workflows/label_pr.yml
vendored
5
.github/workflows/label_pr.yml
vendored
@@ -29,7 +29,10 @@ jobs:
|
|||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
name: Label PR - Community
|
name: Label PR - Community
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v4
|
||||||
- name: Install python requests
|
- name: Install python requests
|
||||||
run: pip install requests
|
run: pip install requests
|
||||||
|
|||||||
57
.github/workflows/promote.yml
vendored
57
.github/workflows/promote.yml
vendored
@@ -7,7 +7,11 @@ env:
|
|||||||
on:
|
on:
|
||||||
release:
|
release:
|
||||||
types: [published]
|
types: [published]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
tag_name:
|
||||||
|
description: 'Name for the tag of the release.'
|
||||||
|
required: true
|
||||||
permissions:
|
permissions:
|
||||||
contents: read # to fetch code (actions/checkout)
|
contents: read # to fetch code (actions/checkout)
|
||||||
|
|
||||||
@@ -17,8 +21,20 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
steps:
|
steps:
|
||||||
|
- name: Set GitHub Env vars for workflow_dispatch event
|
||||||
|
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||||
|
run: |
|
||||||
|
echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Set GitHub Env vars if release event
|
||||||
|
if: ${{ github.event_name == 'release' }}
|
||||||
|
run: |
|
||||||
|
echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Checkout awx
|
- name: Checkout awx
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Get python version from Makefile
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
@@ -43,16 +59,21 @@ jobs:
|
|||||||
- name: Build collection and publish to galaxy
|
- name: Build collection and publish to galaxy
|
||||||
env:
|
env:
|
||||||
COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
|
COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
|
||||||
COLLECTION_VERSION: ${{ github.event.release.tag_name }}
|
COLLECTION_VERSION: ${{ env.TAG_NAME }}
|
||||||
COLLECTION_TEMPLATE_VERSION: true
|
COLLECTION_TEMPLATE_VERSION: true
|
||||||
run: |
|
run: |
|
||||||
|
sudo apt-get install jq
|
||||||
make build_collection
|
make build_collection
|
||||||
if [ "$(curl -L --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
|
count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
|
||||||
echo "Galaxy release already done"; \
|
if [[ "$count" == "1" ]]; then
|
||||||
else \
|
echo "Galaxy release already done";
|
||||||
|
elif [[ "$count" == "0" ]]; then
|
||||||
ansible-galaxy collection publish \
|
ansible-galaxy collection publish \
|
||||||
--token=${{ secrets.GALAXY_TOKEN }} \
|
--token=${{ secrets.GALAXY_TOKEN }} \
|
||||||
awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
|
awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
|
||||||
|
else
|
||||||
|
echo "Unexpected count from galaxy search: $count";
|
||||||
|
exit 1;
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Set official pypi info
|
- name: Set official pypi info
|
||||||
@@ -64,6 +85,8 @@ jobs:
|
|||||||
if: ${{ github.repository_owner != 'ansible' }}
|
if: ${{ github.repository_owner != 'ansible' }}
|
||||||
|
|
||||||
- name: Build awxkit and upload to pypi
|
- name: Build awxkit and upload to pypi
|
||||||
|
env:
|
||||||
|
SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
|
||||||
run: |
|
run: |
|
||||||
git reset --hard
|
git reset --hard
|
||||||
cd awxkit && python3 setup.py sdist bdist_wheel
|
cd awxkit && python3 setup.py sdist bdist_wheel
|
||||||
@@ -83,11 +106,15 @@ jobs:
|
|||||||
|
|
||||||
- name: Re-tag and promote awx image
|
- name: Re-tag and promote awx image
|
||||||
run: |
|
run: |
|
||||||
docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
|
docker buildx imagetools create \
|
||||||
docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
|
ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
|
||||||
docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
|
--tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
|
||||||
docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
|
docker buildx imagetools create \
|
||||||
docker push quay.io/${{ github.repository }}:latest
|
ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
|
||||||
docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
|
--tag quay.io/${{ github.repository }}:latest
|
||||||
docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
|
|
||||||
docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
|
- name: Re-tag and promote awx-ee image
|
||||||
|
run: |
|
||||||
|
docker buildx imagetools create \
|
||||||
|
ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
|
||||||
|
--tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
|
||||||
|
|||||||
102
.github/workflows/stage.yml
vendored
102
.github/workflows/stage.yml
vendored
@@ -45,11 +45,27 @@ jobs:
|
|||||||
exit 0
|
exit 0
|
||||||
|
|
||||||
- name: Checkout awx
|
- name: Checkout awx
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false
|
||||||
path: awx
|
path: awx
|
||||||
|
|
||||||
|
- name: Checkout awx-operator
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
repository: ${{ github.repository_owner }}/awx-operator
|
||||||
|
path: awx-operator
|
||||||
|
|
||||||
|
- name: Checkout awx-logos
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
repository: ansible/awx-logos
|
||||||
|
path: awx-logos
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Get python version from Makefile
|
||||||
|
working-directory: awx
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Install python ${{ env.py_version }}
|
- name: Install python ${{ env.py_version }}
|
||||||
@@ -57,63 +73,73 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
python-version: ${{ env.py_version }}
|
python-version: ${{ env.py_version }}
|
||||||
|
|
||||||
- name: Checkout awx-logos
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
repository: ansible/awx-logos
|
|
||||||
path: awx-logos
|
|
||||||
|
|
||||||
- name: Checkout awx-operator
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
repository: ${{ github.repository_owner }}/awx-operator
|
|
||||||
path: awx-operator
|
|
||||||
|
|
||||||
- name: Install playbook dependencies
|
- name: Install playbook dependencies
|
||||||
run: |
|
run: |
|
||||||
python3 -m pip install docker
|
python3 -m pip install docker
|
||||||
|
|
||||||
- name: Build and stage AWX
|
|
||||||
working-directory: awx
|
|
||||||
run: |
|
|
||||||
ansible-playbook -v tools/ansible/build.yml \
|
|
||||||
-e registry=ghcr.io \
|
|
||||||
-e registry_username=${{ github.actor }} \
|
|
||||||
-e registry_password=${{ secrets.GITHUB_TOKEN }} \
|
|
||||||
-e awx_image=${{ github.repository }} \
|
|
||||||
-e awx_version=${{ github.event.inputs.version }} \
|
|
||||||
-e ansible_python_interpreter=$(which python3) \
|
|
||||||
-e push=yes \
|
|
||||||
-e awx_official=yes
|
|
||||||
|
|
||||||
- name: Log into registry ghcr.io
|
- name: Log into registry ghcr.io
|
||||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Log into registry quay.io
|
- name: Copy logos for inclusion in sdist for official build
|
||||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
|
working-directory: awx
|
||||||
|
run: |
|
||||||
|
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
|
||||||
|
|
||||||
|
- name: Setup node and npm for new UI build
|
||||||
|
uses: actions/setup-node@v2
|
||||||
with:
|
with:
|
||||||
registry: quay.io
|
node-version: '18'
|
||||||
username: ${{ secrets.QUAY_USER }}
|
|
||||||
password: ${{ secrets.QUAY_TOKEN }}
|
- name: Prebuild new UI for awx image (to speed up build process)
|
||||||
|
working-directory: awx
|
||||||
|
run: make ui-next
|
||||||
|
|
||||||
|
- name: Set build env variables
|
||||||
|
run: |
|
||||||
|
echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
|
||||||
|
echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
|
||||||
|
echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
|
||||||
|
echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
|
||||||
|
echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
|
||||||
|
echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
|
||||||
|
echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
|
||||||
|
env:
|
||||||
|
OWNER: ${{ github.repository_owner }}
|
||||||
|
|
||||||
|
- name: Build and stage AWX
|
||||||
|
working-directory: awx
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDX_PUSH: true
|
||||||
|
HEADLESS: false
|
||||||
|
PLATFORMS: linux/amd64,linux/arm64
|
||||||
|
run: |
|
||||||
|
make awx-kube-buildx
|
||||||
|
|
||||||
- name: tag awx-ee:latest with version input
|
- name: tag awx-ee:latest with version input
|
||||||
run: |
|
run: |
|
||||||
docker pull quay.io/ansible/awx-ee:latest
|
docker buildx imagetools create \
|
||||||
docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
|
quay.io/ansible/awx-ee:latest \
|
||||||
docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
|
--tag ${AWX_EE_TEST_IMAGE}
|
||||||
|
|
||||||
- name: Stage awx-operator image
|
- name: Stage awx-operator image
|
||||||
working-directory: awx-operator
|
working-directory: awx-operator
|
||||||
run: |
|
run: |
|
||||||
BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
|
BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
|
||||||
--build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
|
--build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
|
||||||
IMG=ghcr.io/${{ github.repository_owner }}/awx-operator:${{ github.event.inputs.operator_version }} \
|
IMG=${AWX_OPERATOR_TEST_IMAGE} \
|
||||||
make docker-buildx
|
make docker-buildx
|
||||||
|
|
||||||
|
- name: Pulling images for test deployment with awx-operator
|
||||||
|
# awx operator molecue test expect to kind load image and buildx exports image to registry and not local
|
||||||
|
run: |
|
||||||
|
docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
|
||||||
|
docker pull -q ${AWX_EE_TEST_IMAGE}
|
||||||
|
docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
|
||||||
|
|
||||||
- name: Run test deployment with awx-operator
|
- name: Run test deployment with awx-operator
|
||||||
working-directory: awx-operator
|
working-directory: awx-operator
|
||||||
run: |
|
run: |
|
||||||
@@ -122,10 +148,6 @@ jobs:
|
|||||||
sudo rm -f $(which kustomize)
|
sudo rm -f $(which kustomize)
|
||||||
make kustomize
|
make kustomize
|
||||||
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
|
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
|
||||||
env:
|
|
||||||
AWX_TEST_IMAGE: ${{ github.repository }}
|
|
||||||
AWX_TEST_VERSION: ${{ github.event.inputs.version }}
|
|
||||||
AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
|
|
||||||
|
|
||||||
- name: Create draft release for AWX
|
- name: Create draft release for AWX
|
||||||
working-directory: awx
|
working-directory: awx
|
||||||
|
|||||||
4
.github/workflows/update_dependabot_prs.yml
vendored
4
.github/workflows/update_dependabot_prs.yml
vendored
@@ -13,7 +13,9 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout branch
|
- name: Checkout branch
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Update PR Body
|
- name: Update PR Body
|
||||||
env:
|
env:
|
||||||
|
|||||||
6
.github/workflows/upload_schema.yml
vendored
6
.github/workflows/upload_schema.yml
vendored
@@ -18,7 +18,9 @@ jobs:
|
|||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Get python version from Makefile
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
@@ -34,7 +36,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Pre-pull image to warm build cache
|
- name: Pre-pull image to warm build cache
|
||||||
run: |
|
run: |
|
||||||
docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
||||||
|
|
||||||
- name: Build image
|
- name: Build image
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -46,6 +46,11 @@ tools/docker-compose/overrides/
|
|||||||
tools/docker-compose-minikube/_sources
|
tools/docker-compose-minikube/_sources
|
||||||
tools/docker-compose/keycloak.awx.realm.json
|
tools/docker-compose/keycloak.awx.realm.json
|
||||||
|
|
||||||
|
!tools/docker-compose/editable_dependencies
|
||||||
|
tools/docker-compose/editable_dependencies/*
|
||||||
|
!tools/docker-compose/editable_dependencies/README.md
|
||||||
|
!tools/docker-compose/editable_dependencies/install.sh
|
||||||
|
|
||||||
# Tower setup playbook testing
|
# Tower setup playbook testing
|
||||||
setup/test/roles/postgresql
|
setup/test/roles/postgresql
|
||||||
**/provision_docker
|
**/provision_docker
|
||||||
|
|||||||
113
.vscode/launch.json
vendored
Normal file
113
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "run_ws_heartbeat",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_ws_heartbeat"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-ws-heartbeat",
|
||||||
|
"postDebugTask": "start awx-ws-heartbeat"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "run_cache_clear",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_cache_clear"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-cache-clear",
|
||||||
|
"postDebugTask": "start awx-cache-clear"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "run_callback_receiver",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_callback_receiver"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-receiver",
|
||||||
|
"postDebugTask": "start awx-receiver"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "run_dispatcher",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_dispatcher"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-dispatcher",
|
||||||
|
"postDebugTask": "start awx-dispatcher"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "run_rsyslog_configurer",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_rsyslog_configurer"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-rsyslog-configurer",
|
||||||
|
"postDebugTask": "start awx-rsyslog-configurer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "run_cache_clear",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_cache_clear"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-cache-clear",
|
||||||
|
"postDebugTask": "start awx-cache-clear"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "run_wsrelay",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["run_wsrelay"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-wsrelay",
|
||||||
|
"postDebugTask": "start awx-wsrelay"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "daphne",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "/var/lib/awx/venv/awx/bin/daphne",
|
||||||
|
"args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-daphne",
|
||||||
|
"postDebugTask": "start awx-daphne"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "runserver(uwsgi alternative)",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["runserver", "127.0.0.1:8052"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-uwsgi",
|
||||||
|
"postDebugTask": "start awx-uwsgi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "runserver_plus(uwsgi alternative)",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["runserver_plus", "127.0.0.1:8052"],
|
||||||
|
"django": true,
|
||||||
|
"preLaunchTask": "stop awx-uwsgi and install Werkzeug",
|
||||||
|
"postDebugTask": "start awx-uwsgi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "shell_plus",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "manage.py",
|
||||||
|
"args": ["shell_plus"],
|
||||||
|
"django": true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
100
.vscode/tasks.json
vendored
Normal file
100
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
{
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"label": "start awx-cache-clear",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-cache-clear"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-cache-clear",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-cache-clear"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-daphne",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-daphne"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-daphne",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-daphne"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-dispatcher",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-dispatcher"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-dispatcher",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-dispatcher"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-receiver",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-receiver"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-receiver",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-receiver"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-rsyslog-configurer",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-rsyslog-configurer",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-rsyslogd",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-rsyslogd"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-rsyslogd",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-rsyslogd"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-uwsgi",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-uwsgi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-uwsgi",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-uwsgi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-uwsgi and install Werkzeug",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-ws-heartbeat",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-ws-heartbeat"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-ws-heartbeat",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "start awx-wsrelay",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl start tower-processes:awx-wsrelay"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "stop awx-wsrelay",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "supervisorctl stop tower-processes:awx-wsrelay"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -11,6 +11,8 @@ ignore: |
|
|||||||
# django template files
|
# django template files
|
||||||
awx/api/templates/instance_install_bundle/**
|
awx/api/templates/instance_install_bundle/**
|
||||||
.readthedocs.yaml
|
.readthedocs.yaml
|
||||||
|
tools/loki
|
||||||
|
tools/otel
|
||||||
|
|
||||||
extends: default
|
extends: default
|
||||||
|
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
|
|||||||
|
|
||||||
#### Frontend Development
|
#### Frontend Development
|
||||||
|
|
||||||
See [the ui development documentation](awx/ui/CONTRIBUTING.md).
|
See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
|
||||||
|
|
||||||
#### Fork and clone the AWX repo
|
#### Fork and clone the AWX repo
|
||||||
|
|
||||||
@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for
|
|||||||
|
|
||||||
**NOTES**
|
**NOTES**
|
||||||
|
|
||||||
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
||||||
|
|
||||||
|
|
||||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||||
@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for
|
|||||||
|
|
||||||
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
|
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
|
||||||
|
|
||||||
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
|
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
|
||||||
|
|
||||||
|
|
||||||
## Submitting Pull Requests
|
## Submitting Pull Requests
|
||||||
@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
|
|||||||
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
|
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
|
||||||
|
|
||||||
## Reporting Issues
|
## Reporting Issues
|
||||||
|
|
||||||
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
|
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
|
||||||
|
|
||||||
## Getting Help
|
## Getting Help
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_tr
|
|||||||
Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
|
Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
|
||||||
These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
|
These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
|
||||||
|
|
||||||
The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
|
The `state:needs_triage` label will remain on your pull request until a person has looked at it.
|
||||||
|
|
||||||
You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
|
You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
|
||||||
The comment will look something like `CC @matburt @wwitzel3 ...`.
|
The comment will look something like `CC @matburt @wwitzel3 ...`.
|
||||||
|
|||||||
151
Makefile
151
Makefile
@@ -1,8 +1,8 @@
|
|||||||
-include awx/ui_next/Makefile
|
-include awx/ui_next/Makefile
|
||||||
|
|
||||||
PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
|
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
|
||||||
SHELL := bash
|
SHELL := bash
|
||||||
DOCKER_COMPOSE ?= docker-compose
|
DOCKER_COMPOSE ?= docker compose
|
||||||
OFFICIAL ?= no
|
OFFICIAL ?= no
|
||||||
NODE ?= node
|
NODE ?= node
|
||||||
NPM_BIN ?= npm
|
NPM_BIN ?= npm
|
||||||
@@ -47,6 +47,14 @@ VAULT ?= false
|
|||||||
VAULT_TLS ?= false
|
VAULT_TLS ?= false
|
||||||
# If set to true docker-compose will also start a tacacs+ instance
|
# If set to true docker-compose will also start a tacacs+ instance
|
||||||
TACACS ?= false
|
TACACS ?= false
|
||||||
|
# If set to true docker-compose will also start an OpenTelemetry Collector instance
|
||||||
|
OTEL ?= false
|
||||||
|
# If set to true docker-compose will also start a Loki instance
|
||||||
|
LOKI ?= false
|
||||||
|
# If set to true docker-compose will install editable dependencies
|
||||||
|
EDITABLE_DEPENDENCIES ?= false
|
||||||
|
# If set to true, use tls for postgres connection
|
||||||
|
PG_TLS ?= false
|
||||||
|
|
||||||
VENV_BASE ?= /var/lib/awx/venv
|
VENV_BASE ?= /var/lib/awx/venv
|
||||||
|
|
||||||
@@ -55,6 +63,11 @@ DEV_DOCKER_OWNER ?= ansible
|
|||||||
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
|
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
|
||||||
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
|
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
|
||||||
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
||||||
|
IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
|
||||||
|
IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
|
||||||
|
|
||||||
|
# Common command to use for running ansible-playbook
|
||||||
|
ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
|
||||||
|
|
||||||
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
||||||
|
|
||||||
@@ -63,7 +76,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
|||||||
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
|
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
|
||||||
# These should be upgraded in the AWX and Ansible venv before attempting
|
# These should be upgraded in the AWX and Ansible venv before attempting
|
||||||
# to install the actual requirements
|
# to install the actual requirements
|
||||||
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
|
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
|
||||||
|
|
||||||
NAME ?= awx
|
NAME ?= awx
|
||||||
|
|
||||||
@@ -75,6 +88,21 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
|
|||||||
|
|
||||||
I18N_FLAG_FILE = .i18n_built
|
I18N_FLAG_FILE = .i18n_built
|
||||||
|
|
||||||
|
## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
|
||||||
|
PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
|
||||||
|
|
||||||
|
# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
|
||||||
|
# DOCKER_CACHE=--no-cache make docker-compose-build
|
||||||
|
ifeq ($(DOCKER_CACHE),)
|
||||||
|
DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
|
||||||
|
DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
|
||||||
|
DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
|
||||||
|
else
|
||||||
|
DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
|
||||||
|
DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
|
||||||
|
DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
|
||||||
|
endif
|
||||||
|
|
||||||
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
|
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
|
||||||
develop refresh adduser migrate dbchange \
|
develop refresh adduser migrate dbchange \
|
||||||
receiver test test_unit test_coverage coverage_html \
|
receiver test test_unit test_coverage coverage_html \
|
||||||
@@ -213,8 +241,6 @@ collectstatic:
|
|||||||
fi; \
|
fi; \
|
||||||
$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
|
$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
|
||||||
|
|
||||||
DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
|
|
||||||
|
|
||||||
uwsgi: collectstatic
|
uwsgi: collectstatic
|
||||||
@if [ "$(VENV_BASE)" ]; then \
|
@if [ "$(VENV_BASE)" ]; then \
|
||||||
. $(VENV_BASE)/awx/bin/activate; \
|
. $(VENV_BASE)/awx/bin/activate; \
|
||||||
@@ -222,7 +248,7 @@ uwsgi: collectstatic
|
|||||||
uwsgi /etc/tower/uwsgi.ini
|
uwsgi /etc/tower/uwsgi.ini
|
||||||
|
|
||||||
awx-autoreload:
|
awx-autoreload:
|
||||||
@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
|
@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
|
||||||
|
|
||||||
daphne:
|
daphne:
|
||||||
@if [ "$(VENV_BASE)" ]; then \
|
@if [ "$(VENV_BASE)" ]; then \
|
||||||
@@ -302,7 +328,7 @@ swagger: reports
|
|||||||
@if [ "$(VENV_BASE)" ]; then \
|
@if [ "$(VENV_BASE)" ]; then \
|
||||||
. $(VENV_BASE)/awx/bin/activate; \
|
. $(VENV_BASE)/awx/bin/activate; \
|
||||||
fi; \
|
fi; \
|
||||||
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
|
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
|
||||||
|
|
||||||
check: black
|
check: black
|
||||||
|
|
||||||
@@ -359,7 +385,7 @@ symlink_collection:
|
|||||||
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
|
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
|
||||||
|
|
||||||
awx_collection_build: $(shell find awx_collection -type f)
|
awx_collection_build: $(shell find awx_collection -type f)
|
||||||
ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
|
$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
|
||||||
-e collection_package=$(COLLECTION_PACKAGE) \
|
-e collection_package=$(COLLECTION_PACKAGE) \
|
||||||
-e collection_namespace=$(COLLECTION_NAMESPACE) \
|
-e collection_namespace=$(COLLECTION_NAMESPACE) \
|
||||||
-e collection_version=$(COLLECTION_VERSION) \
|
-e collection_version=$(COLLECTION_VERSION) \
|
||||||
@@ -476,13 +502,7 @@ ui-test-general:
|
|||||||
$(NPM_BIN) run --prefix awx/ui pretest
|
$(NPM_BIN) run --prefix awx/ui pretest
|
||||||
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
|
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
|
||||||
|
|
||||||
# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
|
|
||||||
HEADLESS ?= no
|
|
||||||
ifeq ($(HEADLESS), yes)
|
|
||||||
dist/$(SDIST_TAR_FILE):
|
dist/$(SDIST_TAR_FILE):
|
||||||
else
|
|
||||||
dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
|
|
||||||
endif
|
|
||||||
$(PYTHON) -m build -s
|
$(PYTHON) -m build -s
|
||||||
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
|
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
|
||||||
|
|
||||||
@@ -513,10 +533,10 @@ endif
|
|||||||
|
|
||||||
docker-compose-sources: .git/hooks/pre-commit
|
docker-compose-sources: .git/hooks/pre-commit
|
||||||
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
|
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
|
||||||
ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
|
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
|
||||||
fi;
|
fi;
|
||||||
|
|
||||||
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
|
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
|
||||||
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
|
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
|
||||||
-e awx_image_tag=$(COMPOSE_TAG) \
|
-e awx_image_tag=$(COMPOSE_TAG) \
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE) \
|
-e receptor_image=$(RECEPTOR_IMAGE) \
|
||||||
@@ -532,16 +552,26 @@ docker-compose-sources: .git/hooks/pre-commit
|
|||||||
-e enable_vault=$(VAULT) \
|
-e enable_vault=$(VAULT) \
|
||||||
-e vault_tls=$(VAULT_TLS) \
|
-e vault_tls=$(VAULT_TLS) \
|
||||||
-e enable_tacacs=$(TACACS) \
|
-e enable_tacacs=$(TACACS) \
|
||||||
$(EXTRA_SOURCES_ANSIBLE_OPTS)
|
-e enable_otel=$(OTEL) \
|
||||||
|
-e enable_loki=$(LOKI) \
|
||||||
|
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
|
||||||
|
-e pg_tls=$(PG_TLS) \
|
||||||
|
$(EXTRA_SOURCES_ANSIBLE_OPTS)
|
||||||
|
|
||||||
docker-compose: awx/projects docker-compose-sources
|
docker-compose: awx/projects docker-compose-sources
|
||||||
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
|
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
|
||||||
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
|
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
|
||||||
-e enable_vault=$(VAULT) \
|
-e enable_vault=$(VAULT) \
|
||||||
-e vault_tls=$(VAULT_TLS) \
|
-e vault_tls=$(VAULT_TLS) \
|
||||||
-e enable_ldap=$(LDAP);
|
-e enable_ldap=$(LDAP); \
|
||||||
|
$(MAKE) docker-compose-up
|
||||||
|
|
||||||
|
docker-compose-up:
|
||||||
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
|
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
|
||||||
|
|
||||||
|
docker-compose-down:
|
||||||
|
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
|
||||||
|
|
||||||
docker-compose-credential-plugins: awx/projects docker-compose-sources
|
docker-compose-credential-plugins: awx/projects docker-compose-sources
|
||||||
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
|
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
|
||||||
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
|
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
|
||||||
@@ -573,7 +603,7 @@ docker-compose-container-group-clean:
|
|||||||
.PHONY: Dockerfile.dev
|
.PHONY: Dockerfile.dev
|
||||||
## Generate Dockerfile.dev for awx_devel image
|
## Generate Dockerfile.dev for awx_devel image
|
||||||
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
||||||
ansible-playbook tools/ansible/dockerfile.yml \
|
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
||||||
-e dockerfile_name=Dockerfile.dev \
|
-e dockerfile_name=Dockerfile.dev \
|
||||||
-e build_dev=True \
|
-e build_dev=True \
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE)
|
-e receptor_image=$(RECEPTOR_IMAGE)
|
||||||
@@ -584,37 +614,28 @@ docker-compose-build: Dockerfile.dev
|
|||||||
-f Dockerfile.dev \
|
-f Dockerfile.dev \
|
||||||
-t $(DEVEL_IMAGE_NAME) \
|
-t $(DEVEL_IMAGE_NAME) \
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
|
$(DOCKER_DEVEL_CACHE_FLAG) .
|
||||||
|
|
||||||
# ## Build awx_devel image for docker compose development environment for multiple architectures
|
|
||||||
# docker-compose-buildx: Dockerfile.dev
|
|
||||||
# DOCKER_BUILDKIT=1 docker build \
|
|
||||||
# -f Dockerfile.dev \
|
|
||||||
# -t $(DEVEL_IMAGE_NAME) \
|
|
||||||
# --build-arg BUILDKIT_INLINE_CACHE=1 \
|
|
||||||
# --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
|
|
||||||
|
|
||||||
## Build awx_devel image for docker compose development environment for multiple architectures
|
|
||||||
# PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
|
|
||||||
# architectures. (i.e. make docker-buildx IMG=myregistry/mypoperator:0.0.1). To use this option you need to:
|
|
||||||
# - able to use docker buildx . More info: https://docs.docker.com/build/buildx/
|
|
||||||
# - have enable BuildKit, More info: https://docs.docker.com/develop/develop-images/build_enhancements/
|
|
||||||
# - be able to push the image for your registry (i.e. if you do not inform a valid value via IMG=<myregistry/image:<tag>> than the export will fail)
|
|
||||||
# To properly provided solutions that supports more than one platform you should use this option.
|
|
||||||
PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
|
|
||||||
.PHONY: docker-compose-buildx
|
.PHONY: docker-compose-buildx
|
||||||
docker-compose-buildx: Dockerfile.dev ## Build and push docker image for the manager for cross-platform support
|
## Build awx_devel image for docker compose development environment for multiple architectures
|
||||||
- docker buildx create --name project-v3-builder
|
docker-compose-buildx: Dockerfile.dev
|
||||||
docker buildx use project-v3-builder
|
- docker buildx create --name docker-compose-buildx
|
||||||
- docker buildx build --push $(BUILD_ARGS) --platform=$(PLATFORMS) --tag $(DEVEL_IMAGE_NAME) -f Dockerfile.dev .
|
docker buildx use docker-compose-buildx
|
||||||
- docker buildx rm project-v3-builder
|
- docker buildx build \
|
||||||
|
--push \
|
||||||
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
|
$(DOCKER_DEVEL_CACHE_FLAG) \
|
||||||
|
--platform=$(PLATFORMS) \
|
||||||
|
--tag $(DEVEL_IMAGE_NAME) \
|
||||||
|
-f Dockerfile.dev .
|
||||||
|
- docker buildx rm docker-compose-buildx
|
||||||
|
|
||||||
docker-clean:
|
docker-clean:
|
||||||
-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
|
-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
|
||||||
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
|
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
|
||||||
|
|
||||||
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
|
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
|
||||||
docker volume rm -f tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
|
docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
|
||||||
|
|
||||||
docker-refresh: docker-clean docker-compose
|
docker-refresh: docker-clean docker-compose
|
||||||
|
|
||||||
@@ -636,9 +657,6 @@ clean-elk:
|
|||||||
docker rm tools_elasticsearch_1
|
docker rm tools_elasticsearch_1
|
||||||
docker rm tools_kibana_1
|
docker rm tools_kibana_1
|
||||||
|
|
||||||
psql-container:
|
|
||||||
docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
|
|
||||||
|
|
||||||
VERSION:
|
VERSION:
|
||||||
@echo "awx: $(VERSION)"
|
@echo "awx: $(VERSION)"
|
||||||
|
|
||||||
@@ -659,7 +677,7 @@ version-for-buildyml:
|
|||||||
.PHONY: Dockerfile
|
.PHONY: Dockerfile
|
||||||
## Generate Dockerfile for awx image
|
## Generate Dockerfile for awx image
|
||||||
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
||||||
ansible-playbook tools/ansible/dockerfile.yml \
|
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE) \
|
-e receptor_image=$(RECEPTOR_IMAGE) \
|
||||||
-e headless=$(HEADLESS)
|
-e headless=$(HEADLESS)
|
||||||
|
|
||||||
@@ -669,12 +687,29 @@ awx-kube-build: Dockerfile
|
|||||||
--build-arg VERSION=$(VERSION) \
|
--build-arg VERSION=$(VERSION) \
|
||||||
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
||||||
--build-arg HEADLESS=$(HEADLESS) \
|
--build-arg HEADLESS=$(HEADLESS) \
|
||||||
-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
|
$(DOCKER_KUBE_CACHE_FLAG) \
|
||||||
|
-t $(IMAGE_KUBE) .
|
||||||
|
|
||||||
|
## Build multi-arch awx image for deployment on Kubernetes environment.
|
||||||
|
awx-kube-buildx: Dockerfile
|
||||||
|
- docker buildx create --name awx-kube-buildx
|
||||||
|
docker buildx use awx-kube-buildx
|
||||||
|
- docker buildx build \
|
||||||
|
--push \
|
||||||
|
--build-arg VERSION=$(VERSION) \
|
||||||
|
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
||||||
|
--build-arg HEADLESS=$(HEADLESS) \
|
||||||
|
--platform=$(PLATFORMS) \
|
||||||
|
$(DOCKER_KUBE_CACHE_FLAG) \
|
||||||
|
--tag $(IMAGE_KUBE) \
|
||||||
|
-f Dockerfile .
|
||||||
|
- docker buildx rm awx-kube-buildx
|
||||||
|
|
||||||
|
|
||||||
.PHONY: Dockerfile.kube-dev
|
.PHONY: Dockerfile.kube-dev
|
||||||
## Generate Docker.kube-dev for awx_kube_devel image
|
## Generate Docker.kube-dev for awx_kube_devel image
|
||||||
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
||||||
ansible-playbook tools/ansible/dockerfile.yml \
|
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
||||||
-e dockerfile_name=Dockerfile.kube-dev \
|
-e dockerfile_name=Dockerfile.kube-dev \
|
||||||
-e kube_dev=True \
|
-e kube_dev=True \
|
||||||
-e template_dest=_build_kube_dev \
|
-e template_dest=_build_kube_dev \
|
||||||
@@ -684,12 +719,24 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
|||||||
awx-kube-dev-build: Dockerfile.kube-dev
|
awx-kube-dev-build: Dockerfile.kube-dev
|
||||||
DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
|
DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
|
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
|
||||||
-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
|
-t $(IMAGE_KUBE_DEV) .
|
||||||
|
|
||||||
|
## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
|
||||||
|
awx-kube-dev-buildx: Dockerfile.kube-dev
|
||||||
|
- docker buildx create --name awx-kube-dev-buildx
|
||||||
|
docker buildx use awx-kube-dev-buildx
|
||||||
|
- docker buildx build \
|
||||||
|
--push \
|
||||||
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
|
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
|
||||||
|
--platform=$(PLATFORMS) \
|
||||||
|
--tag $(IMAGE_KUBE_DEV) \
|
||||||
|
-f Dockerfile.kube-dev .
|
||||||
|
- docker buildx rm awx-kube-dev-buildx
|
||||||
|
|
||||||
kind-dev-load: awx-kube-dev-build
|
kind-dev-load: awx-kube-dev-build
|
||||||
$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
|
$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
|
||||||
|
|
||||||
# Translation TASKS
|
# Translation TASKS
|
||||||
# --------------------------------------
|
# --------------------------------------
|
||||||
|
|||||||
@@ -154,10 +154,12 @@ def manage():
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.management import execute_from_command_line
|
from django.core.management import execute_from_command_line
|
||||||
|
|
||||||
# enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
|
# enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
|
||||||
|
# In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
|
||||||
|
# The return of connection.pg_version is something like 12013
|
||||||
if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
|
if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
|
||||||
if (connection.pg_version // 10000) < 12:
|
if (connection.pg_version // 10000) < 12:
|
||||||
sys.stderr.write("Postgres version 12 is required\n")
|
sys.stderr.write("At a minimum, postgres version 12 is required\n")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
|
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
|
||||||
|
|||||||
@@ -93,6 +93,7 @@ register(
|
|||||||
default='',
|
default='',
|
||||||
label=_('Login redirect override URL'),
|
label=_('Login redirect override URL'),
|
||||||
help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
|
help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
|
||||||
|
warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
|
||||||
category=_('Authentication'),
|
category=_('Authentication'),
|
||||||
category_slug='authentication',
|
category_slug='authentication',
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -30,14 +30,21 @@ from rest_framework.permissions import IsAuthenticated
|
|||||||
from rest_framework.renderers import StaticHTMLRenderer
|
from rest_framework.renderers import StaticHTMLRenderer
|
||||||
from rest_framework.negotiation import DefaultContentNegotiation
|
from rest_framework.negotiation import DefaultContentNegotiation
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
|
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
|
||||||
from ansible_base.lib.utils.models import get_all_field_names
|
from ansible_base.lib.utils.models import get_all_field_names
|
||||||
|
from ansible_base.lib.utils.requests import get_remote_host
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
|
||||||
|
from ansible_base.rbac.permission_registry import permission_registry
|
||||||
|
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
|
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
|
||||||
|
from awx.main.models.rbac import give_creator_permissions
|
||||||
from awx.main.access import optimize_queryset
|
from awx.main.access import optimize_queryset
|
||||||
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
|
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
|
||||||
from awx.main.utils.licensing import server_product_name
|
from awx.main.utils.licensing import server_product_name
|
||||||
|
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
|
||||||
from awx.main.views import ApiErrorView
|
from awx.main.views import ApiErrorView
|
||||||
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
|
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
|
||||||
from awx.api.versioning import URLPathVersioning
|
from awx.api.versioning import URLPathVersioning
|
||||||
@@ -89,20 +96,26 @@ class LoggedLoginView(auth_views.LoginView):
|
|||||||
|
|
||||||
def post(self, request, *args, **kwargs):
|
def post(self, request, *args, **kwargs):
|
||||||
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
|
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
|
||||||
|
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
|
||||||
if request.user.is_authenticated:
|
if request.user.is_authenticated:
|
||||||
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
|
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
|
||||||
ret.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
|
ret.set_cookie(
|
||||||
|
'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
|
||||||
|
)
|
||||||
ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
|
ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
else:
|
else:
|
||||||
if 'username' in self.request.POST:
|
if 'username' in self.request.POST:
|
||||||
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
|
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
|
||||||
ret.status_code = 401
|
ret.status_code = 401
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
class LoggedLogoutView(auth_views.LogoutView):
|
class LoggedLogoutView(auth_views.LogoutView):
|
||||||
|
|
||||||
|
success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
|
||||||
|
|
||||||
def dispatch(self, request, *args, **kwargs):
|
def dispatch(self, request, *args, **kwargs):
|
||||||
original_user = getattr(request, 'user', None)
|
original_user = getattr(request, 'user', None)
|
||||||
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
|
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
|
||||||
@@ -142,22 +155,23 @@ class APIView(views.APIView):
|
|||||||
Store the Django REST Framework Request object as an attribute on the
|
Store the Django REST Framework Request object as an attribute on the
|
||||||
normal Django request, store time the request started.
|
normal Django request, store time the request started.
|
||||||
"""
|
"""
|
||||||
|
remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
|
||||||
|
|
||||||
self.time_started = time.time()
|
self.time_started = time.time()
|
||||||
if getattr(settings, 'SQL_DEBUG', False):
|
if getattr(settings, 'SQL_DEBUG', False):
|
||||||
self.queries_before = len(connection.queries)
|
self.queries_before = len(connection.queries)
|
||||||
|
|
||||||
|
if 'HTTP_X_TRUSTED_PROXY' in request.environ:
|
||||||
|
if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
|
||||||
|
remote_headers = settings.REMOTE_HOST_HEADERS
|
||||||
|
else:
|
||||||
|
logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
|
||||||
|
|
||||||
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
|
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
|
||||||
# they respect the allowed proxy list
|
# they respect the allowed proxy list
|
||||||
if all(
|
if settings.PROXY_IP_ALLOWED_LIST:
|
||||||
[
|
if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
|
||||||
settings.PROXY_IP_ALLOWED_LIST,
|
delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
|
||||||
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
|
|
||||||
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
|
|
||||||
]
|
|
||||||
):
|
|
||||||
for custom_header in settings.REMOTE_HOST_HEADERS:
|
|
||||||
if custom_header.startswith('HTTP_'):
|
|
||||||
request.environ.pop(custom_header, None)
|
|
||||||
|
|
||||||
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
|
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
|
||||||
request.drf_request = drf_request
|
request.drf_request = drf_request
|
||||||
@@ -202,17 +216,21 @@ class APIView(views.APIView):
|
|||||||
return response
|
return response
|
||||||
|
|
||||||
if response.status_code >= 400:
|
if response.status_code >= 400:
|
||||||
|
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
|
||||||
msg_data = {
|
msg_data = {
|
||||||
'status_code': response.status_code,
|
'status_code': response.status_code,
|
||||||
'user_name': request.user,
|
'user_name': request.user,
|
||||||
'url_path': request.path,
|
'url_path': request.path,
|
||||||
'remote_addr': request.META.get('REMOTE_ADDR', None),
|
'remote_addr': ip,
|
||||||
}
|
}
|
||||||
|
|
||||||
if type(response.data) is dict:
|
if type(response.data) is dict:
|
||||||
msg_data['error'] = response.data.get('error', response.status_text)
|
msg_data['error'] = response.data.get('error', response.status_text)
|
||||||
elif type(response.data) is list:
|
elif type(response.data) is list:
|
||||||
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
|
if len(response.data) > 0 and isinstance(response.data[0], str):
|
||||||
|
msg_data['error'] = str(response.data[0])
|
||||||
|
else:
|
||||||
|
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
|
||||||
else:
|
else:
|
||||||
msg_data['error'] = response.status_text
|
msg_data['error'] = response.status_text
|
||||||
|
|
||||||
@@ -472,7 +490,11 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
|
|||||||
|
|
||||||
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
|
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
|
||||||
# Base class for a list view that allows creating new objects.
|
# Base class for a list view that allows creating new objects.
|
||||||
pass
|
def perform_create(self, serializer):
|
||||||
|
super().perform_create(serializer)
|
||||||
|
if serializer.Meta.model in permission_registry.all_registered_models:
|
||||||
|
if self.request and self.request.user:
|
||||||
|
give_creator_permissions(self.request.user, serializer.instance)
|
||||||
|
|
||||||
|
|
||||||
class ParentMixin(object):
|
class ParentMixin(object):
|
||||||
@@ -792,6 +814,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class ResourceAccessList(ParentMixin, ListAPIView):
|
class ResourceAccessList(ParentMixin, ListAPIView):
|
||||||
|
deprecated = True
|
||||||
serializer_class = ResourceAccessListElementSerializer
|
serializer_class = ResourceAccessListElementSerializer
|
||||||
ordering = ('username',)
|
ordering = ('username',)
|
||||||
|
|
||||||
@@ -799,6 +822,15 @@ class ResourceAccessList(ParentMixin, ListAPIView):
|
|||||||
obj = self.get_parent_object()
|
obj = self.get_parent_object()
|
||||||
|
|
||||||
content_type = ContentType.objects.get_for_model(obj)
|
content_type = ContentType.objects.get_for_model(obj)
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
|
||||||
|
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
|
||||||
|
auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
|
||||||
|
if auditor_role:
|
||||||
|
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
|
||||||
|
return qs.distinct()
|
||||||
|
|
||||||
roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))
|
roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))
|
||||||
|
|
||||||
ancestors = set()
|
ancestors = set()
|
||||||
@@ -958,7 +990,7 @@ class CopyAPIView(GenericAPIView):
|
|||||||
None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
|
None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
|
||||||
)
|
)
|
||||||
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
|
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
|
||||||
new_obj.admin_role.members.add(request.user)
|
give_creator_permissions(request.user, new_obj)
|
||||||
if sub_objs:
|
if sub_objs:
|
||||||
permission_check_func = None
|
permission_check_func = None
|
||||||
if hasattr(type(self), 'deep_copy_permission_check_func'):
|
if hasattr(type(self), 'deep_copy_permission_check_func'):
|
||||||
|
|||||||
@@ -36,11 +36,13 @@ class Metadata(metadata.SimpleMetadata):
|
|||||||
field_info = OrderedDict()
|
field_info = OrderedDict()
|
||||||
field_info['type'] = self.label_lookup[field]
|
field_info['type'] = self.label_lookup[field]
|
||||||
field_info['required'] = getattr(field, 'required', False)
|
field_info['required'] = getattr(field, 'required', False)
|
||||||
|
field_info['hidden'] = getattr(field, 'hidden', False)
|
||||||
|
|
||||||
text_attrs = [
|
text_attrs = [
|
||||||
'read_only',
|
'read_only',
|
||||||
'label',
|
'label',
|
||||||
'help_text',
|
'help_text',
|
||||||
|
'warning_text',
|
||||||
'min_length',
|
'min_length',
|
||||||
'max_length',
|
'max_length',
|
||||||
'min_value',
|
'min_value',
|
||||||
@@ -101,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
|
|||||||
default = field.get_default()
|
default = field.get_default()
|
||||||
if type(default) is UUID:
|
if type(default) is UUID:
|
||||||
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
|
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
|
||||||
if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
|
if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
|
||||||
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
|
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
|
||||||
field_info['default'] = default
|
field_info['default'] = default
|
||||||
except serializers.SkipField:
|
except serializers.SkipField:
|
||||||
|
|||||||
@@ -43,11 +43,14 @@ from rest_framework.utils.serializer_helpers import ReturnList
|
|||||||
# Django-Polymorphic
|
# Django-Polymorphic
|
||||||
from polymorphic.models import PolymorphicModel
|
from polymorphic.models import PolymorphicModel
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
from ansible_base.lib.utils.models import get_type_for_model
|
from ansible_base.lib.utils.models import get_type_for_model
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.access import get_user_capabilities
|
from awx.main.access import get_user_capabilities
|
||||||
from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE
|
from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission
|
||||||
from awx.main.models import (
|
from awx.main.models import (
|
||||||
ActivityStream,
|
ActivityStream,
|
||||||
AdHocCommand,
|
AdHocCommand,
|
||||||
@@ -102,7 +105,7 @@ from awx.main.models import (
|
|||||||
CLOUD_INVENTORY_SOURCES,
|
CLOUD_INVENTORY_SOURCES,
|
||||||
)
|
)
|
||||||
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
|
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
|
||||||
from awx.main.models.rbac import role_summary_fields_generator, RoleAncestorEntry
|
from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role
|
||||||
from awx.main.fields import ImplicitRoleField
|
from awx.main.fields import ImplicitRoleField
|
||||||
from awx.main.utils import (
|
from awx.main.utils import (
|
||||||
get_model_for_type,
|
get_model_for_type,
|
||||||
@@ -191,6 +194,7 @@ SUMMARIZABLE_FK_FIELDS = {
|
|||||||
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
|
||||||
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
|
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
|
||||||
'credential_type': DEFAULT_SUMMARY_FIELDS,
|
'credential_type': DEFAULT_SUMMARY_FIELDS,
|
||||||
|
'resource': ('ansible_id', 'resource_type'),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -2762,13 +2766,26 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
team_content_type = ContentType.objects.get_for_model(Team)
|
team_content_type = ContentType.objects.get_for_model(Team)
|
||||||
content_type = ContentType.objects.get_for_model(obj)
|
content_type = ContentType.objects.get_for_model(obj)
|
||||||
|
|
||||||
def get_roles_on_resource(parent_role):
|
reversed_org_map = {}
|
||||||
"Returns a string list of the roles a parent_role has for current obj."
|
for k, v in org_role_to_permission.items():
|
||||||
return list(
|
reversed_org_map[v] = k
|
||||||
RoleAncestorEntry.objects.filter(ancestor=parent_role, content_type_id=content_type.id, object_id=obj.id)
|
reversed_role_map = {}
|
||||||
.values_list('role_field', flat=True)
|
for k, v in to_permissions.items():
|
||||||
.distinct()
|
reversed_role_map[v] = k
|
||||||
)
|
|
||||||
|
def get_roles_from_perms(perm_list):
|
||||||
|
"""given a list of permission codenames return a list of role names"""
|
||||||
|
role_names = set()
|
||||||
|
for codename in perm_list:
|
||||||
|
action = codename.split('_', 1)[0]
|
||||||
|
if action in reversed_role_map:
|
||||||
|
role_names.add(reversed_role_map[action])
|
||||||
|
elif codename in reversed_org_map:
|
||||||
|
if isinstance(obj, Organization):
|
||||||
|
role_names.add(reversed_org_map[codename])
|
||||||
|
if 'view_organization' not in role_names:
|
||||||
|
role_names.add('read_role')
|
||||||
|
return list(role_names)
|
||||||
|
|
||||||
def format_role_perm(role):
|
def format_role_perm(role):
|
||||||
role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
|
role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
|
||||||
@@ -2785,13 +2802,21 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
else:
|
else:
|
||||||
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
||||||
role_dict['user_capabilities'] = {'unattach': False}
|
role_dict['user_capabilities'] = {'unattach': False}
|
||||||
return {'role': role_dict, 'descendant_roles': get_roles_on_resource(role)}
|
|
||||||
|
model_name = content_type.model
|
||||||
|
if isinstance(obj, Organization):
|
||||||
|
descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name) or codename.startswith('add_')]
|
||||||
|
else:
|
||||||
|
descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name)]
|
||||||
|
|
||||||
|
return {'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)}
|
||||||
|
|
||||||
def format_team_role_perm(naive_team_role, permissive_role_ids):
|
def format_team_role_perm(naive_team_role, permissive_role_ids):
|
||||||
ret = []
|
ret = []
|
||||||
|
team = naive_team_role.content_object
|
||||||
team_role = naive_team_role
|
team_role = naive_team_role
|
||||||
if naive_team_role.role_field == 'admin_role':
|
if naive_team_role.role_field == 'admin_role':
|
||||||
team_role = naive_team_role.content_object.member_role
|
team_role = team.member_role
|
||||||
for role in team_role.children.filter(id__in=permissive_role_ids).all():
|
for role in team_role.children.filter(id__in=permissive_role_ids).all():
|
||||||
role_dict = {
|
role_dict = {
|
||||||
'id': role.id,
|
'id': role.id,
|
||||||
@@ -2811,10 +2836,87 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
else:
|
else:
|
||||||
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
||||||
role_dict['user_capabilities'] = {'unattach': False}
|
role_dict['user_capabilities'] = {'unattach': False}
|
||||||
ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(team_role)})
|
|
||||||
|
descendant_perms = list(
|
||||||
|
RoleEvaluation.objects.filter(role__in=team.has_roles.all(), object_id=obj.id, content_type_id=content_type.id)
|
||||||
|
.values_list('codename', flat=True)
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
|
ret.append({'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)})
|
||||||
|
return ret
|
||||||
|
|
||||||
|
gfk_kwargs = dict(content_type_id=content_type.id, object_id=obj.id)
|
||||||
|
direct_permissive_role_ids = Role.objects.filter(**gfk_kwargs).values_list('id', flat=True)
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
ret['summary_fields']['direct_access'] = []
|
||||||
|
ret['summary_fields']['indirect_access'] = []
|
||||||
|
|
||||||
|
new_roles_seen = set()
|
||||||
|
all_team_roles = set()
|
||||||
|
all_permissive_role_ids = set()
|
||||||
|
for evaluation in RoleEvaluation.objects.filter(role__in=user.has_roles.all(), **gfk_kwargs).prefetch_related('role'):
|
||||||
|
new_role = evaluation.role
|
||||||
|
if new_role.id in new_roles_seen:
|
||||||
|
continue
|
||||||
|
new_roles_seen.add(new_role.id)
|
||||||
|
old_role = get_role_from_object_role(new_role)
|
||||||
|
all_permissive_role_ids.add(old_role.id)
|
||||||
|
|
||||||
|
if int(new_role.object_id) == obj.id and new_role.content_type_id == content_type.id:
|
||||||
|
ret['summary_fields']['direct_access'].append(format_role_perm(old_role))
|
||||||
|
elif new_role.content_type_id == team_content_type.id:
|
||||||
|
all_team_roles.add(old_role)
|
||||||
|
else:
|
||||||
|
ret['summary_fields']['indirect_access'].append(format_role_perm(old_role))
|
||||||
|
|
||||||
|
# Lazy role creation gives us a big problem, where some intermediate roles are not easy to find
|
||||||
|
# like when a team has indirect permission, so here we get all roles the users teams have
|
||||||
|
# these contribute to all potential permission-granting roles of the object
|
||||||
|
user_teams_qs = permission_registry.team_model.objects.filter(member_roles__in=ObjectRole.objects.filter(users=user))
|
||||||
|
team_obj_roles = ObjectRole.objects.filter(teams__in=user_teams_qs)
|
||||||
|
for evaluation in RoleEvaluation.objects.filter(role__in=team_obj_roles, **gfk_kwargs).prefetch_related('role'):
|
||||||
|
new_role = evaluation.role
|
||||||
|
if new_role.id in new_roles_seen:
|
||||||
|
continue
|
||||||
|
new_roles_seen.add(new_role.id)
|
||||||
|
old_role = get_role_from_object_role(new_role)
|
||||||
|
all_permissive_role_ids.add(old_role.id)
|
||||||
|
|
||||||
|
# In DAB RBAC, superuser is strictly a user flag, and global roles are not in the RoleEvaluation table
|
||||||
|
if user.is_superuser:
|
||||||
|
ret['summary_fields'].setdefault('indirect_access', [])
|
||||||
|
all_role_names = [field.name for field in obj._meta.get_fields() if isinstance(field, ImplicitRoleField)]
|
||||||
|
ret['summary_fields']['indirect_access'].append(
|
||||||
|
{
|
||||||
|
"role": {
|
||||||
|
"id": None,
|
||||||
|
"name": _("System Administrator"),
|
||||||
|
"description": _("Can manage all aspects of the system"),
|
||||||
|
"user_capabilities": {"unattach": False},
|
||||||
|
},
|
||||||
|
"descendant_roles": all_role_names,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
elif user.is_system_auditor:
|
||||||
|
ret['summary_fields'].setdefault('indirect_access', [])
|
||||||
|
ret['summary_fields']['indirect_access'].append(
|
||||||
|
{
|
||||||
|
"role": {
|
||||||
|
"id": None,
|
||||||
|
"name": _("System Auditor"),
|
||||||
|
"description": _("Can view all aspects of the system"),
|
||||||
|
"user_capabilities": {"unattach": False},
|
||||||
|
},
|
||||||
|
"descendant_roles": ["read_role"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
ret['summary_fields']['direct_access'].extend([y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in all_team_roles) for y in x])
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
|
|
||||||
all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
|
all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
|
||||||
|
|
||||||
direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
|
direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
|
||||||
@@ -3083,7 +3185,7 @@ class CredentialSerializerCreate(CredentialSerializer):
|
|||||||
credential = super(CredentialSerializerCreate, self).create(validated_data)
|
credential = super(CredentialSerializerCreate, self).create(validated_data)
|
||||||
|
|
||||||
if user:
|
if user:
|
||||||
credential.admin_role.members.add(user)
|
give_creator_permissions(user, credential)
|
||||||
if team:
|
if team:
|
||||||
if not credential.organization or team.organization.id != credential.organization.id:
|
if not credential.organization or team.organization.id != credential.organization.id:
|
||||||
raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
|
raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
|
||||||
@@ -5279,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def get_body(self, obj):
|
def get_body(self, obj):
|
||||||
if obj.notification_type in ('webhook', 'pagerduty'):
|
if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
|
||||||
if isinstance(obj.body, dict):
|
if isinstance(obj.body, dict):
|
||||||
if 'body' in obj.body:
|
if 'body' in obj.body:
|
||||||
return obj.body['body']
|
return obj.body['body']
|
||||||
@@ -5301,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
|
|||||||
def to_representation(self, obj):
|
def to_representation(self, obj):
|
||||||
ret = super(NotificationSerializer, self).to_representation(obj)
|
ret = super(NotificationSerializer, self).to_representation(obj)
|
||||||
|
|
||||||
if obj.notification_type == 'webhook':
|
if obj.notification_type in ('webhook', 'awssns'):
|
||||||
ret.pop('subject')
|
ret.pop('subject')
|
||||||
if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
|
if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
|
||||||
ret.pop('body')
|
ret.pop('body')
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
@@ -5594,7 +5696,7 @@ class InstanceSerializer(BaseSerializer):
|
|||||||
res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
|
res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
|
||||||
res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
|
res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
|
||||||
res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
|
res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
|
||||||
if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
|
if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP] and not obj.managed:
|
||||||
res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
|
res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
|
||||||
if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
|
if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
|
||||||
if obj.node_type == 'execution':
|
if obj.node_type == 'execution':
|
||||||
|
|||||||
@@ -2,6 +2,12 @@
|
|||||||
- hosts: all
|
- hosts: all
|
||||||
become: yes
|
become: yes
|
||||||
tasks:
|
tasks:
|
||||||
|
- name: Create the receptor group
|
||||||
|
group:
|
||||||
|
{% verbatim %}
|
||||||
|
name: "{{ receptor_group }}"
|
||||||
|
{% endverbatim %}
|
||||||
|
state: present
|
||||||
- name: Create the receptor user
|
- name: Create the receptor user
|
||||||
user:
|
user:
|
||||||
{% verbatim %}
|
{% verbatim %}
|
||||||
|
|||||||
@@ -2,28 +2,21 @@
|
|||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.urls import NoReverseMatch
|
|
||||||
|
|
||||||
from rest_framework.reverse import _reverse
|
from rest_framework.reverse import reverse as drf_reverse
|
||||||
from rest_framework.versioning import URLPathVersioning as BaseVersioning
|
from rest_framework.versioning import URLPathVersioning as BaseVersioning
|
||||||
|
|
||||||
|
|
||||||
def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
|
def is_optional_api_urlpattern_prefix_request(request):
|
||||||
"""
|
if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
|
||||||
Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
|
if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
|
||||||
query string parameters.
|
return True
|
||||||
"""
|
return False
|
||||||
scheme = getattr(request, 'versioning_scheme', None)
|
|
||||||
if scheme is not None:
|
|
||||||
try:
|
|
||||||
url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
|
|
||||||
except NoReverseMatch:
|
|
||||||
# In case the versioning scheme reversal fails, fallback to the
|
|
||||||
# default implementation
|
|
||||||
url = _reverse(viewname, args, kwargs, request, format, **extra)
|
|
||||||
else:
|
|
||||||
url = _reverse(viewname, args, kwargs, request, format, **extra)
|
|
||||||
|
|
||||||
|
|
||||||
|
def transform_optional_api_urlpattern_prefix_url(request, url):
|
||||||
|
if is_optional_api_urlpattern_prefix_request(request):
|
||||||
|
url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
|
||||||
return url
|
return url
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -60,6 +60,11 @@ from oauth2_provider.models import get_access_token_model
|
|||||||
import pytz
|
import pytz
|
||||||
from wsgiref.util import FileWrapper
|
from wsgiref.util import FileWrapper
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.lib.utils.requests import get_remote_hosts
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
|
||||||
|
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
|
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
|
||||||
from awx.main.access import get_user_queryset
|
from awx.main.access import get_user_queryset
|
||||||
@@ -87,6 +92,7 @@ from awx.api.generics import (
|
|||||||
from awx.api.views.labels import LabelSubListCreateAttachDetachView
|
from awx.api.views.labels import LabelSubListCreateAttachDetachView
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
from awx.main import models
|
from awx.main import models
|
||||||
|
from awx.main.models.rbac import get_role_definition
|
||||||
from awx.main.utils import (
|
from awx.main.utils import (
|
||||||
camelcase_to_underscore,
|
camelcase_to_underscore,
|
||||||
extract_ansible_vars,
|
extract_ansible_vars,
|
||||||
@@ -124,6 +130,7 @@ from awx.api.views.mixin import (
|
|||||||
from awx.api.pagination import UnifiedJobEventPagination
|
from awx.api.pagination import UnifiedJobEventPagination
|
||||||
from awx.main.utils import set_environ
|
from awx.main.utils import set_environ
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.api.views')
|
logger = logging.getLogger('awx.api.views')
|
||||||
|
|
||||||
|
|
||||||
@@ -272,16 +279,24 @@ class DashboardJobsGraphView(APIView):
|
|||||||
|
|
||||||
success_query = user_unified_jobs.filter(status='successful')
|
success_query = user_unified_jobs.filter(status='successful')
|
||||||
failed_query = user_unified_jobs.filter(status='failed')
|
failed_query = user_unified_jobs.filter(status='failed')
|
||||||
|
canceled_query = user_unified_jobs.filter(status='canceled')
|
||||||
|
error_query = user_unified_jobs.filter(status='error')
|
||||||
|
|
||||||
if job_type == 'inv_sync':
|
if job_type == 'inv_sync':
|
||||||
success_query = success_query.filter(instance_of=models.InventoryUpdate)
|
success_query = success_query.filter(instance_of=models.InventoryUpdate)
|
||||||
failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
|
failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
|
||||||
|
canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
|
||||||
|
error_query = error_query.filter(instance_of=models.InventoryUpdate)
|
||||||
elif job_type == 'playbook_run':
|
elif job_type == 'playbook_run':
|
||||||
success_query = success_query.filter(instance_of=models.Job)
|
success_query = success_query.filter(instance_of=models.Job)
|
||||||
failed_query = failed_query.filter(instance_of=models.Job)
|
failed_query = failed_query.filter(instance_of=models.Job)
|
||||||
|
canceled_query = canceled_query.filter(instance_of=models.Job)
|
||||||
|
error_query = error_query.filter(instance_of=models.Job)
|
||||||
elif job_type == 'scm_update':
|
elif job_type == 'scm_update':
|
||||||
success_query = success_query.filter(instance_of=models.ProjectUpdate)
|
success_query = success_query.filter(instance_of=models.ProjectUpdate)
|
||||||
failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
|
failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
|
||||||
|
canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
|
||||||
|
error_query = error_query.filter(instance_of=models.ProjectUpdate)
|
||||||
|
|
||||||
end = now()
|
end = now()
|
||||||
interval = 'day'
|
interval = 'day'
|
||||||
@@ -297,10 +312,12 @@ class DashboardJobsGraphView(APIView):
|
|||||||
else:
|
else:
|
||||||
return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
|
return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
dashboard_data = {"jobs": {"successful": [], "failed": []}}
|
dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}}
|
||||||
|
|
||||||
succ_list = dashboard_data['jobs']['successful']
|
succ_list = dashboard_data['jobs']['successful']
|
||||||
fail_list = dashboard_data['jobs']['failed']
|
fail_list = dashboard_data['jobs']['failed']
|
||||||
|
canceled_list = dashboard_data['jobs']['canceled']
|
||||||
|
error_list = dashboard_data['jobs']['error']
|
||||||
|
|
||||||
qs_s = (
|
qs_s = (
|
||||||
success_query.filter(finished__range=(start, end))
|
success_query.filter(finished__range=(start, end))
|
||||||
@@ -318,6 +335,22 @@ class DashboardJobsGraphView(APIView):
|
|||||||
.annotate(agg=Count('id', distinct=True))
|
.annotate(agg=Count('id', distinct=True))
|
||||||
)
|
)
|
||||||
data_f = {item['d']: item['agg'] for item in qs_f}
|
data_f = {item['d']: item['agg'] for item in qs_f}
|
||||||
|
qs_c = (
|
||||||
|
canceled_query.filter(finished__range=(start, end))
|
||||||
|
.annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
|
||||||
|
.order_by()
|
||||||
|
.values('d')
|
||||||
|
.annotate(agg=Count('id', distinct=True))
|
||||||
|
)
|
||||||
|
data_c = {item['d']: item['agg'] for item in qs_c}
|
||||||
|
qs_e = (
|
||||||
|
error_query.filter(finished__range=(start, end))
|
||||||
|
.annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
|
||||||
|
.order_by()
|
||||||
|
.values('d')
|
||||||
|
.annotate(agg=Count('id', distinct=True))
|
||||||
|
)
|
||||||
|
data_e = {item['d']: item['agg'] for item in qs_e}
|
||||||
|
|
||||||
start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
|
start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
for d in itertools.count():
|
for d in itertools.count():
|
||||||
@@ -326,6 +359,8 @@ class DashboardJobsGraphView(APIView):
|
|||||||
break
|
break
|
||||||
succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
|
succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
|
||||||
fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
|
fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
|
||||||
|
canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
|
||||||
|
error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])
|
||||||
|
|
||||||
return Response(dashboard_data)
|
return Response(dashboard_data)
|
||||||
|
|
||||||
@@ -508,6 +543,7 @@ class InstanceGroupAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class InstanceGroupObjectRolesList(SubListAPIView):
|
class InstanceGroupObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.InstanceGroup
|
parent_model = models.InstanceGroup
|
||||||
@@ -677,16 +713,81 @@ class AuthView(APIView):
|
|||||||
return Response(data)
|
return Response(data)
|
||||||
|
|
||||||
|
|
||||||
|
def immutablesharedfields(cls):
|
||||||
|
'''
|
||||||
|
Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
|
||||||
|
|
||||||
|
Works by overriding these view methods:
|
||||||
|
- create
|
||||||
|
- delete
|
||||||
|
- perform_update
|
||||||
|
create and delete are overridden to raise a PermissionDenied exception.
|
||||||
|
perform_update is overridden to check if any shared fields are being modified,
|
||||||
|
and raise a PermissionDenied exception if so.
|
||||||
|
'''
|
||||||
|
# create instead of perform_create because some of our views
|
||||||
|
# override create instead of perform_create
|
||||||
|
if hasattr(cls, 'create'):
|
||||||
|
cls.original_create = cls.create
|
||||||
|
|
||||||
|
@functools.wraps(cls.create)
|
||||||
|
def create_wrapper(*args, **kwargs):
|
||||||
|
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
return cls.original_create(*args, **kwargs)
|
||||||
|
raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
|
||||||
|
|
||||||
|
cls.create = create_wrapper
|
||||||
|
|
||||||
|
if hasattr(cls, 'delete'):
|
||||||
|
cls.original_delete = cls.delete
|
||||||
|
|
||||||
|
@functools.wraps(cls.delete)
|
||||||
|
def delete_wrapper(*args, **kwargs):
|
||||||
|
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
return cls.original_delete(*args, **kwargs)
|
||||||
|
raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
|
||||||
|
|
||||||
|
cls.delete = delete_wrapper
|
||||||
|
|
||||||
|
if hasattr(cls, 'perform_update'):
|
||||||
|
cls.original_perform_update = cls.perform_update
|
||||||
|
|
||||||
|
@functools.wraps(cls.perform_update)
|
||||||
|
def update_wrapper(*args, **kwargs):
|
||||||
|
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
view, serializer = args
|
||||||
|
instance = view.get_object()
|
||||||
|
if instance:
|
||||||
|
if isinstance(instance, models.Organization):
|
||||||
|
shared_fields = OrganizationType._declared_fields.keys()
|
||||||
|
elif isinstance(instance, models.User):
|
||||||
|
shared_fields = UserType._declared_fields.keys()
|
||||||
|
elif isinstance(instance, models.Team):
|
||||||
|
shared_fields = TeamType._declared_fields.keys()
|
||||||
|
attrs = serializer.validated_data
|
||||||
|
for field in shared_fields:
|
||||||
|
if field in attrs and getattr(instance, field) != attrs[field]:
|
||||||
|
raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
|
||||||
|
return cls.original_perform_update(*args, **kwargs)
|
||||||
|
|
||||||
|
cls.perform_update = update_wrapper
|
||||||
|
|
||||||
|
return cls
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class TeamList(ListCreateAPIView):
|
class TeamList(ListCreateAPIView):
|
||||||
model = models.Team
|
model = models.Team
|
||||||
serializer_class = serializers.TeamSerializer
|
serializer_class = serializers.TeamSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class TeamDetail(RetrieveUpdateDestroyAPIView):
|
class TeamDetail(RetrieveUpdateDestroyAPIView):
|
||||||
model = models.Team
|
model = models.Team
|
||||||
serializer_class = serializers.TeamSerializer
|
serializer_class = serializers.TeamSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class TeamUsersList(BaseUsersList):
|
class TeamUsersList(BaseUsersList):
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
@@ -696,6 +797,7 @@ class TeamUsersList(BaseUsersList):
|
|||||||
|
|
||||||
|
|
||||||
class TeamRolesList(SubListAttachDetachAPIView):
|
class TeamRolesList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializerWithParentAccess
|
serializer_class = serializers.RoleSerializerWithParentAccess
|
||||||
metadata_class = RoleMetadata
|
metadata_class = RoleMetadata
|
||||||
@@ -735,10 +837,12 @@ class TeamRolesList(SubListAttachDetachAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class TeamObjectRolesList(SubListAPIView):
|
class TeamObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Team
|
parent_model = models.Team
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -756,8 +860,15 @@ class TeamProjectsList(SubListAPIView):
|
|||||||
self.check_parent_access(team)
|
self.check_parent_access(team)
|
||||||
model_ct = ContentType.objects.get_for_model(self.model)
|
model_ct = ContentType.objects.get_for_model(self.model)
|
||||||
parent_ct = ContentType.objects.get_for_model(self.parent_model)
|
parent_ct = ContentType.objects.get_for_model(self.parent_model)
|
||||||
proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
|
|
||||||
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
|
rd = get_role_definition(team.member_role)
|
||||||
|
role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
|
||||||
|
if role is None:
|
||||||
|
# Team has no permissions, therefore team has no projects
|
||||||
|
return self.model.objects.none()
|
||||||
|
else:
|
||||||
|
project_qs = self.model.accessible_objects(self.request.user, 'read_role')
|
||||||
|
return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
|
||||||
|
|
||||||
|
|
||||||
class TeamActivityStreamList(SubListAPIView):
|
class TeamActivityStreamList(SubListAPIView):
|
||||||
@@ -772,10 +883,23 @@ class TeamActivityStreamList(SubListAPIView):
|
|||||||
self.check_parent_access(parent)
|
self.check_parent_access(parent)
|
||||||
|
|
||||||
qs = self.request.user.get_queryset(self.model)
|
qs = self.request.user.get_queryset(self.model)
|
||||||
|
|
||||||
return qs.filter(
|
return qs.filter(
|
||||||
Q(team=parent)
|
Q(team=parent)
|
||||||
| Q(project__in=models.Project.accessible_objects(parent.member_role, 'read_role'))
|
| Q(
|
||||||
| Q(credential__in=models.Credential.accessible_objects(parent.member_role, 'read_role'))
|
project__in=RoleEvaluation.objects.filter(
|
||||||
|
role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Project).id, codename='view_project'
|
||||||
|
)
|
||||||
|
.values_list('object_id')
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
| Q(
|
||||||
|
credential__in=RoleEvaluation.objects.filter(
|
||||||
|
role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Credential).id, codename='view_credential'
|
||||||
|
)
|
||||||
|
.values_list('object_id')
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -1027,10 +1151,12 @@ class ProjectAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class ProjectObjectRolesList(SubListAPIView):
|
class ProjectObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Project
|
parent_model = models.Project
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -1043,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
|
|||||||
copy_return_serializer_class = serializers.ProjectSerializer
|
copy_return_serializer_class = serializers.ProjectSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class UserList(ListCreateAPIView):
|
class UserList(ListCreateAPIView):
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
@@ -1188,6 +1315,7 @@ class UserTeamsList(SubListAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class UserRolesList(SubListAttachDetachAPIView):
|
class UserRolesList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializerWithParentAccess
|
serializer_class = serializers.RoleSerializerWithParentAccess
|
||||||
metadata_class = RoleMetadata
|
metadata_class = RoleMetadata
|
||||||
@@ -1212,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
|
|||||||
user = get_object_or_400(models.User, pk=self.kwargs['pk'])
|
user = get_object_or_400(models.User, pk=self.kwargs['pk'])
|
||||||
role = get_object_or_400(models.Role, pk=sub_id)
|
role = get_object_or_400(models.Role, pk=sub_id)
|
||||||
|
|
||||||
credential_content_type = ContentType.objects.get_for_model(models.Credential)
|
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential) # dict of {model: content_type}
|
||||||
|
# Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
|
||||||
|
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
for model in [models.Organization, models.Team]:
|
||||||
|
ct = content_types[model]
|
||||||
|
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
|
||||||
|
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
|
||||||
|
return Response(data, status=status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
credential_content_type = content_types[models.Credential]
|
||||||
if role.content_type == credential_content_type:
|
if role.content_type == credential_content_type:
|
||||||
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
||||||
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
||||||
@@ -1284,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
|
|||||||
return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
|
return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class UserDetail(RetrieveUpdateDestroyAPIView):
|
class UserDetail(RetrieveUpdateDestroyAPIView):
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
@@ -1462,10 +1600,12 @@ class CredentialAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class CredentialObjectRolesList(SubListAPIView):
|
class CredentialObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Credential
|
parent_model = models.Credential
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -2252,12 +2392,13 @@ class JobTemplateList(ListCreateAPIView):
|
|||||||
serializer_class = serializers.JobTemplateSerializer
|
serializer_class = serializers.JobTemplateSerializer
|
||||||
always_allow_superuser = False
|
always_allow_superuser = False
|
||||||
|
|
||||||
def post(self, request, *args, **kwargs):
|
def check_permissions(self, request):
|
||||||
ret = super(JobTemplateList, self).post(request, *args, **kwargs)
|
if request.method == 'POST':
|
||||||
if ret.status_code == 201:
|
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
|
||||||
job_template = models.JobTemplate.objects.get(id=ret.data['id'])
|
if not can_access:
|
||||||
job_template.admin_role.members.add(request.user)
|
self.permission_denied(request, message=messages)
|
||||||
return ret
|
|
||||||
|
super(JobTemplateList, self).check_permissions(request)
|
||||||
|
|
||||||
|
|
||||||
class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||||
@@ -2638,12 +2779,7 @@ class JobTemplateCallback(GenericAPIView):
|
|||||||
host for the current request.
|
host for the current request.
|
||||||
"""
|
"""
|
||||||
# Find the list of remote host names/IPs to check.
|
# Find the list of remote host names/IPs to check.
|
||||||
remote_hosts = set()
|
remote_hosts = set(get_remote_hosts(self.request))
|
||||||
for header in settings.REMOTE_HOST_HEADERS:
|
|
||||||
for value in self.request.META.get(header, '').split(','):
|
|
||||||
value = value.strip()
|
|
||||||
if value:
|
|
||||||
remote_hosts.add(value)
|
|
||||||
# Add the reverse lookup of IP addresses.
|
# Add the reverse lookup of IP addresses.
|
||||||
for rh in list(remote_hosts):
|
for rh in list(remote_hosts):
|
||||||
try:
|
try:
|
||||||
@@ -2804,10 +2940,12 @@ class JobTemplateAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class JobTemplateObjectRolesList(SubListAPIView):
|
class JobTemplateObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.JobTemplate
|
parent_model = models.JobTemplate
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -2981,6 +3119,14 @@ class WorkflowJobTemplateList(ListCreateAPIView):
|
|||||||
serializer_class = serializers.WorkflowJobTemplateSerializer
|
serializer_class = serializers.WorkflowJobTemplateSerializer
|
||||||
always_allow_superuser = False
|
always_allow_superuser = False
|
||||||
|
|
||||||
|
def check_permissions(self, request):
|
||||||
|
if request.method == 'POST':
|
||||||
|
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
|
||||||
|
if not can_access:
|
||||||
|
self.permission_denied(request, message=messages)
|
||||||
|
|
||||||
|
super(WorkflowJobTemplateList, self).check_permissions(request)
|
||||||
|
|
||||||
|
|
||||||
class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||||
model = models.WorkflowJobTemplate
|
model = models.WorkflowJobTemplate
|
||||||
@@ -3190,10 +3336,12 @@ class WorkflowJobTemplateAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class WorkflowJobTemplateObjectRolesList(SubListAPIView):
|
class WorkflowJobTemplateObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.WorkflowJobTemplate
|
parent_model = models.WorkflowJobTemplate
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -4202,6 +4350,7 @@ class ActivityStreamDetail(RetrieveAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleList(ListAPIView):
|
class RoleList(ListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
permission_classes = (IsAuthenticated,)
|
permission_classes = (IsAuthenticated,)
|
||||||
@@ -4209,11 +4358,13 @@ class RoleList(ListAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleDetail(RetrieveAPIView):
|
class RoleDetail(RetrieveAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
|
|
||||||
|
|
||||||
class RoleUsersList(SubListAttachDetachAPIView):
|
class RoleUsersList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
@@ -4234,7 +4385,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
|
|||||||
user = get_object_or_400(models.User, pk=sub_id)
|
user = get_object_or_400(models.User, pk=sub_id)
|
||||||
role = self.get_parent_object()
|
role = self.get_parent_object()
|
||||||
|
|
||||||
credential_content_type = ContentType.objects.get_for_model(models.Credential)
|
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential) # dict of {model: content_type}
|
||||||
|
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
for model in [models.Organization, models.Team]:
|
||||||
|
ct = content_types[model]
|
||||||
|
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
|
||||||
|
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
|
||||||
|
return Response(data, status=status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
credential_content_type = content_types[models.Credential]
|
||||||
if role.content_type == credential_content_type:
|
if role.content_type == credential_content_type:
|
||||||
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
||||||
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
||||||
@@ -4248,6 +4407,7 @@ class RoleUsersList(SubListAttachDetachAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleTeamsList(SubListAttachDetachAPIView):
|
class RoleTeamsList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Team
|
model = models.Team
|
||||||
serializer_class = serializers.TeamSerializer
|
serializer_class = serializers.TeamSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
@@ -4292,10 +4452,12 @@ class RoleTeamsList(SubListAttachDetachAPIView):
|
|||||||
team.member_role.children.remove(role)
|
team.member_role.children.remove(role)
|
||||||
else:
|
else:
|
||||||
team.member_role.children.add(role)
|
team.member_role.children.add(role)
|
||||||
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class RoleParentsList(SubListAPIView):
|
class RoleParentsList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
@@ -4309,6 +4471,7 @@ class RoleParentsList(SubListAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleChildrenList(SubListAPIView):
|
class RoleChildrenList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
|
|||||||
@@ -48,23 +48,23 @@ class AnalyticsRootView(APIView):
|
|||||||
|
|
||||||
def get(self, request, format=None):
|
def get(self, request, format=None):
|
||||||
data = OrderedDict()
|
data = OrderedDict()
|
||||||
data['authorized'] = reverse('api:analytics_authorized')
|
data['authorized'] = reverse('api:analytics_authorized', request=request)
|
||||||
data['reports'] = reverse('api:analytics_reports_list')
|
data['reports'] = reverse('api:analytics_reports_list', request=request)
|
||||||
data['report_options'] = reverse('api:analytics_report_options_list')
|
data['report_options'] = reverse('api:analytics_report_options_list', request=request)
|
||||||
data['adoption_rate'] = reverse('api:analytics_adoption_rate')
|
data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
|
||||||
data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
|
data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
|
||||||
data['event_explorer'] = reverse('api:analytics_event_explorer')
|
data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
|
||||||
data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
|
data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
|
||||||
data['host_explorer'] = reverse('api:analytics_host_explorer')
|
data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
|
||||||
data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
|
data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
|
||||||
data['job_explorer'] = reverse('api:analytics_job_explorer')
|
data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
|
||||||
data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
|
data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
|
||||||
data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
|
data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
|
||||||
data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
|
data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
|
||||||
data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
|
data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
|
||||||
data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
|
data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
|
||||||
data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
|
data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
|
||||||
data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
|
data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
|
||||||
return Response(data)
|
return Response(data)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -152,6 +152,7 @@ class InventoryObjectRolesList(SubListAPIView):
|
|||||||
serializer_class = RoleSerializer
|
serializer_class = RoleSerializer
|
||||||
parent_model = Inventory
|
parent_model = Inventory
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
|
|||||||
@@ -53,15 +53,18 @@ from awx.api.serializers import (
|
|||||||
CredentialSerializer,
|
CredentialSerializer,
|
||||||
)
|
)
|
||||||
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
|
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
|
||||||
|
from awx.api.views import immutablesharedfields
|
||||||
|
|
||||||
logger = logging.getLogger('awx.api.views.organization')
|
logger = logging.getLogger('awx.api.views.organization')
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
||||||
model = Organization
|
model = Organization
|
||||||
serializer_class = OrganizationSerializer
|
serializer_class = OrganizationSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||||
model = Organization
|
model = Organization
|
||||||
serializer_class = OrganizationSerializer
|
serializer_class = OrganizationSerializer
|
||||||
@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
|
|||||||
relationship = 'inventories'
|
relationship = 'inventories'
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationUsersList(BaseUsersList):
|
class OrganizationUsersList(BaseUsersList):
|
||||||
model = User
|
model = User
|
||||||
serializer_class = UserSerializer
|
serializer_class = UserSerializer
|
||||||
@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
|
|||||||
ordering = ('username',)
|
ordering = ('username',)
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationAdminsList(BaseUsersList):
|
class OrganizationAdminsList(BaseUsersList):
|
||||||
model = User
|
model = User
|
||||||
serializer_class = UserSerializer
|
serializer_class = UserSerializer
|
||||||
@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
|
|||||||
parent_key = 'organization'
|
parent_key = 'organization'
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
||||||
model = Team
|
model = Team
|
||||||
serializer_class = TeamSerializer
|
serializer_class = TeamSerializer
|
||||||
@@ -226,6 +232,7 @@ class OrganizationObjectRolesList(SubListAPIView):
|
|||||||
serializer_class = RoleSerializer
|
serializer_class = RoleSerializer
|
||||||
parent_model = Organization
|
parent_model = Organization
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ from django.utils.decorators import method_decorator
|
|||||||
from django.views.decorators.csrf import ensure_csrf_cookie
|
from django.views.decorators.csrf import ensure_csrf_cookie
|
||||||
from django.template.loader import render_to_string
|
from django.template.loader import render_to_string
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django.urls import reverse as django_reverse
|
||||||
|
|
||||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
@@ -27,7 +28,7 @@ from awx.main.analytics import all_collectors
|
|||||||
from awx.main.ha import is_ha_environment
|
from awx.main.ha import is_ha_environment
|
||||||
from awx.main.utils import get_awx_version, get_custom_venv_choices
|
from awx.main.utils import get_awx_version, get_custom_venv_choices
|
||||||
from awx.main.utils.licensing import validate_entitlement_manifest
|
from awx.main.utils.licensing import validate_entitlement_manifest
|
||||||
from awx.api.versioning import reverse, drf_reverse
|
from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
|
||||||
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
|
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
|
||||||
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
|
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
|
||||||
from awx.main.utils import set_environ
|
from awx.main.utils import set_environ
|
||||||
@@ -39,19 +40,19 @@ logger = logging.getLogger('awx.api.views.root')
|
|||||||
class ApiRootView(APIView):
|
class ApiRootView(APIView):
|
||||||
permission_classes = (AllowAny,)
|
permission_classes = (AllowAny,)
|
||||||
name = _('REST API')
|
name = _('REST API')
|
||||||
versioning_class = None
|
versioning_class = URLPathVersioning
|
||||||
swagger_topic = 'Versioning'
|
swagger_topic = 'Versioning'
|
||||||
|
|
||||||
@method_decorator(ensure_csrf_cookie)
|
@method_decorator(ensure_csrf_cookie)
|
||||||
def get(self, request, format=None):
|
def get(self, request, format=None):
|
||||||
'''List supported API versions'''
|
'''List supported API versions'''
|
||||||
|
v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
|
||||||
v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
|
|
||||||
data = OrderedDict()
|
data = OrderedDict()
|
||||||
data['description'] = _('AWX REST API')
|
data['description'] = _('AWX REST API')
|
||||||
data['current_version'] = v2
|
data['current_version'] = v2
|
||||||
data['available_versions'] = dict(v2=v2)
|
data['available_versions'] = dict(v2=v2)
|
||||||
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
|
if not is_optional_api_urlpattern_prefix_request(request):
|
||||||
|
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
|
||||||
data['custom_logo'] = settings.CUSTOM_LOGO
|
data['custom_logo'] = settings.CUSTOM_LOGO
|
||||||
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
||||||
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
|
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
|
||||||
@@ -130,6 +131,10 @@ class ApiVersionRootView(APIView):
|
|||||||
data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
|
data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
|
||||||
data['bulk'] = reverse('api:bulk', request=request)
|
data['bulk'] = reverse('api:bulk', request=request)
|
||||||
data['analytics'] = reverse('api:analytics_root_view', request=request)
|
data['analytics'] = reverse('api:analytics_root_view', request=request)
|
||||||
|
data['service_index'] = django_reverse('service-index-root')
|
||||||
|
data['role_definitions'] = django_reverse('roledefinition-list')
|
||||||
|
data['role_user_assignments'] = django_reverse('roleuserassignment-list')
|
||||||
|
data['role_team_assignments'] = django_reverse('roleteamassignment-list')
|
||||||
return Response(data)
|
return Response(data)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -55,6 +55,7 @@ register(
|
|||||||
# Optional; category_slug will be slugified version of category if not
|
# Optional; category_slug will be slugified version of category if not
|
||||||
# explicitly provided.
|
# explicitly provided.
|
||||||
category_slug='cows',
|
category_slug='cows',
|
||||||
|
hidden=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -61,6 +61,10 @@ class StringListBooleanField(ListField):
|
|||||||
|
|
||||||
def to_representation(self, value):
|
def to_representation(self, value):
|
||||||
try:
|
try:
|
||||||
|
if isinstance(value, str):
|
||||||
|
# https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
|
||||||
|
# DRF changed truthy and falsy lists to be capitalized
|
||||||
|
value = value.lower()
|
||||||
if isinstance(value, (list, tuple)):
|
if isinstance(value, (list, tuple)):
|
||||||
return super(StringListBooleanField, self).to_representation(value)
|
return super(StringListBooleanField, self).to_representation(value)
|
||||||
elif value in BooleanField.TRUE_VALUES:
|
elif value in BooleanField.TRUE_VALUES:
|
||||||
@@ -78,6 +82,8 @@ class StringListBooleanField(ListField):
|
|||||||
|
|
||||||
def to_internal_value(self, data):
|
def to_internal_value(self, data):
|
||||||
try:
|
try:
|
||||||
|
if isinstance(data, str):
|
||||||
|
data = data.lower()
|
||||||
if isinstance(data, (list, tuple)):
|
if isinstance(data, (list, tuple)):
|
||||||
return super(StringListBooleanField, self).to_internal_value(data)
|
return super(StringListBooleanField, self).to_internal_value(data)
|
||||||
elif data in BooleanField.TRUE_VALUES:
|
elif data in BooleanField.TRUE_VALUES:
|
||||||
|
|||||||
@@ -127,6 +127,8 @@ class SettingsRegistry(object):
|
|||||||
encrypted = bool(field_kwargs.pop('encrypted', False))
|
encrypted = bool(field_kwargs.pop('encrypted', False))
|
||||||
defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
|
defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
|
||||||
unit = field_kwargs.pop('unit', None)
|
unit = field_kwargs.pop('unit', None)
|
||||||
|
hidden = field_kwargs.pop('hidden', False)
|
||||||
|
warning_text = field_kwargs.pop('warning_text', None)
|
||||||
if getattr(field_kwargs.get('child', None), 'source', None) is not None:
|
if getattr(field_kwargs.get('child', None), 'source', None) is not None:
|
||||||
field_kwargs['child'].source = None
|
field_kwargs['child'].source = None
|
||||||
field_instance = field_class(**field_kwargs)
|
field_instance = field_class(**field_kwargs)
|
||||||
@@ -134,12 +136,14 @@ class SettingsRegistry(object):
|
|||||||
field_instance.category = category
|
field_instance.category = category
|
||||||
field_instance.depends_on = depends_on
|
field_instance.depends_on = depends_on
|
||||||
field_instance.unit = unit
|
field_instance.unit = unit
|
||||||
|
field_instance.hidden = hidden
|
||||||
if placeholder is not empty:
|
if placeholder is not empty:
|
||||||
field_instance.placeholder = placeholder
|
field_instance.placeholder = placeholder
|
||||||
field_instance.defined_in_file = defined_in_file
|
field_instance.defined_in_file = defined_in_file
|
||||||
if field_instance.defined_in_file:
|
if field_instance.defined_in_file:
|
||||||
field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
|
field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
|
||||||
field_instance.encrypted = encrypted
|
field_instance.encrypted = encrypted
|
||||||
|
field_instance.warning_text = warning_text
|
||||||
original_field_instance = field_instance
|
original_field_instance = field_instance
|
||||||
if field_class != original_field_class:
|
if field_class != original_field_class:
|
||||||
original_field_instance = original_field_class(**field_kwargs)
|
original_field_instance = original_field_class(**field_kwargs)
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
# Python
|
# Python
|
||||||
import contextlib
|
import contextlib
|
||||||
import logging
|
import logging
|
||||||
|
import psycopg
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import os
|
import os
|
||||||
@@ -13,7 +14,7 @@ from django.conf import settings, UserSettingsHolder
|
|||||||
from django.core.cache import cache as django_cache
|
from django.core.cache import cache as django_cache
|
||||||
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
|
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
|
||||||
from django.db import transaction, connection
|
from django.db import transaction, connection
|
||||||
from django.db.utils import Error as DBError, ProgrammingError
|
from django.db.utils import DatabaseError, ProgrammingError
|
||||||
from django.utils.functional import cached_property
|
from django.utils.functional import cached_property
|
||||||
|
|
||||||
# Django REST Framework
|
# Django REST Framework
|
||||||
@@ -80,18 +81,26 @@ def _ctit_db_wrapper(trans_safe=False):
|
|||||||
logger.debug('Obtaining database settings in spite of broken transaction.')
|
logger.debug('Obtaining database settings in spite of broken transaction.')
|
||||||
transaction.set_rollback(False)
|
transaction.set_rollback(False)
|
||||||
yield
|
yield
|
||||||
except DBError as exc:
|
except ProgrammingError as e:
|
||||||
|
# Exception raised for programming errors
|
||||||
|
# Examples may be table not found or already exists,
|
||||||
|
# this generally means we can't fetch Tower configuration
|
||||||
|
# because the database hasn't actually finished migrating yet;
|
||||||
|
# this is usually a sign that a service in a container (such as ws_broadcast)
|
||||||
|
# has come up *before* the database has finished migrating, and
|
||||||
|
# especially that the conf.settings table doesn't exist yet
|
||||||
|
# syntax error in the SQL statement, wrong number of parameters specified, etc.
|
||||||
if trans_safe:
|
if trans_safe:
|
||||||
level = logger.warning
|
logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
|
||||||
if isinstance(exc, ProgrammingError):
|
else:
|
||||||
if 'relation' in str(exc) and 'does not exist' in str(exc):
|
logger.exception('Error modifying something related to database settings.')
|
||||||
# this generally means we can't fetch Tower configuration
|
except DatabaseError as e:
|
||||||
# because the database hasn't actually finished migrating yet;
|
if trans_safe:
|
||||||
# this is usually a sign that a service in a container (such as ws_broadcast)
|
cause = e.__cause__
|
||||||
# has come up *before* the database has finished migrating, and
|
if cause and hasattr(cause, 'sqlstate'):
|
||||||
# especially that the conf.settings table doesn't exist yet
|
sqlstate = cause.sqlstate
|
||||||
level = logger.debug
|
sqlstate_str = psycopg.errors.lookup(sqlstate)
|
||||||
level(f'Database settings are not available, using defaults. error: {str(exc)}')
|
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
|
||||||
else:
|
else:
|
||||||
logger.exception('Error modifying something related to database settings.')
|
logger.exception('Error modifying something related to database settings.')
|
||||||
finally:
|
finally:
|
||||||
|
|||||||
@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||||
|
|
||||||
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
|
||||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||||
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
|
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||||
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||||
|
|
||||||
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
|
||||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||||
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
|
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
|
||||||
|
|
||||||
|
|
||||||
def test_empty_setting(settings, mocker):
|
def test_empty_setting(settings, mocker):
|
||||||
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
||||||
|
|
||||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
|
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||||
with pytest.raises(AttributeError):
|
with pytest.raises(AttributeError):
|
||||||
settings.AWX_SOME_SETTING
|
settings.AWX_SOME_SETTING
|
||||||
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
|
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
|
||||||
|
|
||||||
|
|
||||||
def test_setting_from_db(settings, mocker):
|
def test_setting_from_db(settings, mocker):
|
||||||
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):
|
|||||||
|
|
||||||
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||||
assert settings.AWX_SOME_SETTING == 'FROM_DB'
|
assert settings.AWX_SOME_SETTING == 'FROM_DB'
|
||||||
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
|
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||||
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):
|
|||||||
|
|
||||||
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||||
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
|
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
|
||||||
settings.AWX_SOME_SETTING = 'NEW-VALUE'
|
settings.AWX_SOME_SETTING = 'NEW-VALUE'
|
||||||
|
|
||||||
assert existing_setting.value == 'NEW-VALUE'
|
assert existing_setting.value == 'NEW-VALUE'
|
||||||
existing_setting.save.assert_called_with(update_fields=['value'])
|
existing_setting.save.assert_called_with(update_fields=['value'])
|
||||||
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
||||||
|
|
||||||
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
|
||||||
del settings.AWX_SOME_SETTING
|
del settings.AWX_SOME_SETTING
|
||||||
|
|
||||||
assert existing_setting.delete.call_count == 1
|
assert existing_setting.delete.call_count == 1
|
||||||
|
|
||||||
@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
|
|||||||
# use its primary key as part of the encryption key
|
# use its primary key as part of the encryption key
|
||||||
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
|
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
|
||||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||||
cache.set('AWX_ENCRYPTED', 'SECRET!')
|
cache.set('AWX_ENCRYPTED', 'SECRET!')
|
||||||
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
|
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
|
||||||
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
|
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
|
||||||
|
|
||||||
|
|
||||||
def test_readonly_sensitive_cache_data_is_encrypted(settings):
|
def test_readonly_sensitive_cache_data_is_encrypted(settings):
|
||||||
|
|||||||
@@ -20,7 +20,10 @@ from rest_framework.exceptions import ParseError, PermissionDenied
|
|||||||
# Django OAuth Toolkit
|
# Django OAuth Toolkit
|
||||||
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken
|
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
from ansible_base.lib.utils.validation import to_python_boolean
|
from ansible_base.lib.utils.validation import to_python_boolean
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.utils import (
|
from awx.main.utils import (
|
||||||
@@ -72,8 +75,6 @@ from awx.main.models import (
|
|||||||
WorkflowJobTemplateNode,
|
WorkflowJobTemplateNode,
|
||||||
WorkflowApproval,
|
WorkflowApproval,
|
||||||
WorkflowApprovalTemplate,
|
WorkflowApprovalTemplate,
|
||||||
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
|
||||||
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
|
||||||
)
|
)
|
||||||
from awx.main.models.mixins import ResourceMixin
|
from awx.main.models.mixins import ResourceMixin
|
||||||
|
|
||||||
@@ -264,7 +265,11 @@ class BaseAccess(object):
|
|||||||
return self.can_change(obj, data)
|
return self.can_change(obj, data)
|
||||||
|
|
||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
return self.user.is_superuser
|
if self.user.is_superuser:
|
||||||
|
return True
|
||||||
|
if obj._meta.model_name in [cls._meta.model_name for cls in permission_registry.all_registered_models]:
|
||||||
|
return self.user.has_obj_perm(obj, 'delete')
|
||||||
|
return False
|
||||||
|
|
||||||
def can_copy(self, obj):
|
def can_copy(self, obj):
|
||||||
return self.can_add({'reference_obj': obj})
|
return self.can_add({'reference_obj': obj})
|
||||||
@@ -593,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
|
|||||||
- a superuser
|
- a superuser
|
||||||
- admin role on the Instance group
|
- admin role on the Instance group
|
||||||
I can add/delete Instance Groups:
|
I can add/delete Instance Groups:
|
||||||
- a superuser(system administrator)
|
- a superuser(system administrator), because these are not org-scoped
|
||||||
I can use Instance Groups when I have:
|
I can use Instance Groups when I have:
|
||||||
- use_role on the instance group
|
- use_role on the instance group
|
||||||
"""
|
"""
|
||||||
@@ -622,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
|
|||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
|
if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
|
||||||
return False
|
return False
|
||||||
return self.user.is_superuser
|
return self.user.has_obj_perm(obj, 'delete')
|
||||||
|
|
||||||
|
|
||||||
class UserAccess(BaseAccess):
|
class UserAccess(BaseAccess):
|
||||||
@@ -639,7 +644,10 @@ class UserAccess(BaseAccess):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
model = User
|
model = User
|
||||||
prefetch_related = ('profile',)
|
prefetch_related = (
|
||||||
|
'profile',
|
||||||
|
'resource',
|
||||||
|
)
|
||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
|
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
|
||||||
@@ -648,9 +656,7 @@ class UserAccess(BaseAccess):
|
|||||||
qs = (
|
qs = (
|
||||||
User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
|
User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
|
||||||
| User.objects.filter(pk=self.user.id)
|
| User.objects.filter(pk=self.user.id)
|
||||||
| User.objects.filter(
|
| User.objects.filter(is_superuser=True)
|
||||||
pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
|
|
||||||
)
|
|
||||||
).distinct()
|
).distinct()
|
||||||
return qs
|
return qs
|
||||||
|
|
||||||
@@ -708,6 +714,15 @@ class UserAccess(BaseAccess):
|
|||||||
if not allow_orphans:
|
if not allow_orphans:
|
||||||
# in these cases only superusers can modify orphan users
|
# in these cases only superusers can modify orphan users
|
||||||
return False
|
return False
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
# Permission granted if the user has all permissions that the target user has
|
||||||
|
target_perms = set(
|
||||||
|
RoleEvaluation.objects.filter(role__in=obj.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
|
||||||
|
)
|
||||||
|
user_perms = set(
|
||||||
|
RoleEvaluation.objects.filter(role__in=self.user.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
|
||||||
|
)
|
||||||
|
return not (target_perms - user_perms)
|
||||||
return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
|
return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
|
||||||
else:
|
else:
|
||||||
return self.is_all_org_admin(obj)
|
return self.is_all_org_admin(obj)
|
||||||
@@ -835,6 +850,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
|
|||||||
prefetch_related = (
|
prefetch_related = (
|
||||||
'created_by',
|
'created_by',
|
||||||
'modified_by',
|
'modified_by',
|
||||||
|
'resource', # dab_resource_registry
|
||||||
)
|
)
|
||||||
# organization admin_role is not a parent of organization auditor_role
|
# organization admin_role is not a parent of organization auditor_role
|
||||||
notification_attach_roles = ['admin_role', 'auditor_role']
|
notification_attach_roles = ['admin_role', 'auditor_role']
|
||||||
@@ -945,9 +961,6 @@ class InventoryAccess(BaseAccess):
|
|||||||
def can_update(self, obj):
|
def can_update(self, obj):
|
||||||
return self.user in obj.update_role
|
return self.user in obj.update_role
|
||||||
|
|
||||||
def can_delete(self, obj):
|
|
||||||
return self.can_admin(obj, None)
|
|
||||||
|
|
||||||
def can_run_ad_hoc_commands(self, obj):
|
def can_run_ad_hoc_commands(self, obj):
|
||||||
return self.user in obj.adhoc_role
|
return self.user in obj.adhoc_role
|
||||||
|
|
||||||
@@ -1303,6 +1316,7 @@ class TeamAccess(BaseAccess):
|
|||||||
'created_by',
|
'created_by',
|
||||||
'modified_by',
|
'modified_by',
|
||||||
'organization',
|
'organization',
|
||||||
|
'resource', # dab_resource_registry
|
||||||
)
|
)
|
||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
@@ -1373,12 +1387,11 @@ class TeamAccess(BaseAccess):
|
|||||||
class ExecutionEnvironmentAccess(BaseAccess):
|
class ExecutionEnvironmentAccess(BaseAccess):
|
||||||
"""
|
"""
|
||||||
I can see an execution environment when:
|
I can see an execution environment when:
|
||||||
- I'm a superuser
|
- I can see its organization
|
||||||
- I'm a member of the same organization
|
- It is a global ExecutionEnvironment
|
||||||
- it is a global ExecutionEnvironment
|
|
||||||
I can create/change an execution environment when:
|
I can create/change an execution environment when:
|
||||||
- I'm a superuser
|
- I'm a superuser
|
||||||
- I'm an admin for the organization(s)
|
- I have an organization or object role that gives access
|
||||||
"""
|
"""
|
||||||
|
|
||||||
model = ExecutionEnvironment
|
model = ExecutionEnvironment
|
||||||
@@ -1387,7 +1400,9 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
|||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
return ExecutionEnvironment.objects.filter(
|
return ExecutionEnvironment.objects.filter(
|
||||||
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
|
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
|
||||||
|
| Q(organization__isnull=True)
|
||||||
|
| Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
|
||||||
).distinct()
|
).distinct()
|
||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
@@ -1400,13 +1415,19 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
|||||||
def can_change(self, obj, data):
|
def can_change(self, obj, data):
|
||||||
if obj and obj.organization_id is None:
|
if obj and obj.organization_id is None:
|
||||||
raise PermissionDenied
|
raise PermissionDenied
|
||||||
if self.user not in obj.organization.execution_environment_admin_role:
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
raise PermissionDenied
|
if not self.user.has_obj_perm(obj, 'change'):
|
||||||
if data and 'organization' in data:
|
|
||||||
new_org = get_object_from_data('organization', Organization, data, obj=obj)
|
|
||||||
if not new_org or self.user not in new_org.execution_environment_admin_role:
|
|
||||||
return False
|
return False
|
||||||
return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
|
else:
|
||||||
|
if self.user not in obj.organization.execution_environment_admin_role:
|
||||||
|
raise PermissionDenied
|
||||||
|
if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
|
||||||
|
return False
|
||||||
|
# Special case that check_related does not catch, org users can not remove the organization from the EE
|
||||||
|
if data and ('organization' in data or 'organization_id' in data):
|
||||||
|
if (not data.get('organization')) and (not data.get('organization_id')):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
if obj.managed:
|
if obj.managed:
|
||||||
@@ -1578,6 +1599,8 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
|
|||||||
inventory = get_value(Inventory, 'inventory')
|
inventory = get_value(Inventory, 'inventory')
|
||||||
if inventory:
|
if inventory:
|
||||||
if self.user not in inventory.use_role:
|
if self.user not in inventory.use_role:
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['inventory'] = [_('You do not have use permission on Inventory')]
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
||||||
@@ -1586,11 +1609,16 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
|
|||||||
project = get_value(Project, 'project')
|
project = get_value(Project, 'project')
|
||||||
# If the user has admin access to the project (as an org admin), should
|
# If the user has admin access to the project (as an org admin), should
|
||||||
# be able to proceed without additional checks.
|
# be able to proceed without additional checks.
|
||||||
if project:
|
if not project:
|
||||||
return self.user in project.use_role
|
|
||||||
else:
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if self.user not in project.use_role:
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['project'] = [_('You do not have use permission on Project')]
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
def can_copy_related(self, obj):
|
def can_copy_related(self, obj):
|
||||||
"""
|
"""
|
||||||
@@ -2074,11 +2102,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
|
|||||||
if not data: # So the browseable API will work
|
if not data: # So the browseable API will work
|
||||||
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
|
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
|
||||||
|
|
||||||
return bool(
|
if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
|
||||||
self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
|
if data.get('organization', None) is None:
|
||||||
and self.check_related('inventory', Inventory, data, role_field='use_role')
|
if self.save_messages:
|
||||||
and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
|
self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
|
||||||
)
|
return False
|
||||||
|
|
||||||
|
if not self.check_related('inventory', Inventory, data, role_field='use_role'):
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['inventory'] = [_('You do not have use_role to the inventory')]
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
def can_copy(self, obj):
|
def can_copy(self, obj):
|
||||||
if self.save_messages:
|
if self.save_messages:
|
||||||
@@ -2587,6 +2627,8 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
|
|||||||
if not JobLaunchConfigAccess(self.user).can_add(data):
|
if not JobLaunchConfigAccess(self.user).can_add(data):
|
||||||
return False
|
return False
|
||||||
if not data:
|
if not data:
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return self.user.has_roles.filter(permission_partials__codename__in=['execute_jobtemplate', 'update_project', 'update_inventory']).exists()
|
||||||
return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
|
return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
|
||||||
|
|
||||||
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
|
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
|
||||||
@@ -2608,13 +2650,15 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
|
|||||||
|
|
||||||
class NotificationTemplateAccess(BaseAccess):
|
class NotificationTemplateAccess(BaseAccess):
|
||||||
"""
|
"""
|
||||||
I can see/use a notification_template if I have permission to
|
Run standard logic from DAB RBAC
|
||||||
"""
|
"""
|
||||||
|
|
||||||
model = NotificationTemplate
|
model = NotificationTemplate
|
||||||
prefetch_related = ('created_by', 'modified_by', 'organization')
|
prefetch_related = ('created_by', 'modified_by', 'organization')
|
||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return self.model.access_qs(self.user, 'view')
|
||||||
return self.model.objects.filter(
|
return self.model.objects.filter(
|
||||||
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
|
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
|
||||||
).distinct()
|
).distinct()
|
||||||
@@ -2627,10 +2671,7 @@ class NotificationTemplateAccess(BaseAccess):
|
|||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
def can_change(self, obj, data):
|
def can_change(self, obj, data):
|
||||||
if obj.organization is None:
|
return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')
|
||||||
# only superusers are allowed to edit orphan notification templates
|
|
||||||
return False
|
|
||||||
return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
|
|
||||||
|
|
||||||
def can_admin(self, obj, data):
|
def can_admin(self, obj, data):
|
||||||
return self.can_change(obj, data)
|
return self.can_change(obj, data)
|
||||||
@@ -2640,9 +2681,7 @@ class NotificationTemplateAccess(BaseAccess):
|
|||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
def can_start(self, obj, validate_license=True):
|
def can_start(self, obj, validate_license=True):
|
||||||
if obj.organization is None:
|
return self.can_change(obj, None)
|
||||||
return False
|
|
||||||
return self.user in obj.organization.notification_admin_role
|
|
||||||
|
|
||||||
|
|
||||||
class NotificationAccess(BaseAccess):
|
class NotificationAccess(BaseAccess):
|
||||||
@@ -2783,7 +2822,7 @@ class ActivityStreamAccess(BaseAccess):
|
|||||||
| Q(notification_template__organization__in=auditing_orgs)
|
| Q(notification_template__organization__in=auditing_orgs)
|
||||||
| Q(notification__notification_template__organization__in=auditing_orgs)
|
| Q(notification__notification_template__organization__in=auditing_orgs)
|
||||||
| Q(label__organization__in=auditing_orgs)
|
| Q(label__organization__in=auditing_orgs)
|
||||||
| Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
|
| Q(role__in=Role.visible_roles(self.user) if auditing_orgs else [])
|
||||||
)
|
)
|
||||||
|
|
||||||
project_set = Project.accessible_pk_qs(self.user, 'read_role')
|
project_set = Project.accessible_pk_qs(self.user, 'read_role')
|
||||||
@@ -2840,13 +2879,10 @@ class RoleAccess(BaseAccess):
|
|||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
result = Role.visible_roles(self.user)
|
result = Role.visible_roles(self.user)
|
||||||
# Sanity check: is the requesting user an orphaned non-admin/auditor?
|
# Make system admin/auditor mandatorily visible.
|
||||||
# if yes, make system admin/auditor mandatorily visible.
|
mandatories = ('system_administrator', 'system_auditor')
|
||||||
if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
|
super_qs = Role.objects.filter(singleton_name__in=mandatories)
|
||||||
mandatories = ('system_administrator', 'system_auditor')
|
return result | super_qs
|
||||||
super_qs = Role.objects.filter(singleton_name__in=mandatories)
|
|
||||||
result = result | super_qs
|
|
||||||
return result
|
|
||||||
|
|
||||||
def can_add(self, obj, data):
|
def can_add(self, obj, data):
|
||||||
# Unsupported for now
|
# Unsupported for now
|
||||||
|
|||||||
@@ -66,10 +66,8 @@ class FixedSlidingWindow:
|
|||||||
|
|
||||||
|
|
||||||
class RelayWebsocketStatsManager:
|
class RelayWebsocketStatsManager:
|
||||||
def __init__(self, event_loop, local_hostname):
|
def __init__(self, local_hostname):
|
||||||
self._local_hostname = local_hostname
|
self._local_hostname = local_hostname
|
||||||
|
|
||||||
self._event_loop = event_loop
|
|
||||||
self._stats = dict()
|
self._stats = dict()
|
||||||
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
|
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
|
||||||
|
|
||||||
@@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
|
|||||||
self.start()
|
self.start()
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
self.async_task = self._event_loop.create_task(self.run_loop())
|
self.async_task = asyncio.get_running_loop().create_task(
|
||||||
|
self.run_loop(),
|
||||||
|
name='RelayWebsocketStatsManager.run_loop',
|
||||||
|
)
|
||||||
return self.async_task
|
return self.async_task
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
|
|||||||
resolved_action,
|
resolved_action,
|
||||||
resolved_role,
|
resolved_role,
|
||||||
-- '-' operator listed here:
|
-- '-' operator listed here:
|
||||||
-- https://www.postgresql.org/docs/12/functions-json.html
|
-- https://www.postgresql.org/docs/15/functions-json.html
|
||||||
-- note that operator is only supported by jsonb objects
|
-- note that operator is only supported by jsonb objects
|
||||||
-- https://www.postgresql.org/docs/current/datatype-json.html
|
-- https://www.postgresql.org/docs/current/datatype-json.html
|
||||||
(CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
|
(CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
|
||||||
|
|||||||
@@ -1,7 +1,40 @@
|
|||||||
from django.apps import AppConfig
|
from django.apps import AppConfig
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from awx.main.utils.named_url_graph import _customize_graph, generate_graph
|
||||||
|
from awx.conf import register, fields
|
||||||
|
|
||||||
|
|
||||||
class MainConfig(AppConfig):
|
class MainConfig(AppConfig):
|
||||||
name = 'awx.main'
|
name = 'awx.main'
|
||||||
verbose_name = _('Main')
|
verbose_name = _('Main')
|
||||||
|
|
||||||
|
def load_named_url_feature(self):
|
||||||
|
models = [m for m in self.get_models() if hasattr(m, 'get_absolute_url')]
|
||||||
|
generate_graph(models)
|
||||||
|
_customize_graph()
|
||||||
|
register(
|
||||||
|
'NAMED_URL_FORMATS',
|
||||||
|
field_class=fields.DictField,
|
||||||
|
read_only=True,
|
||||||
|
label=_('Formats of all available named urls'),
|
||||||
|
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
|
||||||
|
category=_('Named URL'),
|
||||||
|
category_slug='named-url',
|
||||||
|
)
|
||||||
|
register(
|
||||||
|
'NAMED_URL_GRAPH_NODES',
|
||||||
|
field_class=fields.DictField,
|
||||||
|
read_only=True,
|
||||||
|
label=_('List of all named url graph nodes.'),
|
||||||
|
help_text=_(
|
||||||
|
'Read-only list of key-value pairs that exposes named URL graph topology.'
|
||||||
|
' Use this list to programmatically generate named URLs for resources'
|
||||||
|
),
|
||||||
|
category=_('Named URL'),
|
||||||
|
category_slug='named-url',
|
||||||
|
)
|
||||||
|
|
||||||
|
def ready(self):
|
||||||
|
super().ready()
|
||||||
|
|
||||||
|
self.load_named_url_feature()
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
|
from django.core.checks import Error
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
# Django REST Framework
|
# Django REST Framework
|
||||||
@@ -92,6 +93,7 @@ register(
|
|||||||
),
|
),
|
||||||
category=_('System'),
|
category=_('System'),
|
||||||
category_slug='system',
|
category_slug='system',
|
||||||
|
required=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
register(
|
register(
|
||||||
@@ -774,6 +776,7 @@ register(
|
|||||||
allow_null=True,
|
allow_null=True,
|
||||||
category=_('System'),
|
category=_('System'),
|
||||||
category_slug='system',
|
category_slug='system',
|
||||||
|
required=False,
|
||||||
)
|
)
|
||||||
register(
|
register(
|
||||||
'AUTOMATION_ANALYTICS_LAST_ENTRIES',
|
'AUTOMATION_ANALYTICS_LAST_ENTRIES',
|
||||||
@@ -815,6 +818,7 @@ register(
|
|||||||
help_text=_('Max jobs to allow bulk jobs to launch'),
|
help_text=_('Max jobs to allow bulk jobs to launch'),
|
||||||
category=_('Bulk Actions'),
|
category=_('Bulk Actions'),
|
||||||
category_slug='bulk',
|
category_slug='bulk',
|
||||||
|
hidden=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
register(
|
register(
|
||||||
@@ -825,6 +829,7 @@ register(
|
|||||||
help_text=_('Max number of hosts to allow to be created in a single bulk action'),
|
help_text=_('Max number of hosts to allow to be created in a single bulk action'),
|
||||||
category=_('Bulk Actions'),
|
category=_('Bulk Actions'),
|
||||||
category_slug='bulk',
|
category_slug='bulk',
|
||||||
|
hidden=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
register(
|
register(
|
||||||
@@ -835,6 +840,7 @@ register(
|
|||||||
help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
|
help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
|
||||||
category=_('Bulk Actions'),
|
category=_('Bulk Actions'),
|
||||||
category_slug='bulk',
|
category_slug='bulk',
|
||||||
|
hidden=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
register(
|
register(
|
||||||
@@ -845,6 +851,7 @@ register(
|
|||||||
help_text=_('Enable preview of new user interface.'),
|
help_text=_('Enable preview of new user interface.'),
|
||||||
category=_('System'),
|
category=_('System'),
|
||||||
category_slug='system',
|
category_slug='system',
|
||||||
|
hidden=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
register(
|
register(
|
||||||
@@ -922,6 +929,16 @@ register(
|
|||||||
category_slug='debug',
|
category_slug='debug',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
register(
|
||||||
|
'RECEPTOR_KEEP_WORK_ON_ERROR',
|
||||||
|
field_class=fields.BooleanField,
|
||||||
|
label=_('Keep receptor work on error'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Prevent receptor work from being released on when error is detected'),
|
||||||
|
category=('Debug'),
|
||||||
|
category_slug='debug',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def logging_validate(serializer, attrs):
|
def logging_validate(serializer, attrs):
|
||||||
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
|
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
|
||||||
@@ -948,3 +965,27 @@ def logging_validate(serializer, attrs):
|
|||||||
|
|
||||||
|
|
||||||
register_validate('logging', logging_validate)
|
register_validate('logging', logging_validate)
|
||||||
|
|
||||||
|
|
||||||
|
def csrf_trusted_origins_validate(serializer, attrs):
|
||||||
|
if not serializer.instance or not hasattr(serializer.instance, 'CSRF_TRUSTED_ORIGINS'):
|
||||||
|
return attrs
|
||||||
|
if 'CSRF_TRUSTED_ORIGINS' not in attrs:
|
||||||
|
return attrs
|
||||||
|
errors = []
|
||||||
|
for origin in attrs['CSRF_TRUSTED_ORIGINS']:
|
||||||
|
if "://" not in origin:
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
|
||||||
|
"setting must start with a scheme (usually http:// or "
|
||||||
|
"https://) but found %s. See the release notes for details." % origin,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if errors:
|
||||||
|
error_messages = [error.msg for error in errors]
|
||||||
|
raise serializers.ValidationError(_('\n'.join(error_messages)))
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
register_validate('system', csrf_trusted_origins_validate)
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ __all__ = [
|
|||||||
'STANDARD_INVENTORY_UPDATE_ENV',
|
'STANDARD_INVENTORY_UPDATE_ENV',
|
||||||
]
|
]
|
||||||
|
|
||||||
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
|
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
|
||||||
PRIVILEGE_ESCALATION_METHODS = [
|
PRIVILEGE_ESCALATION_METHODS = [
|
||||||
('sudo', _('Sudo')),
|
('sudo', _('Sudo')),
|
||||||
('su', _('Su')),
|
('su', _('Su')),
|
||||||
@@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
|
|||||||
}
|
}
|
||||||
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
|
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
|
||||||
ACTIVE_STATES = CAN_CANCEL
|
ACTIVE_STATES = CAN_CANCEL
|
||||||
|
ERROR_STATES = ('error',)
|
||||||
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
|
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
|
||||||
CENSOR_VALUE = '************'
|
CENSOR_VALUE = '************'
|
||||||
ENV_BLOCKLIST = frozenset(
|
ENV_BLOCKLIST = frozenset(
|
||||||
@@ -114,3 +115,28 @@ SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS = 'unique_managed_hosts'
|
|||||||
|
|
||||||
# Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
|
# Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
|
||||||
HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
|
HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
|
||||||
|
|
||||||
|
# Data for RBAC compatibility layer
|
||||||
|
role_name_to_perm_mapping = {
|
||||||
|
'adhoc_role': ['adhoc_'],
|
||||||
|
'approval_role': ['approve_'],
|
||||||
|
'auditor_role': ['audit_'],
|
||||||
|
'admin_role': ['change_', 'add_', 'delete_'],
|
||||||
|
'execute_role': ['execute_'],
|
||||||
|
'read_role': ['view_'],
|
||||||
|
'update_role': ['update_'],
|
||||||
|
'member_role': ['member_'],
|
||||||
|
'use_role': ['use_'],
|
||||||
|
}
|
||||||
|
|
||||||
|
org_role_to_permission = {
|
||||||
|
'notification_admin_role': 'add_notificationtemplate',
|
||||||
|
'project_admin_role': 'add_project',
|
||||||
|
'execute_role': 'execute_jobtemplate',
|
||||||
|
'inventory_admin_role': 'add_inventory',
|
||||||
|
'credential_admin_role': 'add_credential',
|
||||||
|
'workflow_admin_role': 'add_workflowjobtemplate',
|
||||||
|
'job_template_admin_role': 'change_jobtemplate', # TODO: this doesnt really work, solution not clear
|
||||||
|
'execution_environment_admin_role': 'add_executionenvironment',
|
||||||
|
'auditor_role': 'view_project', # TODO: also doesnt really work
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
|
from azure.keyvault.secrets import SecretClient
|
||||||
|
from azure.identity import ClientSecretCredential
|
||||||
|
from msrestazure import azure_cloud
|
||||||
|
|
||||||
from .plugin import CredentialPlugin
|
from .plugin import CredentialPlugin
|
||||||
|
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
|
|
||||||
from azure.common.credentials import ServicePrincipalCredentials
|
|
||||||
from msrestazure import azure_cloud
|
|
||||||
|
|
||||||
|
|
||||||
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
|
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
|
||||||
@@ -54,22 +55,9 @@ azure_keyvault_inputs = {
|
|||||||
|
|
||||||
|
|
||||||
def azure_keyvault_backend(**kwargs):
|
def azure_keyvault_backend(**kwargs):
|
||||||
url = kwargs['url']
|
csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
|
||||||
[cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]
|
kv = SecretClient(credential=csc, vault_url=kwargs['url'])
|
||||||
|
return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value
|
||||||
def auth_callback(server, resource, scope):
|
|
||||||
credentials = ServicePrincipalCredentials(
|
|
||||||
url=url,
|
|
||||||
client_id=kwargs['client'],
|
|
||||||
secret=kwargs['secret'],
|
|
||||||
tenant=kwargs['tenant'],
|
|
||||||
resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
|
|
||||||
)
|
|
||||||
token = credentials.token
|
|
||||||
return token['token_type'], token['access_token']
|
|
||||||
|
|
||||||
kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
|
|
||||||
return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
|
|
||||||
|
|
||||||
|
|
||||||
azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
|
azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
import psycopg
|
import psycopg
|
||||||
import select
|
import select
|
||||||
|
from copy import deepcopy
|
||||||
|
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
@@ -94,14 +95,15 @@ class PubSub(object):
|
|||||||
|
|
||||||
|
|
||||||
def create_listener_connection():
|
def create_listener_connection():
|
||||||
conf = settings.DATABASES['default'].copy()
|
conf = deepcopy(settings.DATABASES['default'])
|
||||||
conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
|
conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {}))
|
||||||
# Modify the application name to distinguish from other connections the process might use
|
# Modify the application name to distinguish from other connections the process might use
|
||||||
conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
|
conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
|
||||||
|
|
||||||
# Apply overrides specifically for the listener connection
|
# Apply overrides specifically for the listener connection
|
||||||
for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
|
for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
|
||||||
conf[k] = v
|
if k != 'OPTIONS':
|
||||||
|
conf[k] = v
|
||||||
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
|
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
|
||||||
conf['OPTIONS'][k] = v
|
conf['OPTIONS'][k] = v
|
||||||
|
|
||||||
|
|||||||
@@ -259,6 +259,12 @@ class AWXConsumerPG(AWXConsumerBase):
|
|||||||
current_downtime = time.time() - self.pg_down_time
|
current_downtime = time.time() - self.pg_down_time
|
||||||
if current_downtime > self.pg_max_wait:
|
if current_downtime > self.pg_max_wait:
|
||||||
logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
|
logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
|
||||||
|
# Sending QUIT to multiprocess queue to signal workers to exit
|
||||||
|
for worker in self.pool.workers:
|
||||||
|
try:
|
||||||
|
worker.quit()
|
||||||
|
except Exception:
|
||||||
|
logger.exception(f"Error sending QUIT to worker {worker}")
|
||||||
raise
|
raise
|
||||||
# Wait for a second before next attempt, but still listen for any shutdown signals
|
# Wait for a second before next attempt, but still listen for any shutdown signals
|
||||||
for i in range(10):
|
for i in range(10):
|
||||||
@@ -270,6 +276,12 @@ class AWXConsumerPG(AWXConsumerBase):
|
|||||||
except Exception:
|
except Exception:
|
||||||
# Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
|
# Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
|
||||||
logger.exception('Encountered unhandled error in dispatcher main loop')
|
logger.exception('Encountered unhandled error in dispatcher main loop')
|
||||||
|
# Sending QUIT to multiprocess queue to signal workers to exit
|
||||||
|
for worker in self.pool.workers:
|
||||||
|
try:
|
||||||
|
worker.quit()
|
||||||
|
except Exception:
|
||||||
|
logger.exception(f"Error sending QUIT to worker {worker}")
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
|
|||||||
kwargs.setdefault('related_name', '+')
|
kwargs.setdefault('related_name', '+')
|
||||||
kwargs.setdefault('null', 'True')
|
kwargs.setdefault('null', 'True')
|
||||||
kwargs.setdefault('editable', False)
|
kwargs.setdefault('editable', False)
|
||||||
kwargs.setdefault('on_delete', models.CASCADE)
|
kwargs.setdefault('on_delete', models.SET_NULL)
|
||||||
super(ImplicitRoleField, self).__init__(*args, **kwargs)
|
super(ImplicitRoleField, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
def deconstruct(self):
|
def deconstruct(self):
|
||||||
|
|||||||
12
awx/main/management/commands/check_instance_ready.py
Normal file
12
awx/main/management/commands/check_instance_ready.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from awx.main.models.ha import Instance
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = 'Check if the task manager instance is ready throw error if not ready, can be use as readiness probe for k8s.'
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
if Instance.objects.me().node_state != Instance.States.READY:
|
||||||
|
raise CommandError('Instance is not ready') # so that return code is not 0
|
||||||
|
|
||||||
|
return
|
||||||
@@ -2,6 +2,7 @@
|
|||||||
# All Rights Reserved
|
# All Rights Reserved
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.db import transaction
|
||||||
from crum import impersonate
|
from crum import impersonate
|
||||||
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
|
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
|
||||||
from awx.main.signals import disable_computed_fields
|
from awx.main.signals import disable_computed_fields
|
||||||
@@ -13,6 +14,12 @@ class Command(BaseCommand):
|
|||||||
help = 'Creates a preload tower data if there is none.'
|
help = 'Creates a preload tower data if there is none.'
|
||||||
|
|
||||||
def handle(self, *args, **kwargs):
|
def handle(self, *args, **kwargs):
|
||||||
|
# Wrap the operation in an atomic block, so we do not on accident
|
||||||
|
# create the organization but not create the project, etc.
|
||||||
|
with transaction.atomic():
|
||||||
|
self._handle()
|
||||||
|
|
||||||
|
def _handle(self):
|
||||||
changed = False
|
changed = False
|
||||||
|
|
||||||
# Create a default organization as the first superuser found.
|
# Create a default organization as the first superuser found.
|
||||||
@@ -43,10 +50,11 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
||||||
c, _ = Credential.objects.get_or_create(
|
c, _ = Credential.objects.get_or_create(
|
||||||
credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
|
credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser
|
||||||
)
|
)
|
||||||
|
|
||||||
c.admin_role.members.add(superuser)
|
if superuser:
|
||||||
|
c.admin_role.members.add(superuser)
|
||||||
|
|
||||||
public_galaxy_credential, _ = Credential.objects.get_or_create(
|
public_galaxy_credential, _ = Credential.objects.get_or_create(
|
||||||
name='Ansible Galaxy',
|
name='Ansible Galaxy',
|
||||||
|
|||||||
195
awx/main/management/commands/dump_auth_config.py
Normal file
195
awx/main/management/commands/dump_auth_config.py
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from awx.conf import settings_registry
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'
|
||||||
|
|
||||||
|
DAB_SAML_AUTHENTICATOR_KEYS = {
|
||||||
|
"SP_ENTITY_ID": True,
|
||||||
|
"SP_PUBLIC_CERT": True,
|
||||||
|
"SP_PRIVATE_KEY": True,
|
||||||
|
"ORG_INFO": True,
|
||||||
|
"TECHNICAL_CONTACT": True,
|
||||||
|
"SUPPORT_CONTACT": True,
|
||||||
|
"SP_EXTRA": False,
|
||||||
|
"SECURITY_CONFIG": False,
|
||||||
|
"EXTRA_DATA": False,
|
||||||
|
"ENABLED_IDPS": True,
|
||||||
|
"CALLBACK_URL": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
DAB_LDAP_AUTHENTICATOR_KEYS = {
|
||||||
|
"SERVER_URI": True,
|
||||||
|
"BIND_DN": False,
|
||||||
|
"BIND_PASSWORD": False,
|
||||||
|
"CONNECTION_OPTIONS": False,
|
||||||
|
"GROUP_TYPE": True,
|
||||||
|
"GROUP_TYPE_PARAMS": True,
|
||||||
|
"GROUP_SEARCH": False,
|
||||||
|
"START_TLS": False,
|
||||||
|
"USER_DN_TEMPLATE": True,
|
||||||
|
"USER_ATTR_MAP": True,
|
||||||
|
"USER_SEARCH": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
def is_enabled(self, settings, keys):
|
||||||
|
missing_fields = []
|
||||||
|
for key, required in keys.items():
|
||||||
|
if required and not settings.get(key):
|
||||||
|
missing_fields.append(key)
|
||||||
|
if missing_fields:
|
||||||
|
return False, missing_fields
|
||||||
|
return True, None
|
||||||
|
|
||||||
|
def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
|
||||||
|
awx_ldap_settings = {}
|
||||||
|
|
||||||
|
for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
|
||||||
|
key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
|
||||||
|
value = getattr(settings, awx_ldap_setting, None)
|
||||||
|
awx_ldap_settings[key] = value
|
||||||
|
|
||||||
|
grouped_settings = {}
|
||||||
|
|
||||||
|
for key, value in awx_ldap_settings.items():
|
||||||
|
match = re.search(r'(\d+)', key)
|
||||||
|
index = int(match.group()) if match else 0
|
||||||
|
new_key = re.sub(r'\d+_', '', key)
|
||||||
|
|
||||||
|
if index not in grouped_settings:
|
||||||
|
grouped_settings[index] = {}
|
||||||
|
|
||||||
|
grouped_settings[index][new_key] = value
|
||||||
|
if new_key == "GROUP_TYPE" and value:
|
||||||
|
grouped_settings[index][new_key] = type(value).__name__
|
||||||
|
|
||||||
|
if new_key == "SERVER_URI" and value:
|
||||||
|
value = value.split(", ")
|
||||||
|
grouped_settings[index][new_key] = value
|
||||||
|
|
||||||
|
if type(value).__name__ == "LDAPSearch":
|
||||||
|
data = []
|
||||||
|
data.append(value.base_dn)
|
||||||
|
data.append("SCOPE_SUBTREE")
|
||||||
|
data.append(value.filterstr)
|
||||||
|
grouped_settings[index][new_key] = data
|
||||||
|
|
||||||
|
return grouped_settings
|
||||||
|
|
||||||
|
def get_awx_saml_settings(self) -> dict[str, Any]:
|
||||||
|
awx_saml_settings = {}
|
||||||
|
for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
|
||||||
|
awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)
|
||||||
|
|
||||||
|
return awx_saml_settings
|
||||||
|
|
||||||
|
def format_config_data(self, enabled, awx_settings, type, keys, name):
|
||||||
|
config = {
|
||||||
|
"type": f"ansible_base.authentication.authenticator_plugins.{type}",
|
||||||
|
"name": name,
|
||||||
|
"enabled": enabled,
|
||||||
|
"create_objects": True,
|
||||||
|
"users_unique": False,
|
||||||
|
"remove_users": True,
|
||||||
|
"configuration": {},
|
||||||
|
}
|
||||||
|
for k in keys:
|
||||||
|
v = awx_settings.get(k)
|
||||||
|
config["configuration"].update({k: v})
|
||||||
|
|
||||||
|
if type == "saml":
|
||||||
|
idp_to_key_mapping = {
|
||||||
|
"url": "IDP_URL",
|
||||||
|
"x509cert": "IDP_X509_CERT",
|
||||||
|
"entity_id": "IDP_ENTITY_ID",
|
||||||
|
"attr_email": "IDP_ATTR_EMAIL",
|
||||||
|
"attr_groups": "IDP_GROUPS",
|
||||||
|
"attr_username": "IDP_ATTR_USERNAME",
|
||||||
|
"attr_last_name": "IDP_ATTR_LAST_NAME",
|
||||||
|
"attr_first_name": "IDP_ATTR_FIRST_NAME",
|
||||||
|
"attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
|
||||||
|
}
|
||||||
|
for idp_name in awx_settings.get("ENABLED_IDPS", {}):
|
||||||
|
for key in idp_to_key_mapping:
|
||||||
|
value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
|
||||||
|
if value is not None:
|
||||||
|
config["name"] = idp_name
|
||||||
|
config["configuration"].update({idp_to_key_mapping[key]: value})
|
||||||
|
|
||||||
|
return config
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"output_file",
|
||||||
|
nargs="?",
|
||||||
|
type=str,
|
||||||
|
default=None,
|
||||||
|
help="Output JSON file path",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
try:
|
||||||
|
data = []
|
||||||
|
|
||||||
|
# dump SAML settings
|
||||||
|
awx_saml_settings = self.get_awx_saml_settings()
|
||||||
|
awx_saml_enabled, saml_missing_fields = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
|
||||||
|
if awx_saml_enabled:
|
||||||
|
awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
|
||||||
|
data.append(
|
||||||
|
self.format_config_data(
|
||||||
|
awx_saml_enabled,
|
||||||
|
awx_saml_settings,
|
||||||
|
"saml",
|
||||||
|
self.DAB_SAML_AUTHENTICATOR_KEYS,
|
||||||
|
awx_saml_name,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
data.append({"SAML_missing_fields": saml_missing_fields})
|
||||||
|
|
||||||
|
# dump LDAP settings
|
||||||
|
awx_ldap_group_settings = self.get_awx_ldap_settings()
|
||||||
|
for awx_ldap_name, awx_ldap_settings in awx_ldap_group_settings.items():
|
||||||
|
awx_ldap_enabled, ldap_missing_fields = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
|
||||||
|
if awx_ldap_enabled:
|
||||||
|
data.append(
|
||||||
|
self.format_config_data(
|
||||||
|
awx_ldap_enabled,
|
||||||
|
awx_ldap_settings,
|
||||||
|
"ldap",
|
||||||
|
self.DAB_LDAP_AUTHENTICATOR_KEYS,
|
||||||
|
f"LDAP_{awx_ldap_name}",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
data.append({f"LDAP_{awx_ldap_name}_missing_fields": ldap_missing_fields})
|
||||||
|
|
||||||
|
# write to file if requested
|
||||||
|
if options["output_file"]:
|
||||||
|
# Define the path for the output JSON file
|
||||||
|
output_file = options["output_file"]
|
||||||
|
|
||||||
|
# Ensure the directory exists
|
||||||
|
os.makedirs(os.path.dirname(output_file), exist_ok=True)
|
||||||
|
|
||||||
|
# Write data to the JSON file
|
||||||
|
with open(output_file, "w") as f:
|
||||||
|
json.dump(data, f, indent=4)
|
||||||
|
|
||||||
|
self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
|
||||||
|
else:
|
||||||
|
self.stdout.write(json.dumps(data, indent=4))
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
|
||||||
|
sys.exit(1)
|
||||||
@@ -92,8 +92,6 @@ class Command(BaseCommand):
|
|||||||
return host_stats
|
return host_stats
|
||||||
|
|
||||||
def handle(self, *arg, **options):
|
def handle(self, *arg, **options):
|
||||||
WebsocketsMetricsServer().start()
|
|
||||||
|
|
||||||
# it's necessary to delay this import in case
|
# it's necessary to delay this import in case
|
||||||
# database migrations are still running
|
# database migrations are still running
|
||||||
from awx.main.models.ha import Instance
|
from awx.main.models.ha import Instance
|
||||||
@@ -103,8 +101,9 @@ class Command(BaseCommand):
|
|||||||
migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
|
migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
|
||||||
connection.close() # Because of async nature, main loop will use new connection, so close this
|
connection.close() # Because of async nature, main loop will use new connection, so close this
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
|
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
|
||||||
time.sleep(10)
|
# sleeping before logging because logging rely on setting which require database connection...
|
||||||
|
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
|
||||||
return
|
return
|
||||||
|
|
||||||
# In containerized deployments, migrations happen in the task container,
|
# In containerized deployments, migrations happen in the task container,
|
||||||
@@ -123,13 +122,14 @@ class Command(BaseCommand):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
my_hostname = Instance.objects.my_hostname()
|
my_hostname = Instance.objects.my_hostname() # This relies on settings.CLUSTER_HOST_ID which requires database connection
|
||||||
logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
|
logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
|
||||||
except RuntimeError as e:
|
except RuntimeError as e:
|
||||||
# the CLUSTER_HOST_ID in the task, and web instance must match and
|
# the CLUSTER_HOST_ID in the task, and web instance must match and
|
||||||
# ensure network connectivity between the task and web instance
|
# ensure network connectivity between the task and web instance
|
||||||
logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
|
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
|
||||||
time.sleep(5)
|
# sleeping before logging because logging rely on setting which require database connection...
|
||||||
|
logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
|
||||||
return
|
return
|
||||||
|
|
||||||
if options.get('status'):
|
if options.get('status'):
|
||||||
@@ -166,8 +166,16 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
|
WebsocketsMetricsServer().start()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
logger.info('Starting Websocket Relayer...')
|
||||||
websocket_relay_manager = WebSocketRelayManager()
|
websocket_relay_manager = WebSocketRelayManager()
|
||||||
asyncio.run(websocket_relay_manager.run())
|
asyncio.run(websocket_relay_manager.run())
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
logger.info('Terminating Websocket Relayer')
|
logger.info('Terminating Websocket Relayer')
|
||||||
|
except BaseException as e: # BaseException is used to catch all exceptions including asyncio.CancelledError
|
||||||
|
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
|
||||||
|
# sleeping before logging because logging rely on setting which require database connection...
|
||||||
|
logger.warning(f"Encounter error while running Websocket Relayer {e}, slept for 10s...")
|
||||||
|
return
|
||||||
|
|||||||
@@ -1,25 +1,25 @@
|
|||||||
# Copyright (c) 2015 Ansible, Inc.
|
# Copyright (c) 2015 Ansible, Inc.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
|
||||||
|
import functools
|
||||||
import logging
|
import logging
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
from pathlib import Path, PurePosixPath
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib.auth import logout
|
from django.contrib.auth import logout
|
||||||
from django.contrib.auth.models import User
|
from django.db.migrations.recorder import MigrationRecorder
|
||||||
from django.db.migrations.executor import MigrationExecutor
|
|
||||||
from django.db import connection
|
from django.db import connection
|
||||||
from django.shortcuts import redirect
|
from django.shortcuts import redirect
|
||||||
from django.apps import apps
|
|
||||||
from django.utils.deprecation import MiddlewareMixin
|
from django.utils.deprecation import MiddlewareMixin
|
||||||
from django.utils.translation import gettext_lazy as _
|
|
||||||
from django.urls import reverse, resolve
|
from django.urls import reverse, resolve
|
||||||
|
|
||||||
from awx.main.utils.named_url_graph import generate_graph, GraphNode
|
from awx.main import migrations
|
||||||
from awx.conf import fields, register
|
|
||||||
from awx.main.utils.profiling import AWXProfiler
|
from awx.main.utils.profiling import AWXProfiler
|
||||||
|
from awx.main.utils.common import memoize
|
||||||
|
from awx.urls import get_urlpatterns
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.middleware')
|
logger = logging.getLogger('awx.main.middleware')
|
||||||
@@ -97,49 +97,7 @@ class DisableLocalAuthMiddleware(MiddlewareMixin):
|
|||||||
logout(request)
|
logout(request)
|
||||||
|
|
||||||
|
|
||||||
def _customize_graph():
|
|
||||||
from awx.main.models import Instance, Schedule, UnifiedJobTemplate
|
|
||||||
|
|
||||||
for model in [Schedule, UnifiedJobTemplate]:
|
|
||||||
if model in settings.NAMED_URL_GRAPH:
|
|
||||||
settings.NAMED_URL_GRAPH[model].remove_bindings()
|
|
||||||
settings.NAMED_URL_GRAPH.pop(model)
|
|
||||||
if User not in settings.NAMED_URL_GRAPH:
|
|
||||||
settings.NAMED_URL_GRAPH[User] = GraphNode(User, ['username'], [])
|
|
||||||
settings.NAMED_URL_GRAPH[User].add_bindings()
|
|
||||||
if Instance not in settings.NAMED_URL_GRAPH:
|
|
||||||
settings.NAMED_URL_GRAPH[Instance] = GraphNode(Instance, ['hostname'], [])
|
|
||||||
settings.NAMED_URL_GRAPH[Instance].add_bindings()
|
|
||||||
|
|
||||||
|
|
||||||
class URLModificationMiddleware(MiddlewareMixin):
|
class URLModificationMiddleware(MiddlewareMixin):
|
||||||
def __init__(self, get_response):
|
|
||||||
models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
|
|
||||||
generate_graph(models)
|
|
||||||
_customize_graph()
|
|
||||||
register(
|
|
||||||
'NAMED_URL_FORMATS',
|
|
||||||
field_class=fields.DictField,
|
|
||||||
read_only=True,
|
|
||||||
label=_('Formats of all available named urls'),
|
|
||||||
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
|
|
||||||
category=_('Named URL'),
|
|
||||||
category_slug='named-url',
|
|
||||||
)
|
|
||||||
register(
|
|
||||||
'NAMED_URL_GRAPH_NODES',
|
|
||||||
field_class=fields.DictField,
|
|
||||||
read_only=True,
|
|
||||||
label=_('List of all named url graph nodes.'),
|
|
||||||
help_text=_(
|
|
||||||
'Read-only list of key-value pairs that exposes named URL graph topology.'
|
|
||||||
' Use this list to programmatically generate named URLs for resources'
|
|
||||||
),
|
|
||||||
category=_('Named URL'),
|
|
||||||
category_slug='named-url',
|
|
||||||
)
|
|
||||||
super().__init__(get_response)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _hijack_for_old_jt_name(node, kwargs, named_url):
|
def _hijack_for_old_jt_name(node, kwargs, named_url):
|
||||||
try:
|
try:
|
||||||
@@ -180,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _convert_named_url(cls, url_path):
|
def _convert_named_url(cls, url_path):
|
||||||
url_units = url_path.split('/')
|
default_prefix = PurePosixPath('/api/v2/')
|
||||||
# If the identifier is an empty string, it is always invalid.
|
optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
|
||||||
if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
|
|
||||||
return url_path
|
url_path_original = url_path
|
||||||
resource = url_units[3]
|
url_path = PurePosixPath(url_path)
|
||||||
|
|
||||||
|
if set(optional_prefix.parts).issubset(set(url_path.parts)):
|
||||||
|
url_prefix = optional_prefix
|
||||||
|
elif set(default_prefix.parts).issubset(set(url_path.parts)):
|
||||||
|
url_prefix = default_prefix
|
||||||
|
else:
|
||||||
|
return url_path_original
|
||||||
|
|
||||||
|
# Remove prefix
|
||||||
|
url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
|
||||||
|
try:
|
||||||
|
resource_path = PurePosixPath(url_path.parts[0])
|
||||||
|
name = url_path.parts[1]
|
||||||
|
url_suffix = PurePosixPath(*url_path.parts[2:]) # remove name and resource
|
||||||
|
except IndexError:
|
||||||
|
return url_path_original
|
||||||
|
|
||||||
|
resource = resource_path.parts[0]
|
||||||
if resource in settings.NAMED_URL_MAPPINGS:
|
if resource in settings.NAMED_URL_MAPPINGS:
|
||||||
url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
|
pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
|
||||||
return '/'.join(url_units)
|
else:
|
||||||
|
return url_path_original
|
||||||
|
|
||||||
|
parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
|
||||||
|
return PurePosixPath(*parts).as_posix() + '/'
|
||||||
|
|
||||||
def process_request(self, request):
|
def process_request(self, request):
|
||||||
old_path = request.path_info
|
old_path = request.path_info
|
||||||
@@ -198,9 +178,46 @@ class URLModificationMiddleware(MiddlewareMixin):
|
|||||||
request.path_info = new_path
|
request.path_info = new_path
|
||||||
|
|
||||||
|
|
||||||
|
@memoize(ttl=20)
|
||||||
|
def is_migrating():
|
||||||
|
latest_number = 0
|
||||||
|
latest_name = ''
|
||||||
|
for migration_path in Path(migrations.__path__[0]).glob('[0-9]*.py'):
|
||||||
|
try:
|
||||||
|
migration_number = int(migration_path.name.split('_', 1)[0])
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
if migration_number > latest_number:
|
||||||
|
latest_number = migration_number
|
||||||
|
latest_name = migration_path.name[: -len('.py')]
|
||||||
|
return not MigrationRecorder(connection).migration_qs.filter(app='main', name=latest_name).exists()
|
||||||
|
|
||||||
|
|
||||||
class MigrationRanCheckMiddleware(MiddlewareMixin):
|
class MigrationRanCheckMiddleware(MiddlewareMixin):
|
||||||
def process_request(self, request):
|
def process_request(self, request):
|
||||||
executor = MigrationExecutor(connection)
|
if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
|
||||||
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
|
|
||||||
if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
|
|
||||||
return redirect(reverse("ui:migrations_notran"))
|
return redirect(reverse("ui:migrations_notran"))
|
||||||
|
|
||||||
|
|
||||||
|
class OptionalURLPrefixPath(MiddlewareMixin):
|
||||||
|
@functools.lru_cache
|
||||||
|
def _url_optional(self, prefix):
|
||||||
|
# Relavant Django code path https://github.com/django/django/blob/stable/4.2.x/django/core/handlers/base.py#L300
|
||||||
|
#
|
||||||
|
# resolve_request(request)
|
||||||
|
# get_resolver(request.urlconf)
|
||||||
|
# _get_cached_resolver(request.urlconf) <-- cached via @functools.cache
|
||||||
|
#
|
||||||
|
# Django will attempt to cache the value(s) of request.urlconf
|
||||||
|
# Being hashable is a prerequisit for being cachable.
|
||||||
|
# tuple() is hashable list() is not.
|
||||||
|
# Hence the tuple(list()) wrap.
|
||||||
|
return tuple(get_urlpatterns(prefix=prefix))
|
||||||
|
|
||||||
|
def process_request(self, request):
|
||||||
|
prefix = settings.OPTIONAL_API_URLPATTERN_PREFIX
|
||||||
|
|
||||||
|
if request.path.startswith(f"/api/{prefix}"):
|
||||||
|
request.urlconf = self._url_optional(prefix)
|
||||||
|
else:
|
||||||
|
request.urlconf = 'awx.urls'
|
||||||
|
|||||||
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='execute_role',
|
name='execute_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='job_template_admin_role',
|
name='job_template_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='credential_admin_role',
|
name='credential_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='inventory_admin_role',
|
name='inventory_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='project_admin_role',
|
name='project_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='workflow_admin_role',
|
name='workflow_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='notification_admin_role',
|
name='notification_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
|
|||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
|
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='inventory',
|
model_name='inventory',
|
||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
|
|||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
|
parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
|
|||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
|
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
|
|||||||
name='execute_role',
|
name='execute_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['admin_role', 'organization.execute_role'],
|
parent_role=['admin_role', 'organization.execute_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
|
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
|
|||||||
name='execute_role',
|
name='execute_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
|
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'admin_role',
|
'admin_role',
|
||||||
'execute_role',
|
'execute_role',
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='member_role',
|
name='member_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'member_role',
|
'member_role',
|
||||||
'auditor_role',
|
'auditor_role',
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='approval_role',
|
name='approval_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
preserve_default='True',
|
preserve_default='True',
|
||||||
),
|
),
|
||||||
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.approval_role', 'admin_role'],
|
parent_role=['organization.approval_role', 'admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'member_role',
|
'member_role',
|
||||||
'auditor_role',
|
'auditor_role',
|
||||||
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
|
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.job_template_admin_role'],
|
parent_role=['organization.job_template_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['admin_role', 'organization.execute_role'],
|
parent_role=['admin_role', 'organization.execute_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
|
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='execution_environment_admin_role',
|
name='execution_environment_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
preserve_default='True',
|
preserve_default='True',
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'member_role',
|
'member_role',
|
||||||
'auditor_role',
|
'auditor_role',
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_administrator'],
|
parent_role=['singleton:system_administrator'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.role',
|
to='main.role',
|
||||||
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
|
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.role',
|
to='main.role',
|
||||||
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='instancegroup',
|
model_name='instancegroup',
|
||||||
name='use_role',
|
name='use_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
|
||||||
),
|
),
|
||||||
preserve_default='True',
|
preserve_default='True',
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -0,0 +1,58 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2024-02-15 20:51
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
('main', '0189_inbound_hop_nodes'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='inventorysource',
|
||||||
|
name='source',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
('file', 'File, Directory or Script'),
|
||||||
|
('constructed', 'Template additional groups and hostvars at runtime'),
|
||||||
|
('scm', 'Sourced from a Project'),
|
||||||
|
('ec2', 'Amazon EC2'),
|
||||||
|
('gce', 'Google Compute Engine'),
|
||||||
|
('azure_rm', 'Microsoft Azure Resource Manager'),
|
||||||
|
('vmware', 'VMware vCenter'),
|
||||||
|
('satellite6', 'Red Hat Satellite 6'),
|
||||||
|
('openstack', 'OpenStack'),
|
||||||
|
('rhv', 'Red Hat Virtualization'),
|
||||||
|
('controller', 'Red Hat Ansible Automation Platform'),
|
||||||
|
('insights', 'Red Hat Insights'),
|
||||||
|
('terraform', 'Terraform State'),
|
||||||
|
],
|
||||||
|
default=None,
|
||||||
|
max_length=32,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='inventoryupdate',
|
||||||
|
name='source',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
('file', 'File, Directory or Script'),
|
||||||
|
('constructed', 'Template additional groups and hostvars at runtime'),
|
||||||
|
('scm', 'Sourced from a Project'),
|
||||||
|
('ec2', 'Amazon EC2'),
|
||||||
|
('gce', 'Google Compute Engine'),
|
||||||
|
('azure_rm', 'Microsoft Azure Resource Manager'),
|
||||||
|
('vmware', 'VMware vCenter'),
|
||||||
|
('satellite6', 'Red Hat Satellite 6'),
|
||||||
|
('openstack', 'OpenStack'),
|
||||||
|
('rhv', 'Red Hat Virtualization'),
|
||||||
|
('controller', 'Red Hat Ansible Automation Platform'),
|
||||||
|
('insights', 'Red Hat Insights'),
|
||||||
|
('terraform', 'Terraform State'),
|
||||||
|
],
|
||||||
|
default=None,
|
||||||
|
max_length=32,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
85
awx/main/migrations/0191_add_django_permissions.py
Normal file
85
awx/main/migrations/0191_add_django_permissions.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2023-11-13 20:10
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
('main', '0190_alter_inventorysource_source_and_more'),
|
||||||
|
('dab_rbac', '__first__'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
# Add custom permissions for all special actions, like update, use, adhoc, and so on
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='credential',
|
||||||
|
options={'ordering': ('name',), 'permissions': [('use_credential', 'Can use credential in a job or related resource')]},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='instancegroup',
|
||||||
|
options={'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')]},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='inventory',
|
||||||
|
options={
|
||||||
|
'ordering': ('name',),
|
||||||
|
'permissions': [
|
||||||
|
('use_inventory', 'Can use inventory in a job template'),
|
||||||
|
('adhoc_inventory', 'Can run ad hoc commands'),
|
||||||
|
('update_inventory', 'Can update inventory sources in inventory'),
|
||||||
|
],
|
||||||
|
'verbose_name_plural': 'inventories',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='jobtemplate',
|
||||||
|
options={'ordering': ('name',), 'permissions': [('execute_jobtemplate', 'Can run this job template')]},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='project',
|
||||||
|
options={
|
||||||
|
'ordering': ('id',),
|
||||||
|
'permissions': [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='workflowjobtemplate',
|
||||||
|
options={
|
||||||
|
'permissions': [
|
||||||
|
('execute_workflowjobtemplate', 'Can run this workflow job template'),
|
||||||
|
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
|
||||||
|
]
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='organization',
|
||||||
|
options={
|
||||||
|
'default_permissions': ('change', 'delete', 'view'),
|
||||||
|
'ordering': ('name',),
|
||||||
|
'permissions': [
|
||||||
|
('member_organization', 'Basic participation permissions for organization'),
|
||||||
|
('audit_organization', 'Audit everything inside the organization'),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='team',
|
||||||
|
options={'ordering': ('organization__name', 'name'), 'permissions': [('member_team', 'Inherit all roles assigned to this team')]},
|
||||||
|
),
|
||||||
|
# Remove add default permission for a few models
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='jobtemplate',
|
||||||
|
options={
|
||||||
|
'default_permissions': ('change', 'delete', 'view'),
|
||||||
|
'ordering': ('name',),
|
||||||
|
'permissions': [('execute_jobtemplate', 'Can run this job template')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='instancegroup',
|
||||||
|
options={
|
||||||
|
'default_permissions': ('change', 'delete', 'view'),
|
||||||
|
'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
20
awx/main/migrations/0192_custom_roles.py
Normal file
20
awx/main/migrations/0192_custom_roles.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2023-11-21 02:06
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permissions_as_operation, setup_managed_role_definitions
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
('main', '0191_add_django_permissions'),
|
||||||
|
('dab_rbac', '__first__'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
# make sure permissions and content types have been created by now
|
||||||
|
# these normally run in a post_migrate signal but we need them for our logic
|
||||||
|
migrations.RunPython(create_permissions_as_operation, migrations.RunPython.noop),
|
||||||
|
migrations.RunPython(setup_managed_role_definitions, migrations.RunPython.noop),
|
||||||
|
migrations.RunPython(migrate_to_new_rbac, migrations.RunPython.noop),
|
||||||
|
]
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2024-05-08 07:29
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0192_custom_roles'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='notification',
|
||||||
|
name='notification_type',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
('awssns', 'AWS SNS'),
|
||||||
|
('email', 'Email'),
|
||||||
|
('grafana', 'Grafana'),
|
||||||
|
('irc', 'IRC'),
|
||||||
|
('mattermost', 'Mattermost'),
|
||||||
|
('pagerduty', 'Pagerduty'),
|
||||||
|
('rocketchat', 'Rocket.Chat'),
|
||||||
|
('slack', 'Slack'),
|
||||||
|
('twilio', 'Twilio'),
|
||||||
|
('webhook', 'Webhook'),
|
||||||
|
],
|
||||||
|
max_length=32,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='notificationtemplate',
|
||||||
|
name='notification_type',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
('awssns', 'AWS SNS'),
|
||||||
|
('email', 'Email'),
|
||||||
|
('grafana', 'Grafana'),
|
||||||
|
('irc', 'IRC'),
|
||||||
|
('mattermost', 'Mattermost'),
|
||||||
|
('pagerduty', 'Pagerduty'),
|
||||||
|
('rocketchat', 'Rocket.Chat'),
|
||||||
|
('slack', 'Slack'),
|
||||||
|
('twilio', 'Twilio'),
|
||||||
|
('webhook', 'Webhook'),
|
||||||
|
],
|
||||||
|
max_length=32,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
# Generated by Django 4.2.10 on 2024-06-12 19:59
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0193_alter_notification_notification_type_and_more'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='inventorysource',
|
||||||
|
name='source',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
('file', 'File, Directory or Script'),
|
||||||
|
('constructed', 'Template additional groups and hostvars at runtime'),
|
||||||
|
('scm', 'Sourced from a Project'),
|
||||||
|
('ec2', 'Amazon EC2'),
|
||||||
|
('gce', 'Google Compute Engine'),
|
||||||
|
('azure_rm', 'Microsoft Azure Resource Manager'),
|
||||||
|
('vmware', 'VMware vCenter'),
|
||||||
|
('satellite6', 'Red Hat Satellite 6'),
|
||||||
|
('openstack', 'OpenStack'),
|
||||||
|
('rhv', 'Red Hat Virtualization'),
|
||||||
|
('controller', 'Red Hat Ansible Automation Platform'),
|
||||||
|
('insights', 'Red Hat Insights'),
|
||||||
|
('terraform', 'Terraform State'),
|
||||||
|
('openshift_virtualization', 'OpenShift Virtualization'),
|
||||||
|
],
|
||||||
|
default=None,
|
||||||
|
max_length=32,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='inventoryupdate',
|
||||||
|
name='source',
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
('file', 'File, Directory or Script'),
|
||||||
|
('constructed', 'Template additional groups and hostvars at runtime'),
|
||||||
|
('scm', 'Sourced from a Project'),
|
||||||
|
('ec2', 'Amazon EC2'),
|
||||||
|
('gce', 'Google Compute Engine'),
|
||||||
|
('azure_rm', 'Microsoft Azure Resource Manager'),
|
||||||
|
('vmware', 'VMware vCenter'),
|
||||||
|
('satellite6', 'Red Hat Satellite 6'),
|
||||||
|
('openstack', 'OpenStack'),
|
||||||
|
('rhv', 'Red Hat Virtualization'),
|
||||||
|
('controller', 'Red Hat Ansible Automation Platform'),
|
||||||
|
('insights', 'Red Hat Insights'),
|
||||||
|
('terraform', 'Terraform State'),
|
||||||
|
('openshift_virtualization', 'OpenShift Virtualization'),
|
||||||
|
],
|
||||||
|
default=None,
|
||||||
|
max_length=32,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
26
awx/main/migrations/0195_EE_permissions.py
Normal file
26
awx/main/migrations/0195_EE_permissions.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2024-06-20 15:55
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def delete_execution_environment_read_role(apps, schema_editor):
|
||||||
|
permission_classes = [apps.get_model('auth', 'Permission'), apps.get_model('dab_rbac', 'DABPermission')]
|
||||||
|
for permission_cls in permission_classes:
|
||||||
|
ee_read_perm = permission_cls.objects.filter(codename='view_executionenvironment').first()
|
||||||
|
if ee_read_perm:
|
||||||
|
ee_read_perm.delete()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('main', '0194_alter_inventorysource_source_and_more'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='executionenvironment',
|
||||||
|
options={'default_permissions': ('add', 'change', 'delete'), 'ordering': ('-created',)},
|
||||||
|
),
|
||||||
|
migrations.RunPython(delete_execution_environment_read_role, migrations.RunPython.noop),
|
||||||
|
]
|
||||||
402
awx/main/migrations/_dab_rbac.py
Normal file
402
awx/main/migrations/_dab_rbac.py
Normal file
@@ -0,0 +1,402 @@
|
|||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from django.apps import apps as global_apps
|
||||||
|
from django.db.models import ForeignKey
|
||||||
|
from django.conf import settings
|
||||||
|
from ansible_base.rbac.migrations._utils import give_permissions
|
||||||
|
from ansible_base.rbac.management import create_dab_permissions
|
||||||
|
|
||||||
|
from awx.main.fields import ImplicitRoleField
|
||||||
|
from awx.main.constants import role_name_to_perm_mapping
|
||||||
|
|
||||||
|
from ansible_base.rbac.permission_registry import permission_registry
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger('awx.main.migrations._dab_rbac')
|
||||||
|
|
||||||
|
|
||||||
|
def create_permissions_as_operation(apps, schema_editor):
|
||||||
|
create_dab_permissions(global_apps.get_app_config("main"), apps=apps)
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Data structures and methods for the migration of old Role model to ObjectRole
|
||||||
|
"""
|
||||||
|
|
||||||
|
system_admin = ImplicitRoleField(name='system_administrator')
|
||||||
|
system_auditor = ImplicitRoleField(name='system_auditor')
|
||||||
|
system_admin.model = None
|
||||||
|
system_auditor.model = None
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_parent_role(f, role_path):
|
||||||
|
"""
|
||||||
|
Given a field and a path declared in parent_role from the field definition, like
|
||||||
|
execute_role = ImplicitRoleField(parent_role='admin_role')
|
||||||
|
This expects to be passed in (execute_role object, "admin_role")
|
||||||
|
It hould return the admin_role from that object
|
||||||
|
"""
|
||||||
|
if role_path == 'singleton:system_administrator':
|
||||||
|
return system_admin
|
||||||
|
elif role_path == 'singleton:system_auditor':
|
||||||
|
return system_auditor
|
||||||
|
else:
|
||||||
|
related_field = f
|
||||||
|
current_model = f.model
|
||||||
|
for related_field_name in role_path.split('.'):
|
||||||
|
related_field = current_model._meta.get_field(related_field_name)
|
||||||
|
if isinstance(related_field, ForeignKey) and not isinstance(related_field, ImplicitRoleField):
|
||||||
|
current_model = related_field.related_model
|
||||||
|
return related_field
|
||||||
|
|
||||||
|
|
||||||
|
def build_role_map(apps):
    """
    For the old Role model, build and return dictionaries (parents, children)
    that give a global mapping of the ImplicitRoleField instances according
    to the role hierarchy graph.
    """
    parent_map = {}
    child_map = {}

    # Gather every ImplicitRoleField on every model, plus the singleton stand-ins.
    field_set = {system_admin, system_auditor}
    for model in set(apps.get_app_config('main').get_models()):
        field_set.update(f for f in model._meta.get_fields() if isinstance(f, ImplicitRoleField))

    for field in field_set:
        if field.parent_role is None:
            continue

        if isinstance(field.parent_role, str):
            parent_names = [field.parent_role]
        else:
            parent_names = field.parent_role

        # SPECIAL CASE: organization auditor_role is not a child of admin_role
        # this makes no practical sense and conflicts with expected managed role
        # so we put it in as a hack here
        if field.name == 'auditor_role' and field.model._meta.model_name == 'organization':
            parent_names.append('admin_role')

        parent_map[field] = [resolve_parent_role(field, rel_name) for rel_name in parent_names]

    # Invert the parents lookup to produce the children lookup.
    for child_field, parent_fields in parent_map.items():
        for parent_field in parent_fields:
            child_map.setdefault(parent_field, []).append(child_field)

    return (parent_map, child_map)
|
||||||
|
|
||||||
|
|
||||||
|
def get_descendents(f, children_map):
    """
    Given ImplicitRoleField f and the children mapping, return the set of all
    descendent fields of f, including f itself.
    """
    result = {f}
    for child in children_map.get(f, ()):
        result |= get_descendents(child, children_map)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_permissions_for_role(role_field, children_map, apps):
    """Return the list of DABPermission objects an old-style role field grants.

    Walks role_field and all of its descendent role fields (per children_map),
    translating each role name into permission codename prefixes via
    role_name_to_perm_mapping, then applies several special cases for
    org-level roles that existed outside the normal mapping.
    """
    Permission = apps.get_model('dab_rbac', 'DABPermission')
    ContentType = apps.get_model('contenttypes', 'ContentType')

    perm_list = []
    for child_field in get_descendents(role_field, children_map):
        if child_field.name in role_name_to_perm_mapping:
            for perm_name in role_name_to_perm_mapping[child_field.name]:
                if perm_name == 'add_' and role_field.model._meta.model_name != 'organization':
                    continue  # only organizations can contain add permissions
                # Match by codename prefix for the descendent field's model.
                perm = Permission.objects.filter(content_type=ContentType.objects.get_for_model(child_field.model), codename__startswith=perm_name).first()
                if perm is not None and perm not in perm_list:
                    perm_list.append(perm)

    # special case for two models that have object roles but no organization roles in old system
    if role_field.name == 'notification_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
        ct = ContentType.objects.get_for_model(apps.get_model('main', 'NotificationTemplate'))
        perm_list.extend(list(Permission.objects.filter(content_type=ct)))
    if role_field.name == 'execution_environment_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
        ct = ContentType.objects.get_for_model(apps.get_model('main', 'ExecutionEnvironment'))
        perm_list.extend(list(Permission.objects.filter(content_type=ct)))

    # more special cases for those same above special org-level roles
    if role_field.name == 'auditor_role':
        perm_list.append(Permission.objects.get(codename='view_notificationtemplate'))

    return perm_list
|
||||||
|
|
||||||
|
|
||||||
|
def model_class(ct, apps):
    """
    Return the model class for a content type using the migration apps registry.

    You can not use model methods in migrations, so this duplicates what
    ContentType.model_class does; returns None when the model no longer exists.
    """
    model = None
    try:
        model = apps.get_model(ct.app_label, ct.model)
    except LookupError:
        pass
    return model
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_to_new_rbac(apps, schema_editor):
    """
    This method moves the assigned permissions from the old rbac.py models
    to the new RoleDefinition and ObjectRole models.

    Runs against historical models from the migration ``apps`` registry;
    ``schema_editor`` is unused but required by the RunPython signature.
    """
    Role = apps.get_model('main', 'Role')
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
    Permission = apps.get_model('dab_rbac', 'DABPermission')

    # remove add permissions that are not valid for migrations from old versions
    for perm_str in ('add_organization', 'add_jobtemplate'):
        perm = Permission.objects.filter(codename=perm_str).first()
        if perm:
            perm.delete()

    # Index existing managed role definitions by their frozen permission-id set
    # so equivalent old roles can reuse them instead of creating duplicates.
    managed_definitions = dict()
    for role_definition in RoleDefinition.objects.filter(managed=True):
        permissions = frozenset(role_definition.permissions.values_list('id', flat=True))
        managed_definitions[permissions] = role_definition

    # Build map of old role model
    parents, children = build_role_map(apps)

    # NOTE: this import is expected to break at some point, and then just move the data here
    from awx.main.models.rbac import role_descriptions

    for role in Role.objects.prefetch_related('members', 'parents').iterator():
        if role.singleton_name:
            continue  # only bothering to migrate object roles

        # Parents that are not implicit are team roles granting this role to a team.
        team_roles = []
        for parent in role.parents.all():
            if parent.id not in json.loads(role.implicit_parents):
                team_roles.append(parent)

        # we will not create any roles that do not have any users or teams
        if not (role.members.all() or team_roles):
            logger.debug(f'Skipping role {role.role_field} for {role.content_type.model}-{role.object_id} due to no members')
            continue

        # get a list of permissions that the old role would grant
        object_cls = apps.get_model(f'main.{role.content_type.model}')
        object = object_cls.objects.get(pk=role.object_id)  # WORKAROUND, role.content_object does not work in migrations
        f = object._meta.get_field(role.role_field)  # should be ImplicitRoleField
        perm_list = get_permissions_for_role(f, children, apps)

        permissions = frozenset(perm.id for perm in perm_list)

        # With the needed permissions established, obtain the RoleDefinition this will need, priorities:
        # 1. If it exists as a managed RoleDefinition then obviously use that
        # 2. If we already created this for a prior role, use that
        # 3. Create a new RoleDefinition that lists those permissions
        if permissions in managed_definitions:
            role_definition = managed_definitions[permissions]
        else:
            action = role.role_field.rsplit('_', 1)[0]  # remove the _role ending of the name
            role_definition_name = f'{model_class(role.content_type, apps).__name__} {action.title()}'

            description = role_descriptions[role.role_field]
            if isinstance(description, dict):
                if role.content_type.model in description:
                    description = description.get(role.content_type.model)
                else:
                    description = description.get('default')
            if '%s' in description:
                description = description % role.content_type.model

            role_definition, created = RoleDefinition.objects.get_or_create(
                name=role_definition_name,
                defaults={'description': description, 'content_type_id': role.content_type_id},
            )

            if created:
                logger.info(f'Created custom Role Definition {role_definition_name}, pk={role_definition.pk}')
                role_definition.permissions.set(perm_list)

        # Create the object role and add users to it
        give_permissions(
            apps,
            role_definition,
            users=role.members.all(),
            teams=[tr.object_id for tr in team_roles],
            object_id=role.object_id,
            content_type_id=role.content_type_id,
        )

    # Create new replacement system auditor role
    new_system_auditor, created = RoleDefinition.objects.get_or_create(
        name='System Auditor',
        defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
    )
    new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))

    # migrate is_system_auditor flag, because it is no longer handled by a system role
    old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first()
    if old_system_auditor:
        # if the system auditor role is not present, this is a new install and no users should exist
        ct = 0
        # BUGFIX: iterate the singleton role's members; the original iterated
        # `role.members`, a leftover loop variable from the object-role loop
        # above, which migrated the wrong users (or crashed on empty installs).
        for user in old_system_auditor.members.all():
            RoleUserAssignment.objects.create(user=user, role_definition=new_system_auditor)
            ct += 1
        if ct:
            logger.info(f'Migrated {ct} users to new system auditor flag')
|
||||||
|
|
||||||
|
|
||||||
|
def get_or_create_managed(name, description, ct, permissions, RoleDefinition):
    """Create or sync a managed RoleDefinition and return it.

    Looks up (or creates) the definition by ``name``, replaces its permission
    list with ``permissions``, and ensures the ``managed`` flag is set even if
    the definition pre-existed as unmanaged.  Logs newly created definitions.
    """
    role_definition, created = RoleDefinition.objects.get_or_create(name=name, defaults={'managed': True, 'description': description, 'content_type': ct})
    role_definition.permissions.set(list(permissions))

    # A pre-existing unmanaged definition with the same name is adopted as managed.
    if not role_definition.managed:
        role_definition.managed = True
        role_definition.save(update_fields=['managed'])

    if created:
        # BUGFIX: the message promises a pk but formatted the whole object;
        # log role_definition.pk instead.
        logger.info(f'Created RoleDefinition {role_definition.name} pk={role_definition.pk} with {len(permissions)} permissions')

    return role_definition
|
||||||
|
|
||||||
|
|
||||||
|
def setup_managed_role_definitions(apps, schema_editor):
    """
    Idempotent method to create or sync the managed role definitions.

    Builds per-model admin roles, org-scoped child-admin roles, and
    "special action" roles for every registered model, plus the org-wide
    admin/audit/execute/approval roles, then deletes any managed role
    definitions that are no longer expected.
    """
    # Templates for the names of managed role definitions to create.
    to_create = {
        'object_admin': '{cls.__name__} Admin',
        'org_admin': 'Organization Admin',
        'org_children': 'Organization {cls.__name__} Admin',
        'special': '{cls.__name__} {action}',
    }

    ContentType = apps.get_model('contenttypes', 'ContentType')
    Permission = apps.get_model('dab_rbac', 'DABPermission')
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    Organization = apps.get_model(settings.ANSIBLE_BASE_ORGANIZATION_MODEL)
    org_ct = ContentType.objects.get_for_model(Organization)
    managed_role_definitions = []

    # Accumulates every permission that the organization admin role should carry.
    org_perms = set()
    for cls in permission_registry.all_registered_models:
        ct = ContentType.objects.get_for_model(cls)
        cls_name = cls._meta.model_name
        object_perms = set(Permission.objects.filter(content_type=ct))
        # Special case for InstanceGroup which has an organization field, but is not an organization child object
        if cls_name != 'instancegroup':
            org_perms.update(object_perms)

        if 'object_admin' in to_create and cls_name != 'organization':
            # Object-level admin gets everything except add_ permissions,
            # which only make sense at the organization level.
            indiv_perms = object_perms.copy()
            add_perms = [perm for perm in indiv_perms if perm.codename.startswith('add_')]
            if add_perms:
                for perm in add_perms:
                    indiv_perms.remove(perm)

            managed_role_definitions.append(
                get_or_create_managed(
                    to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
                )
            )

        if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
            # Org-scoped admin of this model type also needs to see the organization.
            org_child_perms = object_perms.copy()
            org_child_perms.add(Permission.objects.get(codename='view_organization'))

            managed_role_definitions.append(
                get_or_create_managed(
                    to_create['org_children'].format(cls=cls),
                    f'Has all permissions to {cls._meta.verbose_name_plural} within an organization',
                    org_ct,
                    org_child_perms,
                    RoleDefinition,
                )
            )

        if 'special' in to_create:
            # "Special" actions are any codenames beyond the standard CRUD/audit set
            # (e.g. execute_, use_, adhoc_, approve_).
            special_perms = []
            for perm in object_perms:
                # Organization auditor is handled separately
                if perm.codename.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit'):
                    special_perms.append(perm)
            for perm in special_perms:
                action = perm.codename.split('_')[0]
                view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
                perm_list = [perm, view_perm]
                # Handle special-case where adhoc role also listed use permission
                if action == 'adhoc':
                    for other_perm in object_perms:
                        if other_perm.codename == 'use_inventory':
                            perm_list.append(other_perm)
                            break
                managed_role_definitions.append(
                    get_or_create_managed(
                        to_create['special'].format(cls=cls, action=action.title()),
                        f'Has {action} permissions to a single {cls._meta.verbose_name}',
                        ct,
                        perm_list,
                        RoleDefinition,
                    )
                )

    if 'org_admin' in to_create:
        managed_role_definitions.append(
            get_or_create_managed(
                to_create['org_admin'].format(cls=Organization),
                'Has all permissions to a single organization and all objects inside of it',
                org_ct,
                org_perms,
                RoleDefinition,
            )
        )

    # Special "organization action" roles
    audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
    audit_permissions.append(Permission.objects.get(codename='audit_organization'))
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Audit',
            'Has permission to view all objects inside of a single organization',
            org_ct,
            audit_permissions,
            RoleDefinition,
        )
    )

    org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Execute',
            'Has permission to execute all runnable objects in the organization',
            org_ct,
            [perm for perm in org_perms if perm.codename in org_execute_permissions],
            RoleDefinition,
        )
    )

    org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Approval',
            'Has permission to approve any workflow steps within a single organization',
            org_ct,
            [perm for perm in org_perms if perm.codename in org_approval_permissions],
            RoleDefinition,
        )
    )

    # Any managed definition not (re)created above is stale — remove it.
    unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
    for role_definition in unexpected_role_definitions:
        logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
        role_definition.delete()
|
||||||
@@ -1,12 +1,19 @@
|
|||||||
# Copyright (c) 2015 Ansible, Inc.
|
# Copyright (c) 2015 Ansible, Inc.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.conf import settings # noqa
|
from django.conf import settings # noqa
|
||||||
from django.db import connection
|
from django.db import connection
|
||||||
from django.db.models.signals import pre_delete # noqa
|
from django.db.models.signals import pre_delete # noqa
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.resource_registry.fields import AnsibleResourceField
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment
|
||||||
from ansible_base.lib.utils.models import prevent_search
|
from ansible_base.lib.utils.models import prevent_search
|
||||||
|
from ansible_base.lib.utils.models import user_summary_fields
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa
|
from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa
|
||||||
@@ -99,6 +106,8 @@ from awx.main.access import get_user_queryset, check_user_access, check_user_acc
|
|||||||
User.add_to_class('get_queryset', get_user_queryset)
|
User.add_to_class('get_queryset', get_user_queryset)
|
||||||
User.add_to_class('can_access', check_user_access)
|
User.add_to_class('can_access', check_user_access)
|
||||||
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
|
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
|
||||||
|
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))
|
||||||
|
User.add_to_class('summary_fields', user_summary_fields)
|
||||||
|
|
||||||
|
|
||||||
def convert_jsonfields():
|
def convert_jsonfields():
|
||||||
@@ -167,17 +176,17 @@ pre_delete.connect(cleanup_created_modified_by, sender=User)
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def user_get_organizations(user):
|
def user_get_organizations(user):
|
||||||
return Organization.objects.filter(member_role__members=user)
|
return Organization.access_qs(user, 'member')
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user_get_admin_of_organizations(user):
|
def user_get_admin_of_organizations(user):
|
||||||
return Organization.objects.filter(admin_role__members=user)
|
return Organization.access_qs(user, 'change')
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user_get_auditor_of_organizations(user):
|
def user_get_auditor_of_organizations(user):
|
||||||
return Organization.objects.filter(auditor_role__members=user)
|
return Organization.access_qs(user, 'audit')
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -191,11 +200,21 @@ User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
|
|||||||
User.add_to_class('created', created)
|
User.add_to_class('created', created)
|
||||||
|
|
||||||
|
|
||||||
|
def get_system_auditor_role():
|
||||||
|
rd, created = RoleDefinition.objects.get_or_create(
|
||||||
|
name='System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
|
||||||
|
)
|
||||||
|
if created:
|
||||||
|
rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view')))
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user_is_system_auditor(user):
|
def user_is_system_auditor(user):
|
||||||
if not hasattr(user, '_is_system_auditor'):
|
if not hasattr(user, '_is_system_auditor'):
|
||||||
if user.pk:
|
if user.pk:
|
||||||
user._is_system_auditor = user.roles.filter(singleton_name='system_auditor', role_field='system_auditor').exists()
|
rd = get_system_auditor_role()
|
||||||
|
user._is_system_auditor = RoleUserAssignment.objects.filter(user=user, role_definition=rd).exists()
|
||||||
else:
|
else:
|
||||||
# Odd case where user is unsaved, this should never be relied on
|
# Odd case where user is unsaved, this should never be relied on
|
||||||
return False
|
return False
|
||||||
@@ -209,17 +228,17 @@ def user_is_system_auditor(user, tf):
|
|||||||
# time they've logged in, and we've just created the new User in this
|
# time they've logged in, and we've just created the new User in this
|
||||||
# request), we need one to set up the system auditor role
|
# request), we need one to set up the system auditor role
|
||||||
user.save()
|
user.save()
|
||||||
if tf:
|
rd = get_system_auditor_role()
|
||||||
role = Role.singleton('system_auditor')
|
assignment = RoleUserAssignment.objects.filter(user=user, role_definition=rd).first()
|
||||||
# must check if member to not duplicate activity stream
|
prior_value = bool(assignment)
|
||||||
if user not in role.members.all():
|
if prior_value != bool(tf):
|
||||||
role.members.add(user)
|
if assignment:
|
||||||
user._is_system_auditor = True
|
assignment.delete()
|
||||||
else:
|
else:
|
||||||
role = Role.singleton('system_auditor')
|
rd.give_global_permission(user)
|
||||||
if user in role.members.all():
|
user._is_system_auditor = bool(tf)
|
||||||
role.members.remove(user)
|
entry = ActivityStream.objects.create(changes=json.dumps({"is_system_auditor": [prior_value, bool(tf)]}), object1='user', operation='update')
|
||||||
user._is_system_auditor = False
|
entry.user.add(user)
|
||||||
|
|
||||||
|
|
||||||
User.add_to_class('is_system_auditor', user_is_system_auditor)
|
User.add_to_class('is_system_auditor', user_is_system_auditor)
|
||||||
@@ -287,6 +306,10 @@ activity_stream_registrar.connect(WorkflowApprovalTemplate)
|
|||||||
activity_stream_registrar.connect(OAuth2Application)
|
activity_stream_registrar.connect(OAuth2Application)
|
||||||
activity_stream_registrar.connect(OAuth2AccessToken)
|
activity_stream_registrar.connect(OAuth2AccessToken)
|
||||||
|
|
||||||
|
# Register models
|
||||||
|
permission_registry.register(Project, Team, WorkflowJobTemplate, JobTemplate, Inventory, Organization, Credential, NotificationTemplate, ExecutionEnvironment)
|
||||||
|
permission_registry.register(InstanceGroup, parent_field_name=None) # Not part of an organization
|
||||||
|
|
||||||
# prevent API filtering on certain Django-supplied sensitive fields
|
# prevent API filtering on certain Django-supplied sensitive fields
|
||||||
prevent_search(User._meta.get_field('password'))
|
prevent_search(User._meta.get_field('password'))
|
||||||
prevent_search(OAuth2AccessToken._meta.get_field('token'))
|
prevent_search(OAuth2AccessToken._meta.get_field('token'))
|
||||||
|
|||||||
@@ -7,6 +7,9 @@ from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.lib.utils.models import get_type_for_model
|
||||||
|
|
||||||
# Django-CRUM
|
# Django-CRUM
|
||||||
from crum import get_current_user
|
from crum import get_current_user
|
||||||
|
|
||||||
@@ -139,6 +142,23 @@ class BaseModel(models.Model):
|
|||||||
self.save(update_fields=update_fields)
|
self.save(update_fields=update_fields)
|
||||||
return update_fields
|
return update_fields
|
||||||
|
|
||||||
|
def summary_fields(self):
|
||||||
|
"""
|
||||||
|
This exists for use by django-ansible-base,
|
||||||
|
which has standard patterns that differ from AWX, but we enable views from DAB
|
||||||
|
for those views to list summary_fields for AWX models, those models need to provide this
|
||||||
|
"""
|
||||||
|
from awx.api.serializers import SUMMARIZABLE_FK_FIELDS
|
||||||
|
|
||||||
|
model_name = get_type_for_model(self)
|
||||||
|
related_fields = SUMMARIZABLE_FK_FIELDS.get(model_name, {})
|
||||||
|
summary_data = {}
|
||||||
|
for field_name in related_fields:
|
||||||
|
fval = getattr(self, field_name, None)
|
||||||
|
if fval is not None:
|
||||||
|
summary_data[field_name] = fval
|
||||||
|
return summary_data
|
||||||
|
|
||||||
|
|
||||||
class CreatedModifiedModel(BaseModel):
|
class CreatedModifiedModel(BaseModel):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -15,12 +15,16 @@ from jinja2 import sandbox
|
|||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.utils.translation import gettext_lazy as _, gettext_noop
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils.encoding import force_str
|
from django.utils.encoding import force_str
|
||||||
from django.utils.functional import cached_property
|
from django.utils.functional import cached_property
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
|
from django.contrib.auth.models import User
|
||||||
|
|
||||||
|
# DRF
|
||||||
|
from rest_framework.serializers import ValidationError as DRFValidationError
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
@@ -41,8 +45,9 @@ from awx.main.models.rbac import (
|
|||||||
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
||||||
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
||||||
)
|
)
|
||||||
|
from awx.main.models import Team, Organization
|
||||||
from awx.main.utils import encrypt_field
|
from awx.main.utils import encrypt_field
|
||||||
from . import injectors as builtin_injectors
|
from awx_plugins.credentials import injectors as builtin_injectors
|
||||||
|
|
||||||
__all__ = ['Credential', 'CredentialType', 'CredentialInputSource', 'build_safe_env']
|
__all__ = ['Credential', 'CredentialType', 'CredentialInputSource', 'build_safe_env']
|
||||||
|
|
||||||
@@ -83,6 +88,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
|||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
unique_together = ('organization', 'name', 'credential_type')
|
unique_together = ('organization', 'name', 'credential_type')
|
||||||
|
permissions = [('use_credential', 'Can use credential in a job or related resource')]
|
||||||
|
|
||||||
PASSWORD_FIELDS = ['inputs']
|
PASSWORD_FIELDS = ['inputs']
|
||||||
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
|
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
|
||||||
@@ -314,6 +320,16 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
|||||||
else:
|
else:
|
||||||
raise ValueError('{} is not a dynamic input field'.format(field_name))
|
raise ValueError('{} is not a dynamic input field'.format(field_name))
|
||||||
|
|
||||||
|
def validate_role_assignment(self, actor, role_definition):
|
||||||
|
if self.organization:
|
||||||
|
if isinstance(actor, User):
|
||||||
|
if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
|
||||||
|
return
|
||||||
|
if isinstance(actor, Team):
|
||||||
|
if actor.organization == self.organization:
|
||||||
|
return
|
||||||
|
raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
|
||||||
|
|
||||||
|
|
||||||
class CredentialType(CommonModelNameNotUnique):
|
class CredentialType(CommonModelNameNotUnique):
|
||||||
"""
|
"""
|
||||||
@@ -585,658 +601,6 @@ class ManagedCredentialType(SimpleNamespace):
|
|||||||
return CredentialType(**self.get_creation_params())
|
return CredentialType(**self.get_creation_params())
|
||||||
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='ssh',
|
|
||||||
kind='ssh',
|
|
||||||
name=gettext_noop('Machine'),
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
|
|
||||||
{'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
|
|
||||||
{
|
|
||||||
'id': 'ssh_public_key_data',
|
|
||||||
'label': gettext_noop('Signed SSH Certificate'),
|
|
||||||
'type': 'string',
|
|
||||||
'multiline': True,
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
|
|
||||||
{
|
|
||||||
'id': 'become_method',
|
|
||||||
'label': gettext_noop('Privilege Escalation Method'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Specify a method for "become" operations. This is equivalent to specifying the --become-method Ansible parameter.'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'become_username',
|
|
||||||
'label': gettext_noop('Privilege Escalation Username'),
|
|
||||||
'type': 'string',
|
|
||||||
},
|
|
||||||
{'id': 'become_password', 'label': gettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='scm',
|
|
||||||
kind='scm',
|
|
||||||
name=gettext_noop('Source Control'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True},
|
|
||||||
{'id': 'ssh_key_data', 'label': gettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
|
|
||||||
{'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='vault',
|
|
||||||
kind='vault',
|
|
||||||
name=gettext_noop('Vault'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'vault_password', 'label': gettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
|
|
||||||
{
|
|
||||||
'id': 'vault_id',
|
|
||||||
'label': gettext_noop('Vault Identifier'),
|
|
||||||
'type': 'string',
|
|
||||||
'format': 'vault_id',
|
|
||||||
'help_text': gettext_noop(
|
|
||||||
'Specify an (optional) Vault ID. This is '
|
|
||||||
'equivalent to specifying the --vault-id '
|
|
||||||
'Ansible parameter for providing multiple Vault '
|
|
||||||
'passwords. Note: this feature only works in '
|
|
||||||
'Ansible 2.4+.'
|
|
||||||
),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['vault_password'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='net',
|
|
||||||
kind='net',
|
|
||||||
name=gettext_noop('Network'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
|
|
||||||
{
|
|
||||||
'id': 'ssh_key_unlock',
|
|
||||||
'label': gettext_noop('Private Key Passphrase'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'authorize',
|
|
||||||
'label': gettext_noop('Authorize'),
|
|
||||||
'type': 'boolean',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'authorize_password',
|
|
||||||
'label': gettext_noop('Authorize Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'dependencies': {
|
|
||||||
'authorize_password': ['authorize'],
|
|
||||||
},
|
|
||||||
'required': ['username'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='aws',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Amazon Web Services'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'username', 'label': gettext_noop('Access Key'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Secret Key'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'security_token',
|
|
||||||
'label': gettext_noop('STS Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop(
|
|
||||||
'Security Token Service (STS) is a web service '
|
|
||||||
'that enables you to request temporary, '
|
|
||||||
'limited-privilege credentials for AWS Identity '
|
|
||||||
'and Access Management (IAM) users.'
|
|
||||||
),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['username', 'password'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='openstack',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('OpenStack'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password (API Key)'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'host',
|
|
||||||
'label': gettext_noop('Host (Authentication URL)'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('The host to authenticate with. For example, https://openstack.business.com/v2.0/'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'project',
|
|
||||||
'label': gettext_noop('Project (Tenant Name)'),
|
|
||||||
'type': 'string',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'project_domain_name',
|
|
||||||
'label': gettext_noop('Project (Domain Name)'),
|
|
||||||
'type': 'string',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'domain',
|
|
||||||
'label': gettext_noop('Domain Name'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop(
|
|
||||||
'OpenStack domains define administrative boundaries. '
|
|
||||||
'It is only needed for Keystone v3 authentication '
|
|
||||||
'URLs. Refer to the documentation for '
|
|
||||||
'common scenarios.'
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'region',
|
|
||||||
'label': gettext_noop('Region Name'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('For some cloud providers, like OVH, region must be specified'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'verify_ssl',
|
|
||||||
'label': gettext_noop('Verify SSL'),
|
|
||||||
'type': 'boolean',
|
|
||||||
'default': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['username', 'password', 'host', 'project'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='vmware',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('VMware vCenter'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'host',
|
|
||||||
'label': gettext_noop('VCenter Host'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Enter the hostname or IP address that corresponds to your VMware vCenter.'),
|
|
||||||
},
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['host', 'username', 'password'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='satellite6',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Red Hat Satellite 6'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'host',
|
|
||||||
'label': gettext_noop('Satellite 6 URL'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat Satellite 6 server. For example, https://satellite.example.org'),
|
|
||||||
},
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['host', 'username', 'password'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='gce',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Google Compute Engine'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'username',
|
|
||||||
'label': gettext_noop('Service Account Email Address'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('The email address assigned to the Google Compute Engine service account.'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'project',
|
|
||||||
'label': 'Project',
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop(
|
|
||||||
'The Project ID is the GCE assigned identification. '
|
|
||||||
'It is often constructed as three words or two words '
|
|
||||||
'followed by a three-digit number. Examples: project-id-000 '
|
|
||||||
'and another-project-id'
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'ssh_key_data',
|
|
||||||
'label': gettext_noop('RSA Private Key'),
|
|
||||||
'type': 'string',
|
|
||||||
'format': 'ssh_private_key',
|
|
||||||
'secret': True,
|
|
||||||
'multiline': True,
|
|
||||||
'help_text': gettext_noop('Paste the contents of the PEM file associated with the service account email.'),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['username', 'ssh_key_data'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='azure_rm',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Microsoft Azure Resource Manager'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'subscription',
|
|
||||||
'label': gettext_noop('Subscription ID'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Subscription ID is an Azure construct, which is mapped to a username.'),
|
|
||||||
},
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{'id': 'client', 'label': gettext_noop('Client ID'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'secret',
|
|
||||||
'label': gettext_noop('Client Secret'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{'id': 'tenant', 'label': gettext_noop('Tenant ID'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'cloud_environment',
|
|
||||||
'label': gettext_noop('Azure Cloud Environment'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when using Azure GovCloud or Azure stack.'),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['subscription'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='github_token',
|
|
||||||
kind='token',
|
|
||||||
name=gettext_noop('GitHub Personal Access Token'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'token',
|
|
||||||
'label': gettext_noop('Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop('This token needs to come from your profile settings in GitHub'),
|
|
||||||
}
|
|
||||||
],
|
|
||||||
'required': ['token'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='gitlab_token',
|
|
||||||
kind='token',
|
|
||||||
name=gettext_noop('GitLab Personal Access Token'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'token',
|
|
||||||
'label': gettext_noop('Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop('This token needs to come from your profile settings in GitLab'),
|
|
||||||
}
|
|
||||||
],
|
|
||||||
'required': ['token'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='bitbucket_dc_token',
|
|
||||||
kind='token',
|
|
||||||
name=gettext_noop('Bitbucket Data Center HTTP Access Token'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'token',
|
|
||||||
'label': gettext_noop('Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop('This token needs to come from your user settings in Bitbucket'),
|
|
||||||
}
|
|
||||||
],
|
|
||||||
'required': ['token'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='insights',
|
|
||||||
kind='insights',
|
|
||||||
name=gettext_noop('Insights'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True},
|
|
||||||
],
|
|
||||||
'required': ['username', 'password'],
|
|
||||||
},
|
|
||||||
injectors={
|
|
||||||
'extra_vars': {
|
|
||||||
"scm_username": "{{username}}",
|
|
||||||
"scm_password": "{{password}}",
|
|
||||||
},
|
|
||||||
'env': {
|
|
||||||
'INSIGHTS_USER': '{{username}}',
|
|
||||||
'INSIGHTS_PASSWORD': '{{password}}',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='rhv',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Red Hat Virtualization'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{'id': 'host', 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': gettext_noop('The host to authenticate with.')},
|
|
||||||
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'ca_file',
|
|
||||||
'label': gettext_noop('CA File'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Absolute file path to the CA file to use (optional)'),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['host', 'username', 'password'],
|
|
||||||
},
|
|
||||||
injectors={
|
|
||||||
# The duplication here is intentional; the ovirt4 inventory plugin
|
|
||||||
# writes a .ini file for authentication, while the ansible modules for
|
|
||||||
# ovirt4 use a separate authentication process that support
|
|
||||||
# environment variables; by injecting both, we support both
|
|
||||||
'file': {
|
|
||||||
'template': '\n'.join(
|
|
||||||
[
|
|
||||||
'[ovirt]',
|
|
||||||
'ovirt_url={{host}}',
|
|
||||||
'ovirt_username={{username}}',
|
|
||||||
'ovirt_password={{password}}',
|
|
||||||
'{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
},
|
|
||||||
'env': {'OVIRT_INI_PATH': '{{tower.filename}}', 'OVIRT_URL': '{{host}}', 'OVIRT_USERNAME': '{{username}}', 'OVIRT_PASSWORD': '{{password}}'},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='controller',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Red Hat Ansible Automation Platform'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'host',
|
|
||||||
'label': gettext_noop('Red Hat Ansible Automation Platform'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'username',
|
|
||||||
'label': gettext_noop('Username'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop(
|
|
||||||
'Red Hat Ansible Automation Platform username id to authenticate as.This should not be set if an OAuth token is being used.'
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'oauth_token',
|
|
||||||
'label': gettext_noop('OAuth Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop('An OAuth token to use to authenticate with.This should not be set if username/password are being used.'),
|
|
||||||
},
|
|
||||||
{'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False},
|
|
||||||
],
|
|
||||||
'required': ['host'],
|
|
||||||
},
|
|
||||||
injectors={
|
|
||||||
'env': {
|
|
||||||
'TOWER_HOST': '{{host}}',
|
|
||||||
'TOWER_USERNAME': '{{username}}',
|
|
||||||
'TOWER_PASSWORD': '{{password}}',
|
|
||||||
'TOWER_VERIFY_SSL': '{{verify_ssl}}',
|
|
||||||
'TOWER_OAUTH_TOKEN': '{{oauth_token}}',
|
|
||||||
'CONTROLLER_HOST': '{{host}}',
|
|
||||||
'CONTROLLER_USERNAME': '{{username}}',
|
|
||||||
'CONTROLLER_PASSWORD': '{{password}}',
|
|
||||||
'CONTROLLER_VERIFY_SSL': '{{verify_ssl}}',
|
|
||||||
'CONTROLLER_OAUTH_TOKEN': '{{oauth_token}}',
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='kubernetes_bearer_token',
|
|
||||||
kind='kubernetes',
|
|
||||||
name=gettext_noop('OpenShift or Kubernetes API Bearer Token'),
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'host',
|
|
||||||
'label': gettext_noop('OpenShift or Kubernetes API Endpoint'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'bearer_token',
|
|
||||||
'label': gettext_noop('API authentication bearer token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'verify_ssl',
|
|
||||||
'label': gettext_noop('Verify SSL'),
|
|
||||||
'type': 'boolean',
|
|
||||||
'default': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'ssl_ca_cert',
|
|
||||||
'label': gettext_noop('Certificate Authority data'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'multiline': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['host', 'bearer_token'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='registry',
|
|
||||||
kind='registry',
|
|
||||||
name=gettext_noop('Container Registry'),
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'host',
|
|
||||||
'label': gettext_noop('Authentication URL'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('Authentication endpoint for the container registry.'),
|
|
||||||
'default': 'quay.io',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'username',
|
|
||||||
'label': gettext_noop('Username'),
|
|
||||||
'type': 'string',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'password',
|
|
||||||
'label': gettext_noop('Password or Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop('A password or token used to authenticate with'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'verify_ssl',
|
|
||||||
'label': gettext_noop('Verify SSL'),
|
|
||||||
'type': 'boolean',
|
|
||||||
'default': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['host'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='galaxy_api_token',
|
|
||||||
kind='galaxy',
|
|
||||||
name=gettext_noop('Ansible Galaxy/Automation Hub API Token'),
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'url',
|
|
||||||
'label': gettext_noop('Galaxy Server URL'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('The URL of the Galaxy instance to connect to.'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'auth_url',
|
|
||||||
'label': gettext_noop('Auth Server URL'),
|
|
||||||
'type': 'string',
|
|
||||||
'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using SSO auth.'),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'id': 'token',
|
|
||||||
'label': gettext_noop('API Token'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'help_text': gettext_noop('A token to use for authentication against the Galaxy instance.'),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['url'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='gpg_public_key',
|
|
||||||
kind='cryptography',
|
|
||||||
name=gettext_noop('GPG Public Key'),
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'gpg_public_key',
|
|
||||||
'label': gettext_noop('GPG Public Key'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'multiline': True,
|
|
||||||
'help_text': gettext_noop('GPG Public Key used to validate content signatures.'),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['gpg_public_key'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
ManagedCredentialType(
|
|
||||||
namespace='terraform',
|
|
||||||
kind='cloud',
|
|
||||||
name=gettext_noop('Terraform backend configuration'),
|
|
||||||
managed=True,
|
|
||||||
inputs={
|
|
||||||
'fields': [
|
|
||||||
{
|
|
||||||
'id': 'configuration',
|
|
||||||
'label': gettext_noop('Backend configuration'),
|
|
||||||
'type': 'string',
|
|
||||||
'secret': True,
|
|
||||||
'multiline': True,
|
|
||||||
'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'required': ['configuration'],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class CredentialInputSource(PrimordialModel):
|
class CredentialInputSource(PrimordialModel):
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
@@ -1300,6 +664,7 @@ class CredentialInputSource(PrimordialModel):
|
|||||||
view_name = 'api:credential_input_source_detail'
|
view_name = 'api:credential_input_source_detail'
|
||||||
return reverse(view_name, kwargs={'pk': self.pk}, request=request)
|
return reverse(view_name, kwargs={'pk': self.pk}, request=request)
|
||||||
|
|
||||||
|
from awx_plugins.credentials.plugins import *
|
||||||
|
|
||||||
for ns, plugin in credential_plugins.items():
|
for ns, plugin in credential_plugins.items():
|
||||||
CredentialType.load_plugin(ns, plugin)
|
CredentialType.load_plugin(ns, plugin)
|
||||||
|
|||||||
@@ -4,11 +4,12 @@ import datetime
|
|||||||
from datetime import timezone
|
from datetime import timezone
|
||||||
import logging
|
import logging
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
import itertools
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import ObjectDoesNotExist
|
from django.core.exceptions import ObjectDoesNotExist
|
||||||
from django.db import models, DatabaseError
|
from django.db import models, DatabaseError, transaction
|
||||||
from django.db.models.functions import Cast
|
from django.db.models.functions import Cast
|
||||||
from django.utils.dateparse import parse_datetime
|
from django.utils.dateparse import parse_datetime
|
||||||
from django.utils.text import Truncator
|
from django.utils.text import Truncator
|
||||||
@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
|
|||||||
def _update_host_metrics(updated_hosts_list):
|
def _update_host_metrics(updated_hosts_list):
|
||||||
from awx.main.models import HostMetric # circular import
|
from awx.main.models import HostMetric # circular import
|
||||||
|
|
||||||
# bulk-create
|
|
||||||
current_time = now()
|
current_time = now()
|
||||||
HostMetric.objects.bulk_create(
|
|
||||||
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
|
# FUTURE:
|
||||||
)
|
# - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
|
||||||
# bulk-update
|
# - Ability to do ORM upserts *may* have been introduced in Django 5.0.
|
||||||
batch_start, batch_size = 0, 1000
|
# See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
|
||||||
while batch_start <= len(updated_hosts_list):
|
# Hopefully this will be fully ready for batch use by 5.2 LTS.
|
||||||
batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
|
|
||||||
HostMetric.objects.filter(hostname__in=batched_host_list).update(
|
args = [iter(updated_hosts_list)] * 500
|
||||||
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
|
for hosts in itertools.zip_longest(*args):
|
||||||
)
|
with transaction.atomic():
|
||||||
batch_start += batch_size
|
HostMetric.objects.bulk_create(
|
||||||
|
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
|
||||||
|
)
|
||||||
|
HostMetric.objects.filter(hostname__in=hosts).update(
|
||||||
|
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def job_verbosity(self):
|
def job_verbosity(self):
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
from django.db import models
|
from django.db import models
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
from rest_framework.exceptions import ValidationError
|
||||||
|
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
from awx.main.models.base import CommonModel
|
from awx.main.models.base import CommonModel
|
||||||
from awx.main.validators import validate_container_image_name
|
from awx.main.validators import validate_container_image_name
|
||||||
@@ -12,6 +14,8 @@ __all__ = ['ExecutionEnvironment']
|
|||||||
class ExecutionEnvironment(CommonModel):
|
class ExecutionEnvironment(CommonModel):
|
||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('-created',)
|
ordering = ('-created',)
|
||||||
|
# Remove view permission, as a temporary solution, defer to organization read permission
|
||||||
|
default_permissions = ('add', 'change', 'delete')
|
||||||
|
|
||||||
PULL_CHOICES = [
|
PULL_CHOICES = [
|
||||||
('always', _("Always pull container before running.")),
|
('always', _("Always pull container before running.")),
|
||||||
@@ -53,3 +57,12 @@ class ExecutionEnvironment(CommonModel):
|
|||||||
|
|
||||||
def get_absolute_url(self, request=None):
|
def get_absolute_url(self, request=None):
|
||||||
return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
|
return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
|
||||||
|
|
||||||
|
def validate_role_assignment(self, actor, role_definition):
|
||||||
|
if self.managed:
|
||||||
|
raise ValidationError({'object_id': _('Can not assign object roles to managed Execution Environments')})
|
||||||
|
if self.organization_id is None:
|
||||||
|
raise ValidationError({'object_id': _('Can not assign object roles to global Execution Environments')})
|
||||||
|
|
||||||
|
if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
|
||||||
|
raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})
|
||||||
|
|||||||
@@ -485,6 +485,9 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin, ResourceMi
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
|
permissions = [('use_instancegroup', 'Can use instance group in a preference list of a resource')]
|
||||||
|
# Since this has no direct organization field only superuser can add, so remove add permission
|
||||||
|
default_permissions = ('change', 'delete', 'view')
|
||||||
|
|
||||||
def set_default_policy_fields(self):
|
def set_default_policy_fields(self):
|
||||||
self.policy_instance_list = []
|
self.policy_instance_list = []
|
||||||
|
|||||||
@@ -10,8 +10,6 @@ import copy
|
|||||||
import os.path
|
import os.path
|
||||||
from urllib.parse import urljoin
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import models, connection
|
from django.db import models, connection
|
||||||
@@ -26,6 +24,7 @@ from django.db.models import Q
|
|||||||
from rest_framework.exceptions import ParseError
|
from rest_framework.exceptions import ParseError
|
||||||
|
|
||||||
from ansible_base.lib.utils.models import prevent_search
|
from ansible_base.lib.utils.models import prevent_search
|
||||||
|
from awx_plugins.inventory.plugins import PluginFileInjector
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
@@ -50,11 +49,9 @@ from awx.main.models.notifications import (
|
|||||||
NotificationTemplate,
|
NotificationTemplate,
|
||||||
JobNotificationMixin,
|
JobNotificationMixin,
|
||||||
)
|
)
|
||||||
from awx.main.models.credential.injectors import _openstack_data
|
|
||||||
from awx.main.utils import _inventory_updates
|
from awx.main.utils import _inventory_updates
|
||||||
from awx.main.utils.safe_yaml import sanitize_jinja
|
from awx.main.utils.safe_yaml import sanitize_jinja
|
||||||
from awx.main.utils.execution_environments import to_container_path, get_control_plane_execution_environment
|
from awx.main.utils.execution_environments import get_control_plane_execution_environment
|
||||||
from awx.main.utils.licensing import server_product_name
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'SmartInventoryMembership', 'HostMetric', 'HostMetricSummaryMonthly']
|
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'SmartInventoryMembership', 'HostMetric', 'HostMetricSummaryMonthly']
|
||||||
@@ -89,6 +86,11 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
|||||||
verbose_name_plural = _('inventories')
|
verbose_name_plural = _('inventories')
|
||||||
unique_together = [('name', 'organization')]
|
unique_together = [('name', 'organization')]
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
permissions = [
|
||||||
|
('use_inventory', 'Can use inventory in a job template'),
|
||||||
|
('adhoc_inventory', 'Can run ad hoc commands'),
|
||||||
|
('update_inventory', 'Can update inventory sources in inventory'),
|
||||||
|
]
|
||||||
|
|
||||||
organization = models.ForeignKey(
|
organization = models.ForeignKey(
|
||||||
'Organization',
|
'Organization',
|
||||||
@@ -925,6 +927,8 @@ class InventorySourceOptions(BaseModel):
|
|||||||
('rhv', _('Red Hat Virtualization')),
|
('rhv', _('Red Hat Virtualization')),
|
||||||
('controller', _('Red Hat Ansible Automation Platform')),
|
('controller', _('Red Hat Ansible Automation Platform')),
|
||||||
('insights', _('Red Hat Insights')),
|
('insights', _('Red Hat Insights')),
|
||||||
|
('terraform', _('Terraform State')),
|
||||||
|
('openshift_virtualization', _('OpenShift Virtualization')),
|
||||||
]
|
]
|
||||||
|
|
||||||
# From the options of the Django management base command
|
# From the options of the Django management base command
|
||||||
@@ -1034,7 +1038,7 @@ class InventorySourceOptions(BaseModel):
|
|||||||
def cloud_credential_validation(source, cred):
|
def cloud_credential_validation(source, cred):
|
||||||
if not source:
|
if not source:
|
||||||
return None
|
return None
|
||||||
if cred and source not in ('custom', 'scm'):
|
if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
|
||||||
# If a credential was provided, it's important that it matches
|
# If a credential was provided, it's important that it matches
|
||||||
# the actual inventory source being used (Amazon requires Amazon
|
# the actual inventory source being used (Amazon requires Amazon
|
||||||
# credentials; Rackspace requires Rackspace credentials; etc...)
|
# credentials; Rackspace requires Rackspace credentials; etc...)
|
||||||
@@ -1043,12 +1047,14 @@ class InventorySourceOptions(BaseModel):
|
|||||||
# Allow an EC2 source to omit the credential. If Tower is running on
|
# Allow an EC2 source to omit the credential. If Tower is running on
|
||||||
# an EC2 instance with an IAM Role assigned, boto will use credentials
|
# an EC2 instance with an IAM Role assigned, boto will use credentials
|
||||||
# from the instance metadata instead of those explicitly provided.
|
# from the instance metadata instead of those explicitly provided.
|
||||||
elif source in CLOUD_PROVIDERS and source != 'ec2':
|
elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
|
||||||
return _('Credential is required for a cloud source.')
|
return _('Credential is required for a cloud source.')
|
||||||
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
|
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
|
||||||
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
|
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
|
||||||
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
|
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
|
||||||
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
|
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
|
||||||
|
elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
|
||||||
|
return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_cloud_credential(self):
|
def get_cloud_credential(self):
|
||||||
@@ -1399,7 +1405,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
|
|||||||
return selected_groups
|
return selected_groups
|
||||||
|
|
||||||
|
|
||||||
class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
|
class CustomInventoryScript(CommonModelNameNotUnique):
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
@@ -1416,251 +1422,5 @@ class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
|
|||||||
return reverse('api:inventory_script_detail', kwargs={'pk': self.pk}, request=request)
|
return reverse('api:inventory_script_detail', kwargs={'pk': self.pk}, request=request)
|
||||||
|
|
||||||
|
|
||||||
class PluginFileInjector(object):
|
|
||||||
plugin_name = None # Ansible core name used to reference plugin
|
|
||||||
# base injector should be one of None, "managed", or "template"
|
|
||||||
# this dictates which logic to borrow from playbook injectors
|
|
||||||
base_injector = None
|
|
||||||
# every source should have collection, these are for the collection name
|
|
||||||
namespace = None
|
|
||||||
collection = None
|
|
||||||
collection_migration = '2.9' # Starting with this version, we use collections
|
|
||||||
use_fqcn = False # plugin: name versus plugin: namespace.collection.name
|
|
||||||
|
|
||||||
# TODO: delete this method and update unit tests
|
|
||||||
@classmethod
|
|
||||||
def get_proper_name(cls):
|
|
||||||
if cls.plugin_name is None:
|
|
||||||
return None
|
|
||||||
return f'{cls.namespace}.{cls.collection}.{cls.plugin_name}'
|
|
||||||
|
|
||||||
@property
|
|
||||||
def filename(self):
|
|
||||||
"""Inventory filename for using the inventory plugin
|
|
||||||
This is created dynamically, but the auto plugin requires this exact naming
|
|
||||||
"""
|
|
||||||
return '{0}.yml'.format(self.plugin_name)
|
|
||||||
|
|
||||||
def inventory_contents(self, inventory_update, private_data_dir):
|
|
||||||
"""Returns a string that is the content for the inventory file for the inventory plugin"""
|
|
||||||
return yaml.safe_dump(self.inventory_as_dict(inventory_update, private_data_dir), default_flow_style=False, width=1000)
|
|
||||||
|
|
||||||
def inventory_as_dict(self, inventory_update, private_data_dir):
|
|
||||||
source_vars = dict(inventory_update.source_vars_dict) # make a copy
|
|
||||||
'''
|
|
||||||
None conveys that we should use the user-provided plugin.
|
|
||||||
Note that a plugin value of '' should still be overridden.
|
|
||||||
'''
|
|
||||||
if self.plugin_name is not None:
|
|
||||||
if hasattr(self, 'downstream_namespace') and server_product_name() != 'AWX':
|
|
||||||
source_vars['plugin'] = f'{self.downstream_namespace}.{self.downstream_collection}.{self.plugin_name}'
|
|
||||||
elif self.use_fqcn:
|
|
||||||
source_vars['plugin'] = f'{self.namespace}.{self.collection}.{self.plugin_name}'
|
|
||||||
else:
|
|
||||||
source_vars['plugin'] = self.plugin_name
|
|
||||||
return source_vars
|
|
||||||
|
|
||||||
def build_env(self, inventory_update, env, private_data_dir, private_data_files):
|
|
||||||
injector_env = self.get_plugin_env(inventory_update, private_data_dir, private_data_files)
|
|
||||||
env.update(injector_env)
|
|
||||||
# All CLOUD_PROVIDERS sources implement as inventory plugin from collection
|
|
||||||
env['ANSIBLE_INVENTORY_ENABLED'] = 'auto'
|
|
||||||
return env
|
|
||||||
|
|
||||||
def _get_shared_env(self, inventory_update, private_data_dir, private_data_files):
|
|
||||||
"""By default, we will apply the standard managed injectors"""
|
|
||||||
injected_env = {}
|
|
||||||
credential = inventory_update.get_cloud_credential()
|
|
||||||
# some sources may have no credential, specifically ec2
|
|
||||||
if credential is None:
|
|
||||||
return injected_env
|
|
||||||
if self.base_injector in ('managed', 'template'):
|
|
||||||
injected_env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk) # so injector knows this is inventory
|
|
||||||
if self.base_injector == 'managed':
|
|
||||||
from awx.main.models.credential import injectors as builtin_injectors
|
|
||||||
|
|
||||||
cred_kind = inventory_update.source.replace('ec2', 'aws')
|
|
||||||
if cred_kind in dir(builtin_injectors):
|
|
||||||
getattr(builtin_injectors, cred_kind)(credential, injected_env, private_data_dir)
|
|
||||||
elif self.base_injector == 'template':
|
|
||||||
safe_env = injected_env.copy()
|
|
||||||
args = []
|
|
||||||
credential.credential_type.inject_credential(credential, injected_env, safe_env, args, private_data_dir)
|
|
||||||
# NOTE: safe_env is handled externally to injector class by build_safe_env static method
|
|
||||||
# that means that managed injectors must only inject detectable env keys
|
|
||||||
# enforcement of this is accomplished by tests
|
|
||||||
return injected_env
|
|
||||||
|
|
||||||
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
|
|
||||||
env = self._get_shared_env(inventory_update, private_data_dir, private_data_files)
|
|
||||||
return env
|
|
||||||
|
|
||||||
def build_private_data(self, inventory_update, private_data_dir):
|
|
||||||
return self.build_plugin_private_data(inventory_update, private_data_dir)
|
|
||||||
|
|
||||||
def build_plugin_private_data(self, inventory_update, private_data_dir):
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class azure_rm(PluginFileInjector):
|
|
||||||
plugin_name = 'azure_rm'
|
|
||||||
base_injector = 'managed'
|
|
||||||
namespace = 'azure'
|
|
||||||
collection = 'azcollection'
|
|
||||||
|
|
||||||
def get_plugin_env(self, *args, **kwargs):
|
|
||||||
ret = super(azure_rm, self).get_plugin_env(*args, **kwargs)
|
|
||||||
# We need native jinja2 types so that tags can give JSON null value
|
|
||||||
ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class ec2(PluginFileInjector):
|
|
||||||
plugin_name = 'aws_ec2'
|
|
||||||
base_injector = 'managed'
|
|
||||||
namespace = 'amazon'
|
|
||||||
collection = 'aws'
|
|
||||||
|
|
||||||
def get_plugin_env(self, *args, **kwargs):
|
|
||||||
ret = super(ec2, self).get_plugin_env(*args, **kwargs)
|
|
||||||
# We need native jinja2 types so that ec2_state_code will give integer
|
|
||||||
ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class gce(PluginFileInjector):
|
|
||||||
plugin_name = 'gcp_compute'
|
|
||||||
base_injector = 'managed'
|
|
||||||
namespace = 'google'
|
|
||||||
collection = 'cloud'
|
|
||||||
|
|
||||||
def get_plugin_env(self, *args, **kwargs):
|
|
||||||
ret = super(gce, self).get_plugin_env(*args, **kwargs)
|
|
||||||
# We need native jinja2 types so that ip addresses can give JSON null value
|
|
||||||
ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def inventory_as_dict(self, inventory_update, private_data_dir):
|
|
||||||
ret = super().inventory_as_dict(inventory_update, private_data_dir)
|
|
||||||
credential = inventory_update.get_cloud_credential()
|
|
||||||
# InventorySource.source_vars take precedence over ENV vars
|
|
||||||
if 'projects' not in ret:
|
|
||||||
ret['projects'] = [credential.get_input('project', default='')]
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class vmware(PluginFileInjector):
|
|
||||||
plugin_name = 'vmware_vm_inventory'
|
|
||||||
base_injector = 'managed'
|
|
||||||
namespace = 'community'
|
|
||||||
collection = 'vmware'
|
|
||||||
|
|
||||||
|
|
||||||
class openstack(PluginFileInjector):
|
|
||||||
plugin_name = 'openstack'
|
|
||||||
namespace = 'openstack'
|
|
||||||
collection = 'cloud'
|
|
||||||
|
|
||||||
def _get_clouds_dict(self, inventory_update, cred, private_data_dir):
|
|
||||||
openstack_data = _openstack_data(cred)
|
|
||||||
|
|
||||||
openstack_data['clouds']['devstack']['private'] = inventory_update.source_vars_dict.get('private', True)
|
|
||||||
ansible_variables = {
|
|
||||||
'use_hostnames': True,
|
|
||||||
'expand_hostvars': False,
|
|
||||||
'fail_on_errors': True,
|
|
||||||
}
|
|
||||||
provided_count = 0
|
|
||||||
for var_name in ansible_variables:
|
|
||||||
if var_name in inventory_update.source_vars_dict:
|
|
||||||
ansible_variables[var_name] = inventory_update.source_vars_dict[var_name]
|
|
||||||
provided_count += 1
|
|
||||||
if provided_count:
|
|
||||||
# Must we provide all 3 because the user provides any 1 of these??
|
|
||||||
# this probably results in some incorrect mangling of the defaults
|
|
||||||
openstack_data['ansible'] = ansible_variables
|
|
||||||
return openstack_data
|
|
||||||
|
|
||||||
def build_plugin_private_data(self, inventory_update, private_data_dir):
|
|
||||||
credential = inventory_update.get_cloud_credential()
|
|
||||||
private_data = {'credentials': {}}
|
|
||||||
|
|
||||||
openstack_data = self._get_clouds_dict(inventory_update, credential, private_data_dir)
|
|
||||||
private_data['credentials'][credential] = yaml.safe_dump(openstack_data, default_flow_style=False, allow_unicode=True)
|
|
||||||
return private_data
|
|
||||||
|
|
||||||
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
|
|
||||||
env = super(openstack, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
|
|
||||||
credential = inventory_update.get_cloud_credential()
|
|
||||||
cred_data = private_data_files['credentials']
|
|
||||||
env['OS_CLIENT_CONFIG_FILE'] = to_container_path(cred_data[credential], private_data_dir)
|
|
||||||
return env
|
|
||||||
|
|
||||||
|
|
||||||
class rhv(PluginFileInjector):
|
|
||||||
"""ovirt uses the custom credential templating, and that is all"""
|
|
||||||
|
|
||||||
plugin_name = 'ovirt'
|
|
||||||
base_injector = 'template'
|
|
||||||
initial_version = '2.9'
|
|
||||||
namespace = 'ovirt'
|
|
||||||
collection = 'ovirt'
|
|
||||||
downstream_namespace = 'redhat'
|
|
||||||
downstream_collection = 'rhv'
|
|
||||||
use_fqcn = True
|
|
||||||
|
|
||||||
|
|
||||||
class satellite6(PluginFileInjector):
|
|
||||||
plugin_name = 'foreman'
|
|
||||||
namespace = 'theforeman'
|
|
||||||
collection = 'foreman'
|
|
||||||
downstream_namespace = 'redhat'
|
|
||||||
downstream_collection = 'satellite'
|
|
||||||
use_fqcn = True
|
|
||||||
|
|
||||||
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
|
|
||||||
# this assumes that this is merged
|
|
||||||
# https://github.com/ansible/ansible/pull/52693
|
|
||||||
credential = inventory_update.get_cloud_credential()
|
|
||||||
ret = super(satellite6, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
|
|
||||||
if credential:
|
|
||||||
ret['FOREMAN_SERVER'] = credential.get_input('host', default='')
|
|
||||||
ret['FOREMAN_USER'] = credential.get_input('username', default='')
|
|
||||||
ret['FOREMAN_PASSWORD'] = credential.get_input('password', default='')
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class controller(PluginFileInjector):
|
|
||||||
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
|
|
||||||
base_injector = 'template'
|
|
||||||
namespace = 'awx'
|
|
||||||
collection = 'awx'
|
|
||||||
downstream_namespace = 'ansible'
|
|
||||||
downstream_collection = 'controller'
|
|
||||||
|
|
||||||
|
|
||||||
class insights(PluginFileInjector):
|
|
||||||
plugin_name = 'insights'
|
|
||||||
base_injector = 'template'
|
|
||||||
namespace = 'redhatinsights'
|
|
||||||
collection = 'insights'
|
|
||||||
downstream_namespace = 'redhat'
|
|
||||||
downstream_collection = 'insights'
|
|
||||||
use_fqcn = True
|
|
||||||
|
|
||||||
|
|
||||||
class constructed(PluginFileInjector):
|
|
||||||
plugin_name = 'constructed'
|
|
||||||
namespace = 'ansible'
|
|
||||||
collection = 'builtin'
|
|
||||||
|
|
||||||
def build_env(self, *args, **kwargs):
|
|
||||||
env = super().build_env(*args, **kwargs)
|
|
||||||
# Enable script inventory plugin so we pick up the script files from source inventories
|
|
||||||
env['ANSIBLE_INVENTORY_ENABLED'] += ',script'
|
|
||||||
env['ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED'] = 'True'
|
|
||||||
return env
|
|
||||||
|
|
||||||
|
|
||||||
for cls in PluginFileInjector.__subclasses__():
|
for cls in PluginFileInjector.__subclasses__():
|
||||||
InventorySourceOptions.injectors[cls.__name__] = cls
|
InventorySourceOptions.injectors[cls.__name__] = cls
|
||||||
|
|||||||
@@ -205,6 +205,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
|||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
permissions = [('execute_jobtemplate', 'Can run this job template')]
|
||||||
|
# Remove add permission, ability to add comes from use permission for inventory, project, credentials
|
||||||
|
default_permissions = ('change', 'delete', 'view')
|
||||||
|
|
||||||
job_type = models.CharField(
|
job_type = models.CharField(
|
||||||
max_length=64,
|
max_length=64,
|
||||||
|
|||||||
@@ -19,13 +19,14 @@ from django.utils.translation import gettext_lazy as _
|
|||||||
from ansible_base.lib.utils.models import prevent_search
|
from ansible_base.lib.utils.models import prevent_search
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.rbac import Role, RoleAncestorEntry
|
|
||||||
|
from awx.main.models.rbac import Role, RoleAncestorEntry, to_permissions
|
||||||
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
|
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
|
||||||
from awx.main.utils.execution_environments import get_default_execution_environment
|
from awx.main.utils.execution_environments import get_default_execution_environment
|
||||||
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
|
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
|
||||||
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
|
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
|
||||||
from awx.main.fields import AskForField
|
from awx.main.fields import AskForField
|
||||||
from awx.main.constants import ACTIVE_STATES
|
from awx.main.constants import ACTIVE_STATES, org_role_to_permission
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.models.mixins')
|
logger = logging.getLogger('awx.main.models.mixins')
|
||||||
@@ -64,6 +65,18 @@ class ResourceMixin(models.Model):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
|
def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
if cls._meta.model_name == 'organization' and role_field in org_role_to_permission:
|
||||||
|
# Organization roles can not use the DAB RBAC shortcuts
|
||||||
|
# like Organization.access_qs(user, 'change_jobtemplate') is needed
|
||||||
|
# not just Organization.access_qs(user, 'change') is needed
|
||||||
|
if accessor.is_superuser:
|
||||||
|
return cls.objects.values_list('id')
|
||||||
|
|
||||||
|
codename = org_role_to_permission[role_field]
|
||||||
|
|
||||||
|
return cls.access_ids_qs(accessor, codename, content_types=content_types)
|
||||||
|
return cls.access_ids_qs(accessor, to_permissions[role_field], content_types=content_types)
|
||||||
if accessor._meta.model_name == 'user':
|
if accessor._meta.model_name == 'user':
|
||||||
ancestor_roles = accessor.roles.all()
|
ancestor_roles = accessor.roles.all()
|
||||||
elif type(accessor) == Role:
|
elif type(accessor) == Role:
|
||||||
|
|||||||
@@ -31,6 +31,7 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
|
|||||||
from awx.main.notifications.grafana_backend import GrafanaBackend
|
from awx.main.notifications.grafana_backend import GrafanaBackend
|
||||||
from awx.main.notifications.rocketchat_backend import RocketChatBackend
|
from awx.main.notifications.rocketchat_backend import RocketChatBackend
|
||||||
from awx.main.notifications.irc_backend import IrcBackend
|
from awx.main.notifications.irc_backend import IrcBackend
|
||||||
|
from awx.main.notifications.awssns_backend import AWSSNSBackend
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.models.notifications')
|
logger = logging.getLogger('awx.main.models.notifications')
|
||||||
@@ -40,6 +41,7 @@ __all__ = ['NotificationTemplate', 'Notification']
|
|||||||
|
|
||||||
class NotificationTemplate(CommonModelNameNotUnique):
|
class NotificationTemplate(CommonModelNameNotUnique):
|
||||||
NOTIFICATION_TYPES = [
|
NOTIFICATION_TYPES = [
|
||||||
|
('awssns', _('AWS SNS'), AWSSNSBackend),
|
||||||
('email', _('Email'), CustomEmailBackend),
|
('email', _('Email'), CustomEmailBackend),
|
||||||
('slack', _('Slack'), SlackBackend),
|
('slack', _('Slack'), SlackBackend),
|
||||||
('twilio', _('Twilio'), TwilioBackend),
|
('twilio', _('Twilio'), TwilioBackend),
|
||||||
@@ -394,11 +396,11 @@ class JobNotificationMixin(object):
|
|||||||
'verbosity': 0,
|
'verbosity': 0,
|
||||||
},
|
},
|
||||||
'job_friendly_name': 'Job',
|
'job_friendly_name': 'Job',
|
||||||
'url': 'https://towerhost/#/jobs/playbook/1010',
|
'url': 'https://platformhost/#/jobs/playbook/1010',
|
||||||
'approval_status': 'approved',
|
'approval_status': 'approved',
|
||||||
'approval_node_name': 'Approve Me',
|
'approval_node_name': 'Approve Me',
|
||||||
'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
|
'workflow_url': 'https://platformhost/#/jobs/workflow/1010',
|
||||||
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
|
'job_metadata': """{'url': 'https://platformhost/$/jobs/playbook/13',
|
||||||
'traceback': '',
|
'traceback': '',
|
||||||
'status': 'running',
|
'status': 'running',
|
||||||
'started': '2019-08-07T21:46:38.362630+00:00',
|
'started': '2019-08-07T21:46:38.362630+00:00',
|
||||||
@@ -498,7 +500,7 @@ class JobNotificationMixin(object):
|
|||||||
# Body should have at least 2 CRLF, some clients will interpret
|
# Body should have at least 2 CRLF, some clients will interpret
|
||||||
# the email incorrectly with blank body. So we will check that
|
# the email incorrectly with blank body. So we will check that
|
||||||
|
|
||||||
if len(body.strip().splitlines()) <= 2:
|
if len(body.strip().splitlines()) < 1:
|
||||||
# blank body
|
# blank body
|
||||||
body = '\r\n'.join(
|
body = '\r\n'.join(
|
||||||
[
|
[
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ from django.contrib.sessions.models import Session
|
|||||||
from django.utils.timezone import now as tz_now
|
from django.utils.timezone import now as tz_now
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.resource_registry.fields import AnsibleResourceField
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
@@ -33,6 +35,12 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
|
|||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
permissions = [
|
||||||
|
('member_organization', 'Basic participation permissions for organization'),
|
||||||
|
('audit_organization', 'Audit everything inside the organization'),
|
||||||
|
]
|
||||||
|
# Remove add permission, only superuser can add
|
||||||
|
default_permissions = ('change', 'delete', 'view')
|
||||||
|
|
||||||
instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
|
instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
|
||||||
galaxy_credentials = OrderedManyToManyField(
|
galaxy_credentials = OrderedManyToManyField(
|
||||||
@@ -103,6 +111,7 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
|
|||||||
approval_role = ImplicitRoleField(
|
approval_role = ImplicitRoleField(
|
||||||
parent_role='admin_role',
|
parent_role='admin_role',
|
||||||
)
|
)
|
||||||
|
resource = AnsibleResourceField(primary_key_field="id")
|
||||||
|
|
||||||
def get_absolute_url(self, request=None):
|
def get_absolute_url(self, request=None):
|
||||||
return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)
|
return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)
|
||||||
@@ -134,6 +143,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
|
|||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
unique_together = [('organization', 'name')]
|
unique_together = [('organization', 'name')]
|
||||||
ordering = ('organization__name', 'name')
|
ordering = ('organization__name', 'name')
|
||||||
|
permissions = [('member_team', 'Inherit all roles assigned to this team')]
|
||||||
|
|
||||||
organization = models.ForeignKey(
|
organization = models.ForeignKey(
|
||||||
'Organization',
|
'Organization',
|
||||||
@@ -151,6 +161,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
|
|||||||
read_role = ImplicitRoleField(
|
read_role = ImplicitRoleField(
|
||||||
parent_role=['organization.auditor_role', 'member_role'],
|
parent_role=['organization.auditor_role', 'member_role'],
|
||||||
)
|
)
|
||||||
|
resource = AnsibleResourceField(primary_key_field="id")
|
||||||
|
|
||||||
def get_absolute_url(self, request=None):
|
def get_absolute_url(self, request=None):
|
||||||
return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)
|
return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)
|
||||||
|
|||||||
@@ -259,6 +259,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
|
|||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('id',)
|
ordering = ('id',)
|
||||||
|
permissions = [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')]
|
||||||
|
|
||||||
default_environment = models.ForeignKey(
|
default_environment = models.ForeignKey(
|
||||||
'ExecutionEnvironment',
|
'ExecutionEnvironment',
|
||||||
|
|||||||
@@ -7,14 +7,30 @@ import threading
|
|||||||
import contextlib
|
import contextlib
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
# django-rest-framework
|
||||||
|
from rest_framework.serializers import ValidationError
|
||||||
|
|
||||||
|
# crum to impersonate users
|
||||||
|
from crum import impersonate
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.db import models, transaction, connection
|
from django.db import models, transaction, connection
|
||||||
|
from django.db.models.signals import m2m_changed
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
from django.contrib.contenttypes.models import ContentType
|
from django.contrib.contenttypes.models import ContentType
|
||||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django.apps import apps
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
# Ansible_base app
|
||||||
|
from ansible_base.rbac.models import RoleDefinition
|
||||||
|
from ansible_base.lib.utils.models import get_type_for_model
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
|
from awx.main.migrations._dab_rbac import build_role_map, get_permissions_for_role
|
||||||
|
from awx.main.constants import role_name_to_perm_mapping, org_role_to_permission
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
'Role',
|
'Role',
|
||||||
@@ -75,6 +91,11 @@ role_descriptions = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
to_permissions = {}
|
||||||
|
for k, v in role_name_to_perm_mapping.items():
|
||||||
|
to_permissions[k] = v[0].strip('_')
|
||||||
|
|
||||||
|
|
||||||
tls = threading.local() # thread local storage
|
tls = threading.local() # thread local storage
|
||||||
|
|
||||||
|
|
||||||
@@ -86,10 +107,8 @@ def check_singleton(func):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def wrapper(*args, **kwargs):
|
def wrapper(*args, **kwargs):
|
||||||
sys_admin = Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
|
|
||||||
sys_audit = Role.singleton(ROLE_SINGLETON_SYSTEM_AUDITOR)
|
|
||||||
user = args[0]
|
user = args[0]
|
||||||
if user in sys_admin or user in sys_audit:
|
if user.is_superuser or user.is_system_auditor:
|
||||||
if len(args) == 2:
|
if len(args) == 2:
|
||||||
return args[1]
|
return args[1]
|
||||||
return Role.objects.all()
|
return Role.objects.all()
|
||||||
@@ -169,6 +188,24 @@ class Role(models.Model):
|
|||||||
|
|
||||||
def __contains__(self, accessor):
|
def __contains__(self, accessor):
|
||||||
if accessor._meta.model_name == 'user':
|
if accessor._meta.model_name == 'user':
|
||||||
|
if accessor.is_superuser:
|
||||||
|
return True
|
||||||
|
if self.role_field == 'system_administrator':
|
||||||
|
return accessor.is_superuser
|
||||||
|
elif self.role_field == 'system_auditor':
|
||||||
|
return accessor.is_system_auditor
|
||||||
|
elif self.role_field in ('read_role', 'auditor_role') and accessor.is_system_auditor:
|
||||||
|
return True
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
if self.content_object and self.content_object._meta.model_name == 'organization' and self.role_field in org_role_to_permission:
|
||||||
|
codename = org_role_to_permission[self.role_field]
|
||||||
|
|
||||||
|
return accessor.has_obj_perm(self.content_object, codename)
|
||||||
|
|
||||||
|
if self.role_field not in to_permissions:
|
||||||
|
raise Exception(f'{self.role_field} evaluated but not a translatable permission')
|
||||||
|
return accessor.has_obj_perm(self.content_object, to_permissions[self.role_field])
|
||||||
return self.ancestors.filter(members=accessor).exists()
|
return self.ancestors.filter(members=accessor).exists()
|
||||||
else:
|
else:
|
||||||
raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')
|
raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')
|
||||||
@@ -280,6 +317,9 @@ class Role(models.Model):
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return
|
||||||
|
|
||||||
if len(additions) == 0 and len(removals) == 0:
|
if len(additions) == 0 and len(removals) == 0:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -412,6 +452,12 @@ class Role(models.Model):
|
|||||||
in their organization, but some of those roles descend from
|
in their organization, but some of those roles descend from
|
||||||
organization admin_role, but not auditor_role.
|
organization admin_role, but not auditor_role.
|
||||||
"""
|
"""
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation
|
||||||
|
|
||||||
|
q = RoleEvaluation.objects.filter(role__in=user.has_roles.all()).values_list('object_id', 'content_type_id').query
|
||||||
|
return roles_qs.extra(where=[f'(object_id,content_type_id) in ({q})'])
|
||||||
|
|
||||||
return roles_qs.filter(
|
return roles_qs.filter(
|
||||||
id__in=RoleAncestorEntry.objects.filter(
|
id__in=RoleAncestorEntry.objects.filter(
|
||||||
descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
|
descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
|
||||||
@@ -434,6 +480,13 @@ class Role(models.Model):
|
|||||||
return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]
|
return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]
|
||||||
|
|
||||||
|
|
||||||
|
class AncestorManager(models.Manager):
|
||||||
|
def get_queryset(self):
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
raise RuntimeError('The old RBAC system has been disabled, this should never be called')
|
||||||
|
return super(AncestorManager, self).get_queryset()
|
||||||
|
|
||||||
|
|
||||||
class RoleAncestorEntry(models.Model):
|
class RoleAncestorEntry(models.Model):
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
@@ -451,6 +504,8 @@ class RoleAncestorEntry(models.Model):
|
|||||||
content_type_id = models.PositiveIntegerField(null=False)
|
content_type_id = models.PositiveIntegerField(null=False)
|
||||||
object_id = models.PositiveIntegerField(null=False)
|
object_id = models.PositiveIntegerField(null=False)
|
||||||
|
|
||||||
|
objects = AncestorManager()
|
||||||
|
|
||||||
|
|
||||||
def role_summary_fields_generator(content_object, role_field):
|
def role_summary_fields_generator(content_object, role_field):
|
||||||
global role_descriptions
|
global role_descriptions
|
||||||
@@ -479,3 +534,185 @@ def role_summary_fields_generator(content_object, role_field):
|
|||||||
summary['name'] = role_names[role_field]
|
summary['name'] = role_names[role_field]
|
||||||
summary['id'] = getattr(content_object, '{}_id'.format(role_field))
|
summary['id'] = getattr(content_object, '{}_id'.format(role_field))
|
||||||
return summary
|
return summary
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------- Custom Role Compatibility -------------------------
|
||||||
|
# The following are methods to connect this (old) RBAC system to the new
|
||||||
|
# system which allows custom roles
|
||||||
|
# this follows the ORM interface layer documented in docs/rbac.md
|
||||||
|
def get_role_codenames(role):
|
||||||
|
obj = role.content_object
|
||||||
|
if obj is None:
|
||||||
|
return
|
||||||
|
f = obj._meta.get_field(role.role_field)
|
||||||
|
parents, children = build_role_map(apps)
|
||||||
|
return [perm.codename for perm in get_permissions_for_role(f, children, apps)]
|
||||||
|
|
||||||
|
|
||||||
|
def get_role_definition(role):
|
||||||
|
"""Given a old-style role, this gives a role definition in the new RBAC system for it"""
|
||||||
|
obj = role.content_object
|
||||||
|
if obj is None:
|
||||||
|
return
|
||||||
|
f = obj._meta.get_field(role.role_field)
|
||||||
|
action_name = f.name.rsplit("_", 1)[0]
|
||||||
|
model_print = type(obj).__name__
|
||||||
|
rd_name = f'{model_print} {action_name.title()} Compat'
|
||||||
|
perm_list = get_role_codenames(role)
|
||||||
|
defaults = {
|
||||||
|
'content_type_id': role.content_type_id,
|
||||||
|
'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
|
||||||
|
}
|
||||||
|
with impersonate(None):
|
||||||
|
try:
|
||||||
|
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
|
||||||
|
except ValidationError:
|
||||||
|
# This is a tricky case - practically speaking, users should not be allowed to create team roles
|
||||||
|
# or roles that include the team member permission.
|
||||||
|
# If we need to create this for compatibility purposes then we will create it as a managed non-editable role
|
||||||
|
defaults['managed'] = True
|
||||||
|
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
|
def get_role_from_object_role(object_role):
|
||||||
|
"""
|
||||||
|
Given an object role from the new system, return the corresponding role from the old system
|
||||||
|
reverses naming from get_role_definition, and the ANSIBLE_BASE_ROLE_PRECREATE setting.
|
||||||
|
"""
|
||||||
|
rd = object_role.role_definition
|
||||||
|
if rd.name.endswith(' Compat'):
|
||||||
|
model_name, role_name, _ = rd.name.split()
|
||||||
|
role_name = role_name.lower()
|
||||||
|
role_name += '_role'
|
||||||
|
elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2:
|
||||||
|
# cases like "Organization Project Admin"
|
||||||
|
model_name, target_model_name, role_name = rd.name.split()
|
||||||
|
role_name = role_name.lower()
|
||||||
|
model_cls = apps.get_model('main', target_model_name)
|
||||||
|
target_model_name = get_type_for_model(model_cls)
|
||||||
|
|
||||||
|
# exception cases completely specific to one model naming convention
|
||||||
|
if target_model_name == 'notification_template':
|
||||||
|
target_model_name = 'notification'
|
||||||
|
elif target_model_name == 'workflow_job_template':
|
||||||
|
target_model_name = 'workflow'
|
||||||
|
|
||||||
|
role_name = f'{target_model_name}_admin_role'
|
||||||
|
elif rd.name.endswith(' Admin'):
|
||||||
|
# cases like "project-admin"
|
||||||
|
role_name = 'admin_role'
|
||||||
|
elif rd.name == 'Organization Audit':
|
||||||
|
role_name = 'auditor_role'
|
||||||
|
else:
|
||||||
|
model_name, role_name = rd.name.split()
|
||||||
|
role_name = role_name.lower()
|
||||||
|
role_name += '_role'
|
||||||
|
return getattr(object_role.content_object, role_name)
|
||||||
|
|
||||||
|
|
||||||
|
def give_or_remove_permission(role, actor, giving=True):
|
||||||
|
obj = role.content_object
|
||||||
|
if obj is None:
|
||||||
|
return
|
||||||
|
rd = get_role_definition(role)
|
||||||
|
rd.give_or_remove_permission(actor, obj, giving=giving)
|
||||||
|
|
||||||
|
|
||||||
|
class SyncEnabled(threading.local):
|
||||||
|
def __init__(self):
|
||||||
|
self.enabled = True
|
||||||
|
|
||||||
|
|
||||||
|
rbac_sync_enabled = SyncEnabled()
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def disable_rbac_sync():
|
||||||
|
try:
|
||||||
|
previous_value = rbac_sync_enabled.enabled
|
||||||
|
rbac_sync_enabled.enabled = False
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
rbac_sync_enabled.enabled = previous_value
|
||||||
|
|
||||||
|
|
||||||
|
def give_creator_permissions(user, obj):
|
||||||
|
assignment = RoleDefinition.objects.give_creator_permissions(user, obj)
|
||||||
|
if assignment:
|
||||||
|
with disable_rbac_sync():
|
||||||
|
old_role = get_role_from_object_role(assignment.object_role)
|
||||||
|
old_role.members.add(user)
|
||||||
|
|
||||||
|
|
||||||
|
def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
|
||||||
|
if action.startswith('pre_'):
|
||||||
|
return
|
||||||
|
if not rbac_sync_enabled.enabled:
|
||||||
|
return
|
||||||
|
|
||||||
|
if action == 'post_add':
|
||||||
|
is_giving = True
|
||||||
|
elif action == 'post_remove':
|
||||||
|
is_giving = False
|
||||||
|
elif action == 'post_clear':
|
||||||
|
raise RuntimeError('Clearing of role members not supported')
|
||||||
|
|
||||||
|
if reverse:
|
||||||
|
user = instance
|
||||||
|
else:
|
||||||
|
role = instance
|
||||||
|
|
||||||
|
for user_or_role_id in pk_set:
|
||||||
|
if reverse:
|
||||||
|
role = Role.objects.get(pk=user_or_role_id)
|
||||||
|
else:
|
||||||
|
user = get_user_model().objects.get(pk=user_or_role_id)
|
||||||
|
give_or_remove_permission(role, user, giving=is_giving)
|
||||||
|
|
||||||
|
|
||||||
|
def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
|
||||||
|
if action.startswith('pre_'):
|
||||||
|
return
|
||||||
|
|
||||||
|
if action == 'post_add':
|
||||||
|
is_giving = True
|
||||||
|
elif action == 'post_remove':
|
||||||
|
is_giving = False
|
||||||
|
elif action == 'post_clear':
|
||||||
|
raise RuntimeError('Clearing of role members not supported')
|
||||||
|
|
||||||
|
if reverse:
|
||||||
|
parent_role = instance
|
||||||
|
else:
|
||||||
|
child_role = instance
|
||||||
|
|
||||||
|
for role_id in pk_set:
|
||||||
|
if reverse:
|
||||||
|
try:
|
||||||
|
child_role = Role.objects.get(id=role_id)
|
||||||
|
except Role.DoesNotExist:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
parent_role = Role.objects.get(id=role_id)
|
||||||
|
except Role.DoesNotExist:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# To a fault, we want to avoid running this if triggered from implicit_parents management
|
||||||
|
# we only want to do anything if we know for sure this is a non-implicit team role
|
||||||
|
if parent_role.role_field == 'member_role' and parent_role.content_type.model == 'team':
|
||||||
|
# Team internal parents are member_role->read_role and admin_role->member_role
|
||||||
|
# for the same object, this parenting will also be implicit_parents management
|
||||||
|
# do nothing for internal parents, but OTHER teams may still be assigned permissions to a team
|
||||||
|
if (child_role.content_type_id == parent_role.content_type_id) and (child_role.object_id == parent_role.object_id):
|
||||||
|
return
|
||||||
|
|
||||||
|
from awx.main.models.organization import Team
|
||||||
|
|
||||||
|
team = Team.objects.get(pk=parent_role.object_id)
|
||||||
|
give_or_remove_permission(child_role, team, giving=is_giving)
|
||||||
|
|
||||||
|
|
||||||
|
m2m_changed.connect(sync_members_to_new_rbac, Role.members.through)
|
||||||
|
m2m_changed.connect(sync_parents_to_new_rbac, Role.parents.through)
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ from collections import OrderedDict
|
|||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import models, connection
|
from django.db import models, connection, transaction
|
||||||
from django.core.exceptions import NON_FIELD_ERRORS
|
from django.core.exceptions import NON_FIELD_ERRORS
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
@@ -31,13 +31,15 @@ from rest_framework.exceptions import ParseError
|
|||||||
from polymorphic.models import PolymorphicModel
|
from polymorphic.models import PolymorphicModel
|
||||||
|
|
||||||
from ansible_base.lib.utils.models import prevent_search, get_type_for_model
|
from ansible_base.lib.utils.models import prevent_search, get_type_for_model
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
|
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
|
||||||
from awx.main.dispatch import get_task_queuename
|
from awx.main.dispatch import get_task_queuename
|
||||||
from awx.main.dispatch.control import Control as ControlDispatcher
|
from awx.main.dispatch.control import Control as ControlDispatcher
|
||||||
from awx.main.registrar import activity_stream_registrar
|
from awx.main.registrar import activity_stream_registrar
|
||||||
from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
|
from awx.main.models.mixins import TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
|
||||||
|
from awx.main.models.rbac import to_permissions
|
||||||
from awx.main.utils.common import (
|
from awx.main.utils.common import (
|
||||||
camelcase_to_underscore,
|
camelcase_to_underscore,
|
||||||
get_model_for_type,
|
get_model_for_type,
|
||||||
@@ -196,9 +198,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _submodels_with_roles(cls):
|
def _submodels_with_roles(cls):
|
||||||
ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
|
return [c for c in cls.__subclasses__() if permission_registry.is_registered(c)]
|
||||||
ct_dict = ContentType.objects.get_for_models(*ujt_classes)
|
|
||||||
return [ct.id for ct in ct_dict.values()]
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def accessible_pk_qs(cls, accessor, role_field):
|
def accessible_pk_qs(cls, accessor, role_field):
|
||||||
@@ -210,7 +210,23 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
|||||||
# do not use this if in a subclass
|
# do not use this if in a subclass
|
||||||
if cls != UnifiedJobTemplate:
|
if cls != UnifiedJobTemplate:
|
||||||
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
|
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
|
||||||
return ResourceMixin._accessible_pk_qs(cls, accessor, role_field, content_types=cls._submodels_with_roles())
|
from ansible_base.rbac.models import RoleEvaluation
|
||||||
|
|
||||||
|
action = to_permissions[role_field]
|
||||||
|
|
||||||
|
# Special condition for super auditor
|
||||||
|
role_subclasses = cls._submodels_with_roles()
|
||||||
|
role_cts = ContentType.objects.get_for_models(*role_subclasses).values()
|
||||||
|
all_codenames = {f'{action}_{cls._meta.model_name}' for cls in role_subclasses}
|
||||||
|
if not (all_codenames - accessor.singleton_permissions()):
|
||||||
|
qs = cls.objects.filter(polymorphic_ctype__in=role_cts)
|
||||||
|
return qs.values_list('id', flat=True)
|
||||||
|
|
||||||
|
return (
|
||||||
|
RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__in=all_codenames, content_type_id__in=[ct.id for ct in role_cts])
|
||||||
|
.values_list('object_id')
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
def _perform_unique_checks(self, unique_checks):
|
def _perform_unique_checks(self, unique_checks):
|
||||||
# Handle the list of unique fields returned above. Replace with an
|
# Handle the list of unique fields returned above. Replace with an
|
||||||
@@ -264,7 +280,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
|||||||
if new_next_schedule:
|
if new_next_schedule:
|
||||||
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
|
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
|
||||||
return # no-op, common for infrequent schedules
|
return # no-op, common for infrequent schedules
|
||||||
self.next_schedule = new_next_schedule
|
|
||||||
|
# If in a transaction, use select_for_update to lock the next schedule row, which
|
||||||
|
# prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
|
||||||
|
if transaction.get_autocommit():
|
||||||
|
self.next_schedule = related_schedules.first()
|
||||||
|
else:
|
||||||
|
self.next_schedule = related_schedules.select_for_update().first()
|
||||||
|
|
||||||
self.next_job_run = new_next_schedule.next_run
|
self.next_job_run = new_next_schedule.next_run
|
||||||
self.save(update_fields=['next_schedule', 'next_job_run'])
|
self.save(update_fields=['next_schedule', 'next_job_run'])
|
||||||
|
|
||||||
@@ -814,7 +837,7 @@ class UnifiedJob(
|
|||||||
update_fields.append(key)
|
update_fields.append(key)
|
||||||
|
|
||||||
if parent_instance:
|
if parent_instance:
|
||||||
if self.status in ('pending', 'waiting', 'running'):
|
if self.status in ('pending', 'running'):
|
||||||
if parent_instance.current_job != self:
|
if parent_instance.current_job != self:
|
||||||
parent_instance_set('current_job', self)
|
parent_instance_set('current_job', self)
|
||||||
# Update parent with all the 'good' states of it's child
|
# Update parent with all the 'good' states of it's child
|
||||||
@@ -851,7 +874,7 @@ class UnifiedJob(
|
|||||||
# If this job already exists in the database, retrieve a copy of
|
# If this job already exists in the database, retrieve a copy of
|
||||||
# the job in its prior state.
|
# the job in its prior state.
|
||||||
# If update_fields are given without status, then that indicates no change
|
# If update_fields are given without status, then that indicates no change
|
||||||
if self.pk and ((not update_fields) or ('status' in update_fields)):
|
if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
|
||||||
self_before = self.__class__.objects.get(pk=self.pk)
|
self_before = self.__class__.objects.get(pk=self.pk)
|
||||||
if self_before.status != self.status:
|
if self_before.status != self.status:
|
||||||
status_before = self_before.status
|
status_before = self_before.status
|
||||||
@@ -893,7 +916,8 @@ class UnifiedJob(
|
|||||||
update_fields.append('elapsed')
|
update_fields.append('elapsed')
|
||||||
|
|
||||||
# Ensure that the job template information is current.
|
# Ensure that the job template information is current.
|
||||||
if self.unified_job_template != self._get_parent_instance():
|
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||||
|
if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
|
||||||
self.unified_job_template = self._get_parent_instance()
|
self.unified_job_template = self._get_parent_instance()
|
||||||
if 'unified_job_template' not in update_fields:
|
if 'unified_job_template' not in update_fields:
|
||||||
update_fields.append('unified_job_template')
|
update_fields.append('unified_job_template')
|
||||||
@@ -906,8 +930,9 @@ class UnifiedJob(
|
|||||||
# Okay; we're done. Perform the actual save.
|
# Okay; we're done. Perform the actual save.
|
||||||
result = super(UnifiedJob, self).save(*args, **kwargs)
|
result = super(UnifiedJob, self).save(*args, **kwargs)
|
||||||
|
|
||||||
# If status changed, update the parent instance.
|
# If status changed, update the parent instance
|
||||||
if self.status != status_before:
|
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||||
|
if self.status != status_before and self.status != 'waiting':
|
||||||
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
|
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
|
||||||
# This dodges lock contention at the expense of the foreign key not being
|
# This dodges lock contention at the expense of the foreign key not being
|
||||||
# completely correct.
|
# completely correct.
|
||||||
@@ -1599,7 +1624,8 @@ class UnifiedJob(
|
|||||||
extra["controller_node"] = self.controller_node or "NOT_SET"
|
extra["controller_node"] = self.controller_node or "NOT_SET"
|
||||||
elif state == "execution_node_chosen":
|
elif state == "execution_node_chosen":
|
||||||
extra["execution_node"] = self.execution_node or "NOT_SET"
|
extra["execution_node"] = self.execution_node or "NOT_SET"
|
||||||
logger_job_lifecycle.info(msg, extra=extra)
|
|
||||||
|
logger_job_lifecycle.info(f"{msg} {json.dumps(extra)}")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def launched_by(self):
|
def launched_by(self):
|
||||||
|
|||||||
@@ -467,6 +467,10 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
|
permissions = [
|
||||||
|
('execute_workflowjobtemplate', 'Can run this workflow job template'),
|
||||||
|
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
|
||||||
|
]
|
||||||
|
|
||||||
notification_templates_approvals = models.ManyToManyField(
|
notification_templates_approvals = models.ManyToManyField(
|
||||||
"NotificationTemplate",
|
"NotificationTemplate",
|
||||||
|
|||||||
70
awx/main/notifications/awssns_backend.py
Normal file
70
awx/main/notifications/awssns_backend.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
# Copyright (c) 2016 Ansible, Inc.
|
||||||
|
# All Rights Reserved.
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import boto3
|
||||||
|
from botocore.exceptions import ClientError
|
||||||
|
|
||||||
|
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||||
|
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||||
|
|
||||||
|
logger = logging.getLogger('awx.main.notifications.awssns_backend')
|
||||||
|
WEBSOCKET_TIMEOUT = 30
|
||||||
|
|
||||||
|
|
||||||
|
class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||||
|
init_parameters = {
|
||||||
|
"aws_region": {"label": "AWS Region", "type": "string", "default": ""},
|
||||||
|
"aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
|
||||||
|
"aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
|
||||||
|
"aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
|
||||||
|
"sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
|
||||||
|
}
|
||||||
|
recipient_parameter = "sns_topic_arn"
|
||||||
|
sender_parameter = None
|
||||||
|
|
||||||
|
DEFAULT_BODY = "{{ job_metadata }}"
|
||||||
|
default_messages = CustomNotificationBase.job_metadata_messages
|
||||||
|
|
||||||
|
def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
|
||||||
|
session = boto3.session.Session()
|
||||||
|
client_config = {"service_name": 'sns'}
|
||||||
|
if aws_region:
|
||||||
|
client_config["region_name"] = aws_region
|
||||||
|
if aws_secret_access_key:
|
||||||
|
client_config["aws_secret_access_key"] = aws_secret_access_key
|
||||||
|
if aws_access_key_id:
|
||||||
|
client_config["aws_access_key_id"] = aws_access_key_id
|
||||||
|
if aws_session_token:
|
||||||
|
client_config["aws_session_token"] = aws_session_token
|
||||||
|
self.client = session.client(**client_config)
|
||||||
|
super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
|
||||||
|
|
||||||
|
def _sns_publish(self, topic_arn, message):
|
||||||
|
self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
|
||||||
|
|
||||||
|
def format_body(self, body):
|
||||||
|
if isinstance(body, str):
|
||||||
|
try:
|
||||||
|
body = json.loads(body)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if isinstance(body, dict):
|
||||||
|
body = json.dumps(body)
|
||||||
|
# convert dict body to json string
|
||||||
|
return body
|
||||||
|
|
||||||
|
def send_messages(self, messages):
|
||||||
|
sent_messages = 0
|
||||||
|
for message in messages:
|
||||||
|
sns_topic_arn = str(message.recipients()[0])
|
||||||
|
try:
|
||||||
|
self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
|
||||||
|
sent_messages += 1
|
||||||
|
except ClientError as error:
|
||||||
|
if not self.fail_silently:
|
||||||
|
raise error
|
||||||
|
|
||||||
|
return sent_messages
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
# Copyright (c) 2019 Ansible, Inc.
|
# Copyright (c) 2019 Ansible, Inc.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
# -*-coding:utf-8-*-
|
||||||
|
|
||||||
|
|
||||||
class CustomNotificationBase(object):
|
class CustomNotificationBase(object):
|
||||||
@@ -31,3 +32,15 @@ class CustomNotificationBase(object):
|
|||||||
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
|
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
job_metadata_messages = {
|
||||||
|
"started": {"body": "{{ job_metadata }}"},
|
||||||
|
"success": {"body": "{{ job_metadata }}"},
|
||||||
|
"error": {"body": "{{ job_metadata }}"},
|
||||||
|
"workflow_approval": {
|
||||||
|
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
||||||
|
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
||||||
|
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
||||||
|
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|||||||
@@ -27,17 +27,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
|||||||
sender_parameter = None
|
sender_parameter = None
|
||||||
|
|
||||||
DEFAULT_BODY = "{{ job_metadata }}"
|
DEFAULT_BODY = "{{ job_metadata }}"
|
||||||
default_messages = {
|
default_messages = CustomNotificationBase.job_metadata_messages
|
||||||
"started": {"body": DEFAULT_BODY},
|
|
||||||
"success": {"body": DEFAULT_BODY},
|
|
||||||
"error": {"body": DEFAULT_BODY},
|
|
||||||
"workflow_approval": {
|
|
||||||
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
|
||||||
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
|
||||||
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
|
||||||
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
|
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
|
||||||
self.http_method = http_method
|
self.http_method = http_method
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ from . import consumers
|
|||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.routing')
|
logger = logging.getLogger('awx.main.routing')
|
||||||
|
_application = None
|
||||||
|
|
||||||
|
|
||||||
class AWXProtocolTypeRouter(ProtocolTypeRouter):
|
class AWXProtocolTypeRouter(ProtocolTypeRouter):
|
||||||
@@ -62,15 +63,60 @@ websocket_urlpatterns = [
|
|||||||
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
|
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
|
||||||
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
|
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
|
||||||
|
websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
|
||||||
|
|
||||||
websocket_relay_urlpatterns = [
|
websocket_relay_urlpatterns = [
|
||||||
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
||||||
]
|
]
|
||||||
|
|
||||||
application = AWXProtocolTypeRouter(
|
|
||||||
{
|
def application_func(cls=AWXProtocolTypeRouter) -> ProtocolTypeRouter:
|
||||||
'websocket': MultipleURLRouterAdapter(
|
return cls(
|
||||||
URLRouter(websocket_relay_urlpatterns),
|
{
|
||||||
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
|
'websocket': MultipleURLRouterAdapter(
|
||||||
)
|
URLRouter(websocket_relay_urlpatterns),
|
||||||
}
|
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
|
||||||
)
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(name: str) -> ProtocolTypeRouter:
|
||||||
|
"""
|
||||||
|
Defer instantiating application.
|
||||||
|
For testing, we just need it to NOT run on import.
|
||||||
|
|
||||||
|
https://peps.python.org/pep-0562/#specification
|
||||||
|
|
||||||
|
Normally, someone would get application from this module via:
|
||||||
|
from awx.main.routing import application
|
||||||
|
|
||||||
|
and do something with the application:
|
||||||
|
application.do_something()
|
||||||
|
|
||||||
|
What does the callstack look like when the import runs?
|
||||||
|
...
|
||||||
|
awx.main.routing.__getattribute__(...) # <-- we don't define this so NOOP as far as we are concerned
|
||||||
|
if '__getattr__' in awx.main.routing.__dict__: # <-- this triggers the function we are in
|
||||||
|
return awx.main.routing.__dict__.__getattr__("application")
|
||||||
|
|
||||||
|
Why isn't this function simply implemented as:
|
||||||
|
def __getattr__(name):
|
||||||
|
if not _application:
|
||||||
|
_application = application_func()
|
||||||
|
return _application
|
||||||
|
|
||||||
|
It could. I manually tested it and it passes test_routing.py.
|
||||||
|
|
||||||
|
But my understanding after reading the PEP-0562 specification link above is that
|
||||||
|
performance would be a bit worse due to the extra __getattribute__ calls when
|
||||||
|
we reference non-global variables.
|
||||||
|
"""
|
||||||
|
if name == "application":
|
||||||
|
globs = globals()
|
||||||
|
if not globs['_application']:
|
||||||
|
globs['_application'] = application_func()
|
||||||
|
return globs['_application']
|
||||||
|
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
||||||
|
|||||||
@@ -138,7 +138,8 @@ class TaskBase:
|
|||||||
|
|
||||||
# Lock
|
# Lock
|
||||||
with task_manager_bulk_reschedule():
|
with task_manager_bulk_reschedule():
|
||||||
with advisory_lock(f"{self.prefix}_lock", wait=False) as acquired:
|
lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000 # convert to milliseconds
|
||||||
|
with advisory_lock(f"{self.prefix}_lock", lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
if acquired is False:
|
if acquired is False:
|
||||||
logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
|
logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
|
||||||
|
|||||||
@@ -126,6 +126,8 @@ def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwarg
|
|||||||
|
|
||||||
def sync_superuser_status_to_rbac(instance, **kwargs):
|
def sync_superuser_status_to_rbac(instance, **kwargs):
|
||||||
'When the is_superuser flag is changed on a user, reflect that in the membership of the System Admnistrator role'
|
'When the is_superuser flag is changed on a user, reflect that in the membership of the System Admnistrator role'
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return
|
||||||
update_fields = kwargs.get('update_fields', None)
|
update_fields = kwargs.get('update_fields', None)
|
||||||
if update_fields and 'is_superuser' not in update_fields:
|
if update_fields and 'is_superuser' not in update_fields:
|
||||||
return
|
return
|
||||||
@@ -137,6 +139,8 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
|
|||||||
|
|
||||||
def sync_rbac_to_superuser_status(instance, sender, **kwargs):
|
def sync_rbac_to_superuser_status(instance, sender, **kwargs):
|
||||||
'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
|
'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return
|
||||||
if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
|
if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
|
||||||
new_status_value = bool(kwargs['action'] == 'post_add')
|
new_status_value = bool(kwargs['action'] == 'post_add')
|
||||||
if hasattr(instance, 'singleton_name'): # duck typing, role.members.add() vs user.roles.add()
|
if hasattr(instance, 'singleton_name'): # duck typing, role.members.add() vs user.roles.add()
|
||||||
|
|||||||
@@ -49,6 +49,70 @@ class ReceptorConnectionType(Enum):
|
|||||||
STREAMTLS = 2
|
STREAMTLS = 2
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Translate receptorctl messages that come in over stdout into
|
||||||
|
structured messages. Currently, these are error messages.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ReceptorErrorBase:
|
||||||
|
_MESSAGE = 'Receptor Error'
|
||||||
|
|
||||||
|
def __init__(self, node: str = 'N/A', state_name: str = 'N/A'):
|
||||||
|
self.node = node
|
||||||
|
self.state_name = state_name
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"{self.__class__.__name__} '{self._MESSAGE}' on node '{self.node}' with state '{self.state_name}'"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkUnitError(ReceptorErrorBase):
|
||||||
|
_MESSAGE = 'unknown work unit '
|
||||||
|
|
||||||
|
def __init__(self, work_unit_id: str, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self.work_unit_id = work_unit_id
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"{super().__str__()} work unit id '{self.work_unit_id}'"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkUnitCancelError(WorkUnitError):
|
||||||
|
_MESSAGE = 'error cancelling remote unit: unknown work unit '
|
||||||
|
|
||||||
|
|
||||||
|
class WorkUnitResultsError(WorkUnitError):
|
||||||
|
_MESSAGE = 'Failed to get results: unknown work unit '
|
||||||
|
|
||||||
|
|
||||||
|
class UnknownError(ReceptorErrorBase):
|
||||||
|
_MESSAGE = 'Unknown receptor ctl error'
|
||||||
|
|
||||||
|
def __init__(self, msg, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._MESSAGE = msg
|
||||||
|
|
||||||
|
|
||||||
|
class FuzzyError:
|
||||||
|
def __new__(self, e: RuntimeError, node: str, state_name: str):
|
||||||
|
"""
|
||||||
|
At the time of writing this comment all of the sub-classes detection
|
||||||
|
is centralized in this parent class. It's like a Router().
|
||||||
|
Someone may find it better to push down the error detection logic into
|
||||||
|
each sub-class.
|
||||||
|
"""
|
||||||
|
msg = e.args[0]
|
||||||
|
|
||||||
|
common_startswith = (WorkUnitCancelError, WorkUnitResultsError, WorkUnitError)
|
||||||
|
|
||||||
|
for klass in common_startswith:
|
||||||
|
if msg.startswith(klass._MESSAGE):
|
||||||
|
work_unit_id = msg[len(klass._MESSAGE) :]
|
||||||
|
return klass(work_unit_id, node=node, state_name=state_name)
|
||||||
|
|
||||||
|
return UnknownError(msg, node=node, state_name=state_name)
|
||||||
|
|
||||||
|
|
||||||
def read_receptor_config():
|
def read_receptor_config():
|
||||||
# for K8S deployments, getting a lock is necessary as another process
|
# for K8S deployments, getting a lock is necessary as another process
|
||||||
# may be re-writing the config at this time
|
# may be re-writing the config at this time
|
||||||
@@ -185,6 +249,7 @@ def run_until_complete(node, timing_data=None, **kwargs):
|
|||||||
timing_data['transmit_timing'] = run_start - transmit_start
|
timing_data['transmit_timing'] = run_start - transmit_start
|
||||||
run_timing = 0.0
|
run_timing = 0.0
|
||||||
stdout = ''
|
stdout = ''
|
||||||
|
state_name = 'local var never set'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resultfile = receptor_ctl.get_work_results(unit_id)
|
resultfile = receptor_ctl.get_work_results(unit_id)
|
||||||
@@ -205,13 +270,33 @@ def run_until_complete(node, timing_data=None, **kwargs):
|
|||||||
stdout = resultfile.read()
|
stdout = resultfile.read()
|
||||||
stdout = str(stdout, encoding='utf-8')
|
stdout = str(stdout, encoding='utf-8')
|
||||||
|
|
||||||
|
except RuntimeError as e:
|
||||||
|
receptor_e = FuzzyError(e, node, state_name)
|
||||||
|
if type(receptor_e) in (
|
||||||
|
WorkUnitError,
|
||||||
|
WorkUnitResultsError,
|
||||||
|
):
|
||||||
|
logger.warning(f'While consuming job results: {receptor_e}')
|
||||||
|
else:
|
||||||
|
raise
|
||||||
finally:
|
finally:
|
||||||
if settings.RECEPTOR_RELEASE_WORK:
|
if settings.RECEPTOR_RELEASE_WORK:
|
||||||
res = receptor_ctl.simple_command(f"work release {unit_id}")
|
try:
|
||||||
if res != {'released': unit_id}:
|
res = receptor_ctl.simple_command(f"work release {unit_id}")
|
||||||
logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
|
|
||||||
|
|
||||||
receptor_ctl.close()
|
if res != {'released': unit_id}:
|
||||||
|
logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
|
||||||
|
|
||||||
|
receptor_ctl.close()
|
||||||
|
except RuntimeError as e:
|
||||||
|
receptor_e = FuzzyError(e, node, state_name)
|
||||||
|
if type(receptor_e) in (
|
||||||
|
WorkUnitError,
|
||||||
|
WorkUnitCancelError,
|
||||||
|
):
|
||||||
|
logger.warning(f"While releasing work: {receptor_e}")
|
||||||
|
else:
|
||||||
|
logger.error(f"While releasing work: {receptor_e}")
|
||||||
|
|
||||||
if state_name.lower() == 'failed':
|
if state_name.lower() == 'failed':
|
||||||
work_detail = status.get('Detail', '')
|
work_detail = status.get('Detail', '')
|
||||||
@@ -275,7 +360,7 @@ def _convert_args_to_cli(vargs):
|
|||||||
args = ['cleanup']
|
args = ['cleanup']
|
||||||
for option in ('exclude_strings', 'remove_images'):
|
for option in ('exclude_strings', 'remove_images'):
|
||||||
if vargs.get(option):
|
if vargs.get(option):
|
||||||
args.append('--{}={}'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
|
args.append('--{}="{}"'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
|
||||||
for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
|
for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
|
||||||
if vargs.get(option) is True:
|
if vargs.get(option) is True:
|
||||||
args.append('--{}'.format(option.replace('_', '-')))
|
args.append('--{}'.format(option.replace('_', '-')))
|
||||||
@@ -320,10 +405,11 @@ class AWXReceptorJob:
|
|||||||
finally:
|
finally:
|
||||||
# Make sure to always release the work unit if we established it
|
# Make sure to always release the work unit if we established it
|
||||||
if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
|
if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
|
||||||
try:
|
if settings.RECPETOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error':
|
||||||
receptor_ctl.simple_command(f"work release {self.unit_id}")
|
try:
|
||||||
except Exception:
|
receptor_ctl.simple_command(f"work release {self.unit_id}")
|
||||||
logger.exception(f"Error releasing work unit {self.unit_id}.")
|
except Exception:
|
||||||
|
logger.exception(f"Error releasing work unit {self.unit_id}.")
|
||||||
|
|
||||||
def _run_internal(self, receptor_ctl):
|
def _run_internal(self, receptor_ctl):
|
||||||
# Create a socketpair. Where the left side will be used for writing our payload
|
# Create a socketpair. Where the left side will be used for writing our payload
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import itertools
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import psycopg
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
from contextlib import redirect_stdout
|
from contextlib import redirect_stdout
|
||||||
import shutil
|
import shutil
|
||||||
@@ -35,6 +36,9 @@ import ansible_runner.cleanup
|
|||||||
# dateutil
|
# dateutil
|
||||||
from dateutil.parser import parse as parse_date
|
from dateutil.parser import parse as parse_date
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.resource_registry.tasks.sync import SyncExecutor
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx import __version__ as awx_application_version
|
from awx import __version__ as awx_application_version
|
||||||
from awx.main.access import access_registry
|
from awx.main.access import access_registry
|
||||||
@@ -50,7 +54,7 @@ from awx.main.models import (
|
|||||||
Job,
|
Job,
|
||||||
convert_jsonfields,
|
convert_jsonfields,
|
||||||
)
|
)
|
||||||
from awx.main.constants import ACTIVE_STATES
|
from awx.main.constants import ACTIVE_STATES, ERROR_STATES
|
||||||
from awx.main.dispatch.publish import task
|
from awx.main.dispatch.publish import task
|
||||||
from awx.main.dispatch import get_task_queuename, reaper
|
from awx.main.dispatch import get_task_queuename, reaper
|
||||||
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
|
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
|
||||||
@@ -416,7 +420,7 @@ def handle_removed_image(remove_images=None):
|
|||||||
|
|
||||||
@task(queue=get_task_queuename)
|
@task(queue=get_task_queuename)
|
||||||
def cleanup_images_and_files():
|
def cleanup_images_and_files():
|
||||||
_cleanup_images_and_files()
|
_cleanup_images_and_files(image_prune=True)
|
||||||
|
|
||||||
|
|
||||||
@task(queue=get_task_queuename)
|
@task(queue=get_task_queuename)
|
||||||
@@ -630,10 +634,18 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
|
|||||||
logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))
|
logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))
|
||||||
|
|
||||||
except DatabaseError as e:
|
except DatabaseError as e:
|
||||||
if 'did not affect any rows' in str(e):
|
cause = e.__cause__
|
||||||
logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
|
if cause and hasattr(cause, 'sqlstate'):
|
||||||
|
sqlstate = cause.sqlstate
|
||||||
|
sqlstate_str = psycopg.errors.lookup(sqlstate)
|
||||||
|
logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
|
||||||
|
|
||||||
|
if sqlstate == psycopg.errors.NoData:
|
||||||
|
logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
|
||||||
|
else:
|
||||||
|
logger.exception("Error marking {} as lost.".format(other_inst.hostname))
|
||||||
else:
|
else:
|
||||||
logger.exception('Error marking {} as lost'.format(other_inst.hostname))
|
logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))
|
||||||
|
|
||||||
# Run local reaper
|
# Run local reaper
|
||||||
if worker_tasks is not None:
|
if worker_tasks is not None:
|
||||||
@@ -673,6 +685,8 @@ def awx_receptor_workunit_reaper():
|
|||||||
|
|
||||||
unit_ids = [id for id in receptor_work_list]
|
unit_ids = [id for id in receptor_work_list]
|
||||||
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
|
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
|
||||||
|
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
|
||||||
|
jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
|
||||||
for job in jobs_with_unreleased_receptor_units:
|
for job in jobs_with_unreleased_receptor_units:
|
||||||
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
|
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
|
||||||
receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
|
receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
|
||||||
@@ -692,7 +706,10 @@ def awx_k8s_reaper():
|
|||||||
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
|
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
|
||||||
pods = PodManager.list_active_jobs(group)
|
pods = PodManager.list_active_jobs(group)
|
||||||
time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
|
time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
|
||||||
for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
|
reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
|
||||||
|
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
|
||||||
|
reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
|
||||||
|
for job in reap_job_candidates:
|
||||||
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
|
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
|
||||||
try:
|
try:
|
||||||
pm = PodManager(job)
|
pm = PodManager(job)
|
||||||
@@ -703,7 +720,8 @@ def awx_k8s_reaper():
|
|||||||
|
|
||||||
@task(queue=get_task_queuename)
|
@task(queue=get_task_queuename)
|
||||||
def awx_periodic_scheduler():
|
def awx_periodic_scheduler():
|
||||||
with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
|
lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
|
||||||
|
with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
|
||||||
if acquired is False:
|
if acquired is False:
|
||||||
logger.debug("Not running periodic scheduler, another task holds lock")
|
logger.debug("Not running periodic scheduler, another task holds lock")
|
||||||
return
|
return
|
||||||
@@ -788,10 +806,19 @@ def update_inventory_computed_fields(inventory_id):
|
|||||||
try:
|
try:
|
||||||
i.update_computed_fields()
|
i.update_computed_fields()
|
||||||
except DatabaseError as e:
|
except DatabaseError as e:
|
||||||
if 'did not affect any rows' in str(e):
|
# https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
|
||||||
logger.debug('Exiting duplicate update_inventory_computed_fields task.')
|
# django raises DatabaseError("Forced update did not affect any rows.")
|
||||||
return
|
|
||||||
raise
|
# if sqlstate is set then there was a database error and otherwise will re-raise that error
|
||||||
|
cause = e.__cause__
|
||||||
|
if cause and hasattr(cause, 'sqlstate'):
|
||||||
|
sqlstate = cause.sqlstate
|
||||||
|
sqlstate_str = psycopg.errors.lookup(sqlstate)
|
||||||
|
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
|
||||||
|
raise
|
||||||
|
|
||||||
|
# otherwise
|
||||||
|
logger.debug('Exiting duplicate update_inventory_computed_fields task.')
|
||||||
|
|
||||||
|
|
||||||
def update_smart_memberships_for_inventory(smart_inventory):
|
def update_smart_memberships_for_inventory(smart_inventory):
|
||||||
@@ -946,3 +973,27 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
|
|||||||
permission_check_func(creater, copy_mapping.values())
|
permission_check_func(creater, copy_mapping.values())
|
||||||
if isinstance(new_obj, Inventory):
|
if isinstance(new_obj, Inventory):
|
||||||
update_inventory_computed_fields.delay(new_obj.id)
|
update_inventory_computed_fields.delay(new_obj.id)
|
||||||
|
|
||||||
|
|
||||||
|
@task(queue=get_task_queuename)
|
||||||
|
def periodic_resource_sync():
|
||||||
|
if not getattr(settings, 'RESOURCE_SERVER', None):
|
||||||
|
logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
|
||||||
|
return
|
||||||
|
|
||||||
|
with advisory_lock('periodic_resource_sync', wait=False) as acquired:
|
||||||
|
if acquired is False:
|
||||||
|
logger.debug("Not running periodic_resource_sync, another task holds lock")
|
||||||
|
return
|
||||||
|
logger.debug("Running periodic resource sync")
|
||||||
|
|
||||||
|
executor = SyncExecutor()
|
||||||
|
executor.run()
|
||||||
|
for key, item_list in executor.results.items():
|
||||||
|
if not item_list or key == 'noop':
|
||||||
|
continue
|
||||||
|
# Log creations and conflicts
|
||||||
|
if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
|
||||||
|
logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
|
||||||
|
else:
|
||||||
|
logger.info(f'Periodic resource sync {key}:\n{item_list}')
|
||||||
|
|||||||
@@ -3,5 +3,5 @@
|
|||||||
hosts: all
|
hosts: all
|
||||||
tasks:
|
tasks:
|
||||||
- name: Hello Message
|
- name: Hello Message
|
||||||
debug:
|
ansible.builtin.debug:
|
||||||
msg: "Hello World!"
|
msg: "Hello World!"
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"K8S_AUTH_HOST": "https://foo.invalid",
|
||||||
|
"K8S_AUTH_API_KEY": "fooo",
|
||||||
|
"K8S_AUTH_VERIFY_SSL": "False"
|
||||||
|
}
|
||||||
3
awx/main/tests/data/inventory/plugins/terraform/env.json
Normal file
3
awx/main/tests/data/inventory/plugins/terraform/env.json
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"GOOGLE_BACKEND_CREDENTIALS": "{{ file_reference }}"
|
||||||
|
}
|
||||||
@@ -1,13 +1,8 @@
|
|||||||
from awx.main.tests.functional.conftest import * # noqa
|
from awx.main.tests.functional.conftest import * # noqa
|
||||||
|
import os
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
def pytest_addoption(parser):
|
@pytest.fixture()
|
||||||
parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")
|
def release():
|
||||||
|
return os.environ.get('VERSION_TARGET', '')
|
||||||
|
|
||||||
def pytest_generate_tests(metafunc):
|
|
||||||
# This is called for every test. Only get/set command line arguments
|
|
||||||
# if the argument is specified in the list of test "fixturenames".
|
|
||||||
option_value = metafunc.config.option.release
|
|
||||||
if 'release' in metafunc.fixturenames and option_value is not None:
|
|
||||||
metafunc.parametrize("release", [option_value])
|
|
||||||
|
|||||||
@@ -99,7 +99,7 @@ class TestSwaggerGeneration:
|
|||||||
# The number of API endpoints changes over time, but let's just check
|
# The number of API endpoints changes over time, but let's just check
|
||||||
# for a reasonable number here; if this test starts failing, raise/lower the bounds
|
# for a reasonable number here; if this test starts failing, raise/lower the bounds
|
||||||
paths = JSON['paths']
|
paths = JSON['paths']
|
||||||
assert 250 < len(paths) < 375
|
assert 250 < len(paths) < 400
|
||||||
assert set(list(paths['/api/'].keys())) == set(['get', 'parameters'])
|
assert set(list(paths['/api/'].keys())) == set(['get', 'parameters'])
|
||||||
assert set(list(paths['/api/v2/'].keys())) == set(['get', 'parameters'])
|
assert set(list(paths['/api/v2/'].keys())) == set(['get', 'parameters'])
|
||||||
assert set(list(sorted(paths['/api/v2/credentials/'].keys()))) == set(['get', 'post', 'parameters'])
|
assert set(list(sorted(paths['/api/v2/credentials/'].keys()))) == set(['get', 'post', 'parameters'])
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ from prometheus_client.parser import text_string_to_metric_families
|
|||||||
from awx.main import models
|
from awx.main import models
|
||||||
from awx.main.analytics.metrics import metrics
|
from awx.main.analytics.metrics import metrics
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
from awx.main.models.rbac import Role
|
|
||||||
|
|
||||||
EXPECTED_VALUES = {
|
EXPECTED_VALUES = {
|
||||||
'awx_system_info': 1.0,
|
'awx_system_info': 1.0,
|
||||||
@@ -66,7 +65,6 @@ def test_metrics_permissions(get, admin, org_admin, alice, bob, organization):
|
|||||||
organization.auditor_role.members.add(bob)
|
organization.auditor_role.members.add(bob)
|
||||||
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
|
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
|
||||||
|
|
||||||
Role.singleton('system_auditor').members.add(bob)
|
|
||||||
bob.is_system_auditor = True
|
bob.is_system_auditor = True
|
||||||
assert get(get_metrics_view_db_only(), user=bob).status_code == 200
|
assert get(get_metrics_view_db_only(), user=bob).status_code == 200
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ from django.test import Client
|
|||||||
from rest_framework.test import APIRequestFactory
|
from rest_framework.test import APIRequestFactory
|
||||||
|
|
||||||
from awx.api.generics import LoggedLoginView
|
from awx.api.generics import LoggedLoginView
|
||||||
from awx.api.versioning import drf_reverse
|
from rest_framework.reverse import reverse as drf_reverse
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
|
|||||||
role_pk = inventory.admin_role.pk
|
role_pk = inventory.admin_role.pk
|
||||||
data = {"id": role_pk}
|
data = {"id": role_pk}
|
||||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||||
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
||||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
|
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
|
||||||
|
|
||||||
|
|
||||||
@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
|
|||||||
role_pk = inventory.admin_role.pk
|
role_pk = inventory.admin_role.pk
|
||||||
data = {"id": role_pk}
|
data = {"id": role_pk}
|
||||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||||
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
||||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||||
|
|
||||||
|
|
||||||
@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
|
|||||||
role_pk = inventory.admin_role.pk
|
role_pk = inventory.admin_role.pk
|
||||||
data = {"id": team.pk}
|
data = {"id": team.pk}
|
||||||
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
|
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
|
||||||
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
|
mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
|
||||||
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
||||||
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ def test_idempotent_credential_type_setup():
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh):
|
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
|
||||||
params = {
|
params = {
|
||||||
'credential_type': 1,
|
'credential_type': 1,
|
||||||
'inputs': {'username': 'someusername'},
|
'inputs': {'username': 'someusername'},
|
||||||
@@ -81,7 +81,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh):
|
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
|
||||||
params = {
|
params = {
|
||||||
'credential_type': 1,
|
'credential_type': 1,
|
||||||
'inputs': {'username': 'someusername'},
|
'inputs': {'username': 'someusername'},
|
||||||
@@ -385,10 +385,9 @@ def test_list_created_org_credentials(post, get, organization, org_admin, org_me
|
|||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by):
|
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by):
|
||||||
for i, password in enumerate(('abc', 'def', 'xyz')):
|
for i, password in enumerate(('abc', 'def', 'xyz')):
|
||||||
response = post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin)
|
post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin, expect=400)
|
||||||
|
|
||||||
response = get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, status=400)
|
get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, expect=400)
|
||||||
assert response.status_code == 400
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@@ -399,8 +398,7 @@ def test_inputs_cannot_contain_extra_fields(get, post, organization, admin, cred
|
|||||||
'credential_type': credentialtype_ssh.pk,
|
'credential_type': credentialtype_ssh.pk,
|
||||||
'inputs': {'invalid_field': 'foo'},
|
'inputs': {'invalid_field': 'foo'},
|
||||||
}
|
}
|
||||||
response = post(reverse('api:credential_list'), params, admin)
|
response = post(reverse('api:credential_list'), params, admin, expect=400)
|
||||||
assert response.status_code == 400
|
|
||||||
assert "'invalid_field' was unexpected" in response.data['inputs'][0]
|
assert "'invalid_field' was unexpected" in response.data['inputs'][0]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user