mirror of
https://github.com/ansible/awx.git
synced 2026-02-11 22:54:44 -03:30
Compare commits
209 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c5bea2b557 | ||
|
|
6d0c47fdd0 | ||
|
|
54b4acbdfc | ||
|
|
a41766090e | ||
|
|
34fa897dda | ||
|
|
32df114e41 | ||
|
|
018f235a64 | ||
|
|
7e77235d5e | ||
|
|
139d8f0ae2 | ||
|
|
7691365aea | ||
|
|
59f61517d4 | ||
|
|
fa670e2d7f | ||
|
|
a87a044d64 | ||
|
|
381ade1148 | ||
|
|
864a30e3d4 | ||
|
|
5f42db67e6 | ||
|
|
ddf4f288d4 | ||
|
|
e75bc8bc1e | ||
|
|
bb533287b8 | ||
|
|
9979fc659e | ||
|
|
9e5babc093 | ||
|
|
c71e2524ed | ||
|
|
48b4c62186 | ||
|
|
853730acb9 | ||
|
|
f1448fced1 | ||
|
|
7697b6a69b | ||
|
|
22a491c32c | ||
|
|
cbd9dce940 | ||
|
|
a4fdcc1cca | ||
|
|
df95439008 | ||
|
|
acd834df8b | ||
|
|
587f0ecf98 | ||
|
|
5a2091f7bf | ||
|
|
fa7423819a | ||
|
|
fde8af9f11 | ||
|
|
209e7e27b1 | ||
|
|
6c7d29a982 | ||
|
|
282ba36839 | ||
|
|
b727d2c3b3 | ||
|
|
7fc3d5c7c7 | ||
|
|
4e055f46c4 | ||
|
|
f595985b7c | ||
|
|
ea232315bf | ||
|
|
ee251812b5 | ||
|
|
00ba1ea569 | ||
|
|
d91af132c1 | ||
|
|
94e5795dfc | ||
|
|
c4688d6298 | ||
|
|
6763badea3 | ||
|
|
2c4ad6ef0f | ||
|
|
37f44d7214 | ||
|
|
98bbc836a6 | ||
|
|
b59aff50dc | ||
|
|
a70b0c1ddc | ||
|
|
db72c9d5b8 | ||
|
|
4e0d19914f | ||
|
|
6f2307f50e | ||
|
|
dbc2215bb6 | ||
|
|
7c08b29827 | ||
|
|
407194d320 | ||
|
|
853af295d9 | ||
|
|
4738c8333a | ||
|
|
13dcea0afd | ||
|
|
bc2d339981 | ||
|
|
bef9ef10bb | ||
|
|
8645fe5c57 | ||
|
|
b93aa20362 | ||
|
|
4bbfc8a946 | ||
|
|
2c8eef413b | ||
|
|
d5bad1a533 | ||
|
|
f6c0effcb2 | ||
|
|
31a086b11a | ||
|
|
d94f766fcb | ||
|
|
a7113549eb | ||
|
|
bfd811f408 | ||
|
|
030704a9e1 | ||
|
|
c312d9bce3 | ||
|
|
aadcc217eb | ||
|
|
345c1c11e9 | ||
|
|
2c3a7fafc5 | ||
|
|
dbcd32a1d9 | ||
|
|
d45e258a78 | ||
|
|
d16b69a102 | ||
|
|
8b4efbc973 | ||
|
|
4cb061e7db | ||
|
|
31db6a1447 | ||
|
|
ad9d5904d8 | ||
|
|
b837d549ff | ||
|
|
9e22865d2e | ||
|
|
ee3e3e1516 | ||
|
|
4a8f6e45f8 | ||
|
|
6a317cca1b | ||
|
|
d67af79451 | ||
|
|
fe77fda7b2 | ||
|
|
f613b76baa | ||
|
|
054cbe69d7 | ||
|
|
87e9dcb6d7 | ||
|
|
c8829b057e | ||
|
|
a0b376a6ca | ||
|
|
d675207f99 | ||
|
|
20504042c9 | ||
|
|
0e87e97820 | ||
|
|
1f154742df | ||
|
|
85fc81aab1 | ||
|
|
5cfeeb3e87 | ||
|
|
a8c07b06d8 | ||
|
|
53c5feaf6b | ||
|
|
6f57aaa8f5 | ||
|
|
bea74a401d | ||
|
|
54e85813c8 | ||
|
|
b69ed08fe5 | ||
|
|
de25408a23 | ||
|
|
b17f0a188b | ||
|
|
fb860d76ce | ||
|
|
451f20ce0f | ||
|
|
c1dc0c7b86 | ||
|
|
d65ea2a3d5 | ||
|
|
8827ae7554 | ||
|
|
4915262af1 | ||
|
|
d43c91e1a5 | ||
|
|
b470ca32af | ||
|
|
793777bec7 | ||
|
|
6dc4a4508d | ||
|
|
cf09a4220d | ||
|
|
659c3b64de | ||
|
|
37ad690d09 | ||
|
|
7845ec7e01 | ||
|
|
a15bcf1d55 | ||
|
|
7b3fb2c2a8 | ||
|
|
6df47c8449 | ||
|
|
cae42653bf | ||
|
|
da46a29f40 | ||
|
|
0eb465531c | ||
|
|
d0fe0ed796 | ||
|
|
ceafa14c9d | ||
|
|
08e1454098 | ||
|
|
776b661fb3 | ||
|
|
af6ccdbde5 | ||
|
|
559ab3564b | ||
|
|
208ef0ce25 | ||
|
|
c3d9aa54d8 | ||
|
|
66efe7198a | ||
|
|
adf930ee42 | ||
|
|
892410477a | ||
|
|
0d4f653794 | ||
|
|
8de8f6dce2 | ||
|
|
fc9064e27f | ||
|
|
7de350dc3e | ||
|
|
d4bdaad4d8 | ||
|
|
a9b2ffa3e9 | ||
|
|
1b8d409043 | ||
|
|
da2bccf5a8 | ||
|
|
a2f083bd8e | ||
|
|
4d641b6cf5 | ||
|
|
439c3f0c23 | ||
|
|
946bbe3560 | ||
|
|
20f054d600 | ||
|
|
918d5b3565 | ||
|
|
158314af50 | ||
|
|
4754819a09 | ||
|
|
78fc23138a | ||
|
|
014534bfa5 | ||
|
|
2502e7c7d8 | ||
|
|
fb237e3834 | ||
|
|
e4646ae611 | ||
|
|
7dc77546f4 | ||
|
|
f5f85666c8 | ||
|
|
47a061eb39 | ||
|
|
c760577855 | ||
|
|
814ceb0d06 | ||
|
|
f178c84728 | ||
|
|
c0f71801f6 | ||
|
|
4e8e1398d7 | ||
|
|
3d6a8fd4ef | ||
|
|
e873bb1304 | ||
|
|
672f1eb745 | ||
|
|
199507c6f1 | ||
|
|
a176c04c14 | ||
|
|
e3af658f82 | ||
|
|
e8a3b96482 | ||
|
|
c015e8413e | ||
|
|
390c2d8907 | ||
|
|
97605c5f19 | ||
|
|
818c326160 | ||
|
|
c98727d83e | ||
|
|
a138a92e67 | ||
|
|
7aed19ffda | ||
|
|
3bb559dd09 | ||
|
|
389a729b75 | ||
|
|
2f3c9122fd | ||
|
|
733478ee19 | ||
|
|
41c6337fc1 | ||
|
|
7446da1c2f | ||
|
|
c79fca5ceb | ||
|
|
dc5f43927a | ||
|
|
35a5a81e19 | ||
|
|
9dcc11d54c | ||
|
|
74ce21fa54 | ||
|
|
eb93660b36 | ||
|
|
f50e597548 | ||
|
|
817c3b36b9 | ||
|
|
1859a6ae69 | ||
|
|
0645d342dd | ||
|
|
61ec03e540 | ||
|
|
09f0a366bf | ||
|
|
778961d31e | ||
|
|
f962c88df3 | ||
|
|
8db3ffe719 | ||
|
|
cc5d4dd119 |
2
.github/actions/awx_devel_image/action.yml
vendored
2
.github/actions/awx_devel_image/action.yml
vendored
@@ -24,7 +24,7 @@ runs:
|
|||||||
|
|
||||||
- name: Pre-pull latest devel image to warm cache
|
- name: Pre-pull latest devel image to warm cache
|
||||||
shell: bash
|
shell: bash
|
||||||
run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
|
run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
|
||||||
|
|
||||||
- name: Build image for current source checkout
|
- name: Build image for current source checkout
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|||||||
10
.github/actions/run_awx_devel/action.yml
vendored
10
.github/actions/run_awx_devel/action.yml
vendored
@@ -57,16 +57,6 @@ runs:
|
|||||||
awx-manage update_password --username=admin --password=password
|
awx-manage update_password --username=admin --password=password
|
||||||
EOSH
|
EOSH
|
||||||
|
|
||||||
- name: Build UI
|
|
||||||
# This must be a string comparison in composite actions:
|
|
||||||
# https://github.com/actions/runner/issues/2238
|
|
||||||
if: ${{ inputs.build-ui == 'true' }}
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
docker exec -i tools_awx_1 sh <<-EOSH
|
|
||||||
make ui-devel
|
|
||||||
EOSH
|
|
||||||
|
|
||||||
- name: Get instance data
|
- name: Get instance data
|
||||||
id: data
|
id: data
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|||||||
2
.github/triage_replies.md
vendored
2
.github/triage_replies.md
vendored
@@ -1,7 +1,7 @@
|
|||||||
## General
|
## General
|
||||||
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
|
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
|
||||||
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
|
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
|
||||||
- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
|
- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
37
.github/workflows/ci.yml
vendored
37
.github/workflows/ci.yml
vendored
@@ -38,7 +38,9 @@ jobs:
|
|||||||
- name: ui-test-general
|
- name: ui-test-general
|
||||||
command: make ui-test-general
|
command: make ui-test-general
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Build awx_devel image for running checks
|
- name: Build awx_devel image for running checks
|
||||||
uses: ./.github/actions/awx_devel_image
|
uses: ./.github/actions/awx_devel_image
|
||||||
@@ -52,7 +54,9 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: ./.github/actions/run_awx_devel
|
- uses: ./.github/actions/run_awx_devel
|
||||||
id: awx
|
id: awx
|
||||||
@@ -66,15 +70,19 @@ jobs:
|
|||||||
awx-operator:
|
awx-operator:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
|
env:
|
||||||
|
DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout awx
|
- name: Checkout awx
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false
|
||||||
path: awx
|
path: awx
|
||||||
|
|
||||||
- name: Checkout awx-operator
|
- name: Checkout awx-operator
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false\
|
||||||
repository: ansible/awx-operator
|
repository: ansible/awx-operator
|
||||||
path: awx-operator
|
path: awx-operator
|
||||||
|
|
||||||
@@ -111,6 +119,15 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
AWX_TEST_IMAGE: local/awx
|
AWX_TEST_IMAGE: local/awx
|
||||||
AWX_TEST_VERSION: ci
|
AWX_TEST_VERSION: ci
|
||||||
|
AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
|
||||||
|
STORE_DEBUG_OUTPUT: true
|
||||||
|
|
||||||
|
- name: Upload debug output
|
||||||
|
if: failure()
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: awx-operator-debug-output
|
||||||
|
path: ${{ env.DEBUG_OUTPUT_DIR }}
|
||||||
|
|
||||||
collection-sanity:
|
collection-sanity:
|
||||||
name: awx_collection sanity
|
name: awx_collection sanity
|
||||||
@@ -119,7 +136,9 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
|
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
|
||||||
- name: Upgrade ansible-core
|
- name: Upgrade ansible-core
|
||||||
@@ -143,7 +162,9 @@ jobs:
|
|||||||
- name: r-z0-9
|
- name: r-z0-9
|
||||||
regex: ^[r-z0-9]
|
regex: ^[r-z0-9]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: ./.github/actions/run_awx_devel
|
- uses: ./.github/actions/run_awx_devel
|
||||||
id: awx
|
id: awx
|
||||||
@@ -189,7 +210,9 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Upgrade ansible-core
|
- name: Upgrade ansible-core
|
||||||
run: python3 -m pip install --upgrade ansible-core
|
run: python3 -m pip install --upgrade ansible-core
|
||||||
|
|||||||
57
.github/workflows/dab-release.yml
vendored
Normal file
57
.github/workflows/dab-release.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
name: django-ansible-base requirements update
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 6 * * *' # once an day @ 6 AM
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
contents: write
|
||||||
|
jobs:
|
||||||
|
dab-pin-newest:
|
||||||
|
if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- id: dab-release
|
||||||
|
name: Get current django-ansible-base release version
|
||||||
|
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
|
||||||
|
with:
|
||||||
|
owner: ansible
|
||||||
|
repo: django-ansible-base
|
||||||
|
excludes: prerelease, draft
|
||||||
|
|
||||||
|
- name: Check out respository code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- id: dab-pinned
|
||||||
|
name: Get current django-ansible-base pinned version
|
||||||
|
run:
|
||||||
|
echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Update django-ansible-base pinned version to upstream release
|
||||||
|
run:
|
||||||
|
requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
|
||||||
|
with:
|
||||||
|
base: devel
|
||||||
|
branch: bump-django-ansible-base
|
||||||
|
title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
|
||||||
|
body: |
|
||||||
|
##### SUMMARY
|
||||||
|
Automated .github/workflows/dab-release.yml
|
||||||
|
|
||||||
|
django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
|
||||||
|
requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
|
||||||
|
|
||||||
|
##### ISSUE TYPE
|
||||||
|
- Bug, Docs Fix or other nominal change
|
||||||
|
|
||||||
|
##### COMPONENT NAME
|
||||||
|
- API
|
||||||
|
|
||||||
|
commit-message: |
|
||||||
|
Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
|
||||||
|
add-paths:
|
||||||
|
requirements/requirements_git.txt
|
||||||
13
.github/workflows/devel_images.yml
vendored
13
.github/workflows/devel_images.yml
vendored
@@ -2,6 +2,7 @@
|
|||||||
name: Build/Push Development Images
|
name: Build/Push Development Images
|
||||||
env:
|
env:
|
||||||
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
||||||
|
DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
push:
|
push:
|
||||||
@@ -34,7 +35,9 @@ jobs:
|
|||||||
exit 0
|
exit 0
|
||||||
if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
|
if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
|
||||||
|
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v3
|
||||||
@@ -59,16 +62,14 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
|
|
||||||
- name: Setup node and npm
|
- name: Setup node and npm for the new UI build
|
||||||
uses: actions/setup-node@v2
|
uses: actions/setup-node@v2
|
||||||
with:
|
with:
|
||||||
node-version: '16.13.1'
|
node-version: '18'
|
||||||
if: matrix.build-targets.image-name == 'awx'
|
if: matrix.build-targets.image-name == 'awx'
|
||||||
|
|
||||||
- name: Prebuild UI for awx image (to speed up build process)
|
- name: Prebuild new UI for awx image (to speed up build process)
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get install gettext
|
|
||||||
make ui-release
|
|
||||||
make ui-next
|
make ui-next
|
||||||
if: matrix.build-targets.image-name == 'awx'
|
if: matrix.build-targets.image-name == 'awx'
|
||||||
|
|
||||||
|
|||||||
4
.github/workflows/docs.yml
vendored
4
.github/workflows/docs.yml
vendored
@@ -8,7 +8,9 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: install tox
|
- name: install tox
|
||||||
run: pip install tox
|
run: pip install tox
|
||||||
|
|||||||
75
.github/workflows/e2e_test.yml
vendored
75
.github/workflows/e2e_test.yml
vendored
@@ -1,75 +0,0 @@
|
|||||||
---
|
|
||||||
name: E2E Tests
|
|
||||||
env:
|
|
||||||
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types: [labeled]
|
|
||||||
jobs:
|
|
||||||
e2e-test:
|
|
||||||
if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 40
|
|
||||||
permissions:
|
|
||||||
packages: write
|
|
||||||
contents: read
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- uses: ./.github/actions/run_awx_devel
|
|
||||||
id: awx
|
|
||||||
with:
|
|
||||||
build-ui: true
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Pull awx_cypress_base image
|
|
||||||
run: |
|
|
||||||
docker pull quay.io/awx/awx_cypress_base:latest
|
|
||||||
|
|
||||||
- name: Checkout test project
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
repository: ${{ github.repository_owner }}/tower-qa
|
|
||||||
ssh-key: ${{ secrets.QA_REPO_KEY }}
|
|
||||||
path: tower-qa
|
|
||||||
ref: devel
|
|
||||||
|
|
||||||
- name: Build cypress
|
|
||||||
run: |
|
|
||||||
cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
|
|
||||||
docker build -t awx-pf-tests .
|
|
||||||
|
|
||||||
- name: Run E2E tests
|
|
||||||
env:
|
|
||||||
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
|
|
||||||
run: |
|
|
||||||
export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
|
|
||||||
export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
|
|
||||||
export COMMIT_INFO_SHA=$GITHUB_SHA
|
|
||||||
export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
|
|
||||||
cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
|
|
||||||
AWX_IP=${{ steps.awx.outputs.ip }}
|
|
||||||
printenv > .env
|
|
||||||
echo "Executing tests:"
|
|
||||||
docker run \
|
|
||||||
--network '_sources_default' \
|
|
||||||
--ipc=host \
|
|
||||||
--env-file=.env \
|
|
||||||
-e CYPRESS_baseUrl="https://$AWX_IP:8043" \
|
|
||||||
-e CYPRESS_AWX_E2E_USERNAME=admin \
|
|
||||||
-e CYPRESS_AWX_E2E_PASSWORD='password' \
|
|
||||||
-e COMMAND="npm run cypress-concurrently-gha" \
|
|
||||||
-v /dev/shm:/dev/shm \
|
|
||||||
-v $PWD:/e2e \
|
|
||||||
-w /e2e \
|
|
||||||
awx-pf-tests run --project .
|
|
||||||
|
|
||||||
- uses: ./.github/actions/upload_awx_devel_logs
|
|
||||||
if: always()
|
|
||||||
with:
|
|
||||||
log-filename: e2e-${{ matrix.job }}.log
|
|
||||||
5
.github/workflows/label_issue.yml
vendored
5
.github/workflows/label_issue.yml
vendored
@@ -30,7 +30,10 @@ jobs:
|
|||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
name: Label Issue - Community
|
name: Label Issue - Community
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v4
|
||||||
- name: Install python requests
|
- name: Install python requests
|
||||||
run: pip install requests
|
run: pip install requests
|
||||||
|
|||||||
5
.github/workflows/label_pr.yml
vendored
5
.github/workflows/label_pr.yml
vendored
@@ -29,7 +29,10 @@ jobs:
|
|||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
name: Label PR - Community
|
name: Label PR - Community
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v4
|
||||||
- name: Install python requests
|
- name: Install python requests
|
||||||
run: pip install requests
|
run: pip install requests
|
||||||
|
|||||||
47
.github/workflows/promote.yml
vendored
47
.github/workflows/promote.yml
vendored
@@ -7,7 +7,11 @@ env:
|
|||||||
on:
|
on:
|
||||||
release:
|
release:
|
||||||
types: [published]
|
types: [published]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
tag_name:
|
||||||
|
description: 'Name for the tag of the release.'
|
||||||
|
required: true
|
||||||
permissions:
|
permissions:
|
||||||
contents: read # to fetch code (actions/checkout)
|
contents: read # to fetch code (actions/checkout)
|
||||||
|
|
||||||
@@ -17,8 +21,20 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
steps:
|
steps:
|
||||||
|
- name: Set GitHub Env vars for workflow_dispatch event
|
||||||
|
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||||
|
run: |
|
||||||
|
echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Set GitHub Env vars if release event
|
||||||
|
if: ${{ github.event_name == 'release' }}
|
||||||
|
run: |
|
||||||
|
echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Checkout awx
|
- name: Checkout awx
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Get python version from Makefile
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
@@ -43,16 +59,21 @@ jobs:
|
|||||||
- name: Build collection and publish to galaxy
|
- name: Build collection and publish to galaxy
|
||||||
env:
|
env:
|
||||||
COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
|
COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
|
||||||
COLLECTION_VERSION: ${{ github.event.release.tag_name }}
|
COLLECTION_VERSION: ${{ env.TAG_NAME }}
|
||||||
COLLECTION_TEMPLATE_VERSION: true
|
COLLECTION_TEMPLATE_VERSION: true
|
||||||
run: |
|
run: |
|
||||||
|
sudo apt-get install jq
|
||||||
make build_collection
|
make build_collection
|
||||||
if [ "$(curl -L --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
|
count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
|
||||||
echo "Galaxy release already done"; \
|
if [[ "$count" == "1" ]]; then
|
||||||
else \
|
echo "Galaxy release already done";
|
||||||
|
elif [[ "$count" == "0" ]]; then
|
||||||
ansible-galaxy collection publish \
|
ansible-galaxy collection publish \
|
||||||
--token=${{ secrets.GALAXY_TOKEN }} \
|
--token=${{ secrets.GALAXY_TOKEN }} \
|
||||||
awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
|
awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
|
||||||
|
else
|
||||||
|
echo "Unexpected count from galaxy search: $count";
|
||||||
|
exit 1;
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Set official pypi info
|
- name: Set official pypi info
|
||||||
@@ -64,6 +85,8 @@ jobs:
|
|||||||
if: ${{ github.repository_owner != 'ansible' }}
|
if: ${{ github.repository_owner != 'ansible' }}
|
||||||
|
|
||||||
- name: Build awxkit and upload to pypi
|
- name: Build awxkit and upload to pypi
|
||||||
|
env:
|
||||||
|
SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
|
||||||
run: |
|
run: |
|
||||||
git reset --hard
|
git reset --hard
|
||||||
cd awxkit && python3 setup.py sdist bdist_wheel
|
cd awxkit && python3 setup.py sdist bdist_wheel
|
||||||
@@ -84,14 +107,14 @@ jobs:
|
|||||||
- name: Re-tag and promote awx image
|
- name: Re-tag and promote awx image
|
||||||
run: |
|
run: |
|
||||||
docker buildx imagetools create \
|
docker buildx imagetools create \
|
||||||
ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
|
ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
|
||||||
--tag quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
|
--tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
|
||||||
docker buildx imagetools create \
|
docker buildx imagetools create \
|
||||||
ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
|
ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
|
||||||
--tag quay.io/${{ github.repository }}:latest
|
--tag quay.io/${{ github.repository }}:latest
|
||||||
|
|
||||||
- name: Re-tag and promote awx-ee image
|
- name: Re-tag and promote awx-ee image
|
||||||
run: |
|
run: |
|
||||||
docker buildx imagetools create \
|
docker buildx imagetools create \
|
||||||
ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} \
|
ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
|
||||||
--tag quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
|
--tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
|
||||||
|
|||||||
26
.github/workflows/stage.yml
vendored
26
.github/workflows/stage.yml
vendored
@@ -45,19 +45,22 @@ jobs:
|
|||||||
exit 0
|
exit 0
|
||||||
|
|
||||||
- name: Checkout awx
|
- name: Checkout awx
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false
|
||||||
path: awx
|
path: awx
|
||||||
|
|
||||||
- name: Checkout awx-operator
|
- name: Checkout awx-operator
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false
|
||||||
repository: ${{ github.repository_owner }}/awx-operator
|
repository: ${{ github.repository_owner }}/awx-operator
|
||||||
path: awx-operator
|
path: awx-operator
|
||||||
|
|
||||||
- name: Checkout awx-logos
|
- name: Checkout awx-logos
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
show-progress: false
|
||||||
repository: ansible/awx-logos
|
repository: ansible/awx-logos
|
||||||
path: awx-logos
|
path: awx-logos
|
||||||
|
|
||||||
@@ -86,17 +89,14 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
|
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
|
||||||
|
|
||||||
- name: Setup node and npm
|
- name: Setup node and npm for new UI build
|
||||||
uses: actions/setup-node@v2
|
uses: actions/setup-node@v2
|
||||||
with:
|
with:
|
||||||
node-version: '16.13.1'
|
node-version: '18'
|
||||||
|
|
||||||
- name: Prebuild UI for awx image (to speed up build process)
|
- name: Prebuild new UI for awx image (to speed up build process)
|
||||||
working-directory: awx
|
working-directory: awx
|
||||||
run: |
|
run: make ui-next
|
||||||
sudo apt-get install gettext
|
|
||||||
make ui-release
|
|
||||||
make ui-next
|
|
||||||
|
|
||||||
- name: Set build env variables
|
- name: Set build env variables
|
||||||
run: |
|
run: |
|
||||||
@@ -136,9 +136,9 @@ jobs:
|
|||||||
- name: Pulling images for test deployment with awx-operator
|
- name: Pulling images for test deployment with awx-operator
|
||||||
# awx operator molecue test expect to kind load image and buildx exports image to registry and not local
|
# awx operator molecue test expect to kind load image and buildx exports image to registry and not local
|
||||||
run: |
|
run: |
|
||||||
docker pull ${AWX_OPERATOR_TEST_IMAGE}
|
docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
|
||||||
docker pull ${AWX_EE_TEST_IMAGE}
|
docker pull -q ${AWX_EE_TEST_IMAGE}
|
||||||
docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
|
docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
|
||||||
|
|
||||||
- name: Run test deployment with awx-operator
|
- name: Run test deployment with awx-operator
|
||||||
working-directory: awx-operator
|
working-directory: awx-operator
|
||||||
|
|||||||
4
.github/workflows/update_dependabot_prs.yml
vendored
4
.github/workflows/update_dependabot_prs.yml
vendored
@@ -13,7 +13,9 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout branch
|
- name: Checkout branch
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Update PR Body
|
- name: Update PR Body
|
||||||
env:
|
env:
|
||||||
|
|||||||
6
.github/workflows/upload_schema.yml
vendored
6
.github/workflows/upload_schema.yml
vendored
@@ -18,7 +18,9 @@ jobs:
|
|||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
show-progress: false
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Get python version from Makefile
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
@@ -34,7 +36,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Pre-pull image to warm build cache
|
- name: Pre-pull image to warm build cache
|
||||||
run: |
|
run: |
|
||||||
docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
||||||
|
|
||||||
- name: Build image
|
- name: Build image
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
@@ -11,6 +11,8 @@ ignore: |
|
|||||||
# django template files
|
# django template files
|
||||||
awx/api/templates/instance_install_bundle/**
|
awx/api/templates/instance_install_bundle/**
|
||||||
.readthedocs.yaml
|
.readthedocs.yaml
|
||||||
|
tools/loki
|
||||||
|
tools/otel
|
||||||
|
|
||||||
extends: default
|
extends: default
|
||||||
|
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
|
|||||||
|
|
||||||
#### Frontend Development
|
#### Frontend Development
|
||||||
|
|
||||||
See [the ui development documentation](awx/ui/CONTRIBUTING.md).
|
See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
|
||||||
|
|
||||||
#### Fork and clone the AWX repo
|
#### Fork and clone the AWX repo
|
||||||
|
|
||||||
@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for
|
|||||||
|
|
||||||
**NOTES**
|
**NOTES**
|
||||||
|
|
||||||
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
||||||
|
|
||||||
|
|
||||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||||
@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for
|
|||||||
|
|
||||||
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
|
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
|
||||||
|
|
||||||
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
|
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
|
||||||
|
|
||||||
|
|
||||||
## Submitting Pull Requests
|
## Submitting Pull Requests
|
||||||
@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
|
|||||||
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
|
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
|
||||||
|
|
||||||
## Reporting Issues
|
## Reporting Issues
|
||||||
|
|
||||||
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
|
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
|
||||||
|
|
||||||
## Getting Help
|
## Getting Help
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_tr
|
|||||||
Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
|
Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
|
||||||
These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
|
These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
|
||||||
|
|
||||||
The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
|
The `state:needs_triage` label will remain on your pull request until a person has looked at it.
|
||||||
|
|
||||||
You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
|
You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
|
||||||
The comment will look something like `CC @matburt @wwitzel3 ...`.
|
The comment will look something like `CC @matburt @wwitzel3 ...`.
|
||||||
|
|||||||
71
Makefile
71
Makefile
@@ -47,8 +47,14 @@ VAULT ?= false
|
|||||||
VAULT_TLS ?= false
|
VAULT_TLS ?= false
|
||||||
# If set to true docker-compose will also start a tacacs+ instance
|
# If set to true docker-compose will also start a tacacs+ instance
|
||||||
TACACS ?= false
|
TACACS ?= false
|
||||||
|
# If set to true docker-compose will also start an OpenTelemetry Collector instance
|
||||||
|
OTEL ?= false
|
||||||
|
# If set to true docker-compose will also start a Loki instance
|
||||||
|
LOKI ?= false
|
||||||
# If set to true docker-compose will install editable dependencies
|
# If set to true docker-compose will install editable dependencies
|
||||||
EDITABLE_DEPENDENCIES ?= false
|
EDITABLE_DEPENDENCIES ?= false
|
||||||
|
# If set to true, use tls for postgres connection
|
||||||
|
PG_TLS ?= false
|
||||||
|
|
||||||
VENV_BASE ?= /var/lib/awx/venv
|
VENV_BASE ?= /var/lib/awx/venv
|
||||||
|
|
||||||
@@ -57,6 +63,11 @@ DEV_DOCKER_OWNER ?= ansible
|
|||||||
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
|
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
|
||||||
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
|
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
|
||||||
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
||||||
|
IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
|
||||||
|
IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
|
||||||
|
|
||||||
|
# Common command to use for running ansible-playbook
|
||||||
|
ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
|
||||||
|
|
||||||
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
||||||
|
|
||||||
@@ -65,7 +76,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
|||||||
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
|
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
|
||||||
# These should be upgraded in the AWX and Ansible venv before attempting
|
# These should be upgraded in the AWX and Ansible venv before attempting
|
||||||
# to install the actual requirements
|
# to install the actual requirements
|
||||||
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
|
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
|
||||||
|
|
||||||
NAME ?= awx
|
NAME ?= awx
|
||||||
|
|
||||||
@@ -80,6 +91,18 @@ I18N_FLAG_FILE = .i18n_built
|
|||||||
## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
|
## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
|
||||||
PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
|
PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
|
||||||
|
|
||||||
|
# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
|
||||||
|
# DOCKER_CACHE=--no-cache make docker-compose-build
|
||||||
|
ifeq ($(DOCKER_CACHE),)
|
||||||
|
DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
|
||||||
|
DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
|
||||||
|
DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
|
||||||
|
else
|
||||||
|
DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
|
||||||
|
DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
|
||||||
|
DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
|
||||||
|
endif
|
||||||
|
|
||||||
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
|
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
|
||||||
develop refresh adduser migrate dbchange \
|
develop refresh adduser migrate dbchange \
|
||||||
receiver test test_unit test_coverage coverage_html \
|
receiver test test_unit test_coverage coverage_html \
|
||||||
@@ -362,7 +385,7 @@ symlink_collection:
|
|||||||
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
|
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
|
||||||
|
|
||||||
awx_collection_build: $(shell find awx_collection -type f)
|
awx_collection_build: $(shell find awx_collection -type f)
|
||||||
ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
|
$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
|
||||||
-e collection_package=$(COLLECTION_PACKAGE) \
|
-e collection_package=$(COLLECTION_PACKAGE) \
|
||||||
-e collection_namespace=$(COLLECTION_NAMESPACE) \
|
-e collection_namespace=$(COLLECTION_NAMESPACE) \
|
||||||
-e collection_version=$(COLLECTION_VERSION) \
|
-e collection_version=$(COLLECTION_VERSION) \
|
||||||
@@ -479,13 +502,7 @@ ui-test-general:
|
|||||||
$(NPM_BIN) run --prefix awx/ui pretest
|
$(NPM_BIN) run --prefix awx/ui pretest
|
||||||
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
|
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
|
||||||
|
|
||||||
# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
|
|
||||||
HEADLESS ?= no
|
|
||||||
ifeq ($(HEADLESS), yes)
|
|
||||||
dist/$(SDIST_TAR_FILE):
|
dist/$(SDIST_TAR_FILE):
|
||||||
else
|
|
||||||
dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
|
|
||||||
endif
|
|
||||||
$(PYTHON) -m build -s
|
$(PYTHON) -m build -s
|
||||||
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
|
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
|
||||||
|
|
||||||
@@ -516,10 +533,10 @@ endif
|
|||||||
|
|
||||||
docker-compose-sources: .git/hooks/pre-commit
|
docker-compose-sources: .git/hooks/pre-commit
|
||||||
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
|
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
|
||||||
ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
|
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
|
||||||
fi;
|
fi;
|
||||||
|
|
||||||
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
|
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
|
||||||
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
|
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
|
||||||
-e awx_image_tag=$(COMPOSE_TAG) \
|
-e awx_image_tag=$(COMPOSE_TAG) \
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE) \
|
-e receptor_image=$(RECEPTOR_IMAGE) \
|
||||||
@@ -535,12 +552,15 @@ docker-compose-sources: .git/hooks/pre-commit
|
|||||||
-e enable_vault=$(VAULT) \
|
-e enable_vault=$(VAULT) \
|
||||||
-e vault_tls=$(VAULT_TLS) \
|
-e vault_tls=$(VAULT_TLS) \
|
||||||
-e enable_tacacs=$(TACACS) \
|
-e enable_tacacs=$(TACACS) \
|
||||||
|
-e enable_otel=$(OTEL) \
|
||||||
|
-e enable_loki=$(LOKI) \
|
||||||
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
|
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
|
||||||
|
-e pg_tls=$(PG_TLS) \
|
||||||
$(EXTRA_SOURCES_ANSIBLE_OPTS)
|
$(EXTRA_SOURCES_ANSIBLE_OPTS)
|
||||||
|
|
||||||
docker-compose: awx/projects docker-compose-sources
|
docker-compose: awx/projects docker-compose-sources
|
||||||
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
|
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
|
||||||
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
|
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
|
||||||
-e enable_vault=$(VAULT) \
|
-e enable_vault=$(VAULT) \
|
||||||
-e vault_tls=$(VAULT_TLS) \
|
-e vault_tls=$(VAULT_TLS) \
|
||||||
-e enable_ldap=$(LDAP); \
|
-e enable_ldap=$(LDAP); \
|
||||||
@@ -583,7 +603,7 @@ docker-compose-container-group-clean:
|
|||||||
.PHONY: Dockerfile.dev
|
.PHONY: Dockerfile.dev
|
||||||
## Generate Dockerfile.dev for awx_devel image
|
## Generate Dockerfile.dev for awx_devel image
|
||||||
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
||||||
ansible-playbook tools/ansible/dockerfile.yml \
|
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
||||||
-e dockerfile_name=Dockerfile.dev \
|
-e dockerfile_name=Dockerfile.dev \
|
||||||
-e build_dev=True \
|
-e build_dev=True \
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE)
|
-e receptor_image=$(RECEPTOR_IMAGE)
|
||||||
@@ -594,8 +614,7 @@ docker-compose-build: Dockerfile.dev
|
|||||||
-f Dockerfile.dev \
|
-f Dockerfile.dev \
|
||||||
-t $(DEVEL_IMAGE_NAME) \
|
-t $(DEVEL_IMAGE_NAME) \
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
|
$(DOCKER_DEVEL_CACHE_FLAG) .
|
||||||
|
|
||||||
|
|
||||||
.PHONY: docker-compose-buildx
|
.PHONY: docker-compose-buildx
|
||||||
## Build awx_devel image for docker compose development environment for multiple architectures
|
## Build awx_devel image for docker compose development environment for multiple architectures
|
||||||
@@ -605,7 +624,7 @@ docker-compose-buildx: Dockerfile.dev
|
|||||||
- docker buildx build \
|
- docker buildx build \
|
||||||
--push \
|
--push \
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) \
|
$(DOCKER_DEVEL_CACHE_FLAG) \
|
||||||
--platform=$(PLATFORMS) \
|
--platform=$(PLATFORMS) \
|
||||||
--tag $(DEVEL_IMAGE_NAME) \
|
--tag $(DEVEL_IMAGE_NAME) \
|
||||||
-f Dockerfile.dev .
|
-f Dockerfile.dev .
|
||||||
@@ -616,7 +635,7 @@ docker-clean:
|
|||||||
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
|
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
|
||||||
|
|
||||||
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
|
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
|
||||||
docker volume rm -f tools_var_lib_awx tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
|
docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
|
||||||
|
|
||||||
docker-refresh: docker-clean docker-compose
|
docker-refresh: docker-clean docker-compose
|
||||||
|
|
||||||
@@ -658,7 +677,7 @@ version-for-buildyml:
|
|||||||
.PHONY: Dockerfile
|
.PHONY: Dockerfile
|
||||||
## Generate Dockerfile for awx image
|
## Generate Dockerfile for awx image
|
||||||
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
||||||
ansible-playbook tools/ansible/dockerfile.yml \
|
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE) \
|
-e receptor_image=$(RECEPTOR_IMAGE) \
|
||||||
-e headless=$(HEADLESS)
|
-e headless=$(HEADLESS)
|
||||||
|
|
||||||
@@ -668,7 +687,8 @@ awx-kube-build: Dockerfile
|
|||||||
--build-arg VERSION=$(VERSION) \
|
--build-arg VERSION=$(VERSION) \
|
||||||
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
||||||
--build-arg HEADLESS=$(HEADLESS) \
|
--build-arg HEADLESS=$(HEADLESS) \
|
||||||
-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
|
$(DOCKER_KUBE_CACHE_FLAG) \
|
||||||
|
-t $(IMAGE_KUBE) .
|
||||||
|
|
||||||
## Build multi-arch awx image for deployment on Kubernetes environment.
|
## Build multi-arch awx image for deployment on Kubernetes environment.
|
||||||
awx-kube-buildx: Dockerfile
|
awx-kube-buildx: Dockerfile
|
||||||
@@ -680,7 +700,8 @@ awx-kube-buildx: Dockerfile
|
|||||||
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
|
||||||
--build-arg HEADLESS=$(HEADLESS) \
|
--build-arg HEADLESS=$(HEADLESS) \
|
||||||
--platform=$(PLATFORMS) \
|
--platform=$(PLATFORMS) \
|
||||||
--tag $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) \
|
$(DOCKER_KUBE_CACHE_FLAG) \
|
||||||
|
--tag $(IMAGE_KUBE) \
|
||||||
-f Dockerfile .
|
-f Dockerfile .
|
||||||
- docker buildx rm awx-kube-buildx
|
- docker buildx rm awx-kube-buildx
|
||||||
|
|
||||||
@@ -688,7 +709,7 @@ awx-kube-buildx: Dockerfile
|
|||||||
.PHONY: Dockerfile.kube-dev
|
.PHONY: Dockerfile.kube-dev
|
||||||
## Generate Docker.kube-dev for awx_kube_devel image
|
## Generate Docker.kube-dev for awx_kube_devel image
|
||||||
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
||||||
ansible-playbook tools/ansible/dockerfile.yml \
|
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
||||||
-e dockerfile_name=Dockerfile.kube-dev \
|
-e dockerfile_name=Dockerfile.kube-dev \
|
||||||
-e kube_dev=True \
|
-e kube_dev=True \
|
||||||
-e template_dest=_build_kube_dev \
|
-e template_dest=_build_kube_dev \
|
||||||
@@ -698,8 +719,8 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
|||||||
awx-kube-dev-build: Dockerfile.kube-dev
|
awx-kube-dev-build: Dockerfile.kube-dev
|
||||||
DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
|
DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
|
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
|
||||||
-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
|
-t $(IMAGE_KUBE_DEV) .
|
||||||
|
|
||||||
## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
|
## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
|
||||||
awx-kube-dev-buildx: Dockerfile.kube-dev
|
awx-kube-dev-buildx: Dockerfile.kube-dev
|
||||||
@@ -708,14 +729,14 @@ awx-kube-dev-buildx: Dockerfile.kube-dev
|
|||||||
- docker buildx build \
|
- docker buildx build \
|
||||||
--push \
|
--push \
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
|
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
|
||||||
--platform=$(PLATFORMS) \
|
--platform=$(PLATFORMS) \
|
||||||
--tag $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
|
--tag $(IMAGE_KUBE_DEV) \
|
||||||
-f Dockerfile.kube-dev .
|
-f Dockerfile.kube-dev .
|
||||||
- docker buildx rm awx-kube-dev-buildx
|
- docker buildx rm awx-kube-dev-buildx
|
||||||
|
|
||||||
kind-dev-load: awx-kube-dev-build
|
kind-dev-load: awx-kube-dev-build
|
||||||
$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
|
$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
|
||||||
|
|
||||||
# Translation TASKS
|
# Translation TASKS
|
||||||
# --------------------------------------
|
# --------------------------------------
|
||||||
|
|||||||
@@ -30,14 +30,21 @@ from rest_framework.permissions import IsAuthenticated
|
|||||||
from rest_framework.renderers import StaticHTMLRenderer
|
from rest_framework.renderers import StaticHTMLRenderer
|
||||||
from rest_framework.negotiation import DefaultContentNegotiation
|
from rest_framework.negotiation import DefaultContentNegotiation
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
|
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
|
||||||
from ansible_base.lib.utils.models import get_all_field_names
|
from ansible_base.lib.utils.models import get_all_field_names
|
||||||
|
from ansible_base.lib.utils.requests import get_remote_host
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
|
||||||
|
from ansible_base.rbac.permission_registry import permission_registry
|
||||||
|
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
|
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
|
||||||
|
from awx.main.models.rbac import give_creator_permissions
|
||||||
from awx.main.access import optimize_queryset
|
from awx.main.access import optimize_queryset
|
||||||
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
|
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
|
||||||
from awx.main.utils.licensing import server_product_name
|
from awx.main.utils.licensing import server_product_name
|
||||||
|
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
|
||||||
from awx.main.views import ApiErrorView
|
from awx.main.views import ApiErrorView
|
||||||
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
|
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
|
||||||
from awx.api.versioning import URLPathVersioning
|
from awx.api.versioning import URLPathVersioning
|
||||||
@@ -89,20 +96,26 @@ class LoggedLoginView(auth_views.LoginView):
|
|||||||
|
|
||||||
def post(self, request, *args, **kwargs):
|
def post(self, request, *args, **kwargs):
|
||||||
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
|
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
|
||||||
|
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
|
||||||
if request.user.is_authenticated:
|
if request.user.is_authenticated:
|
||||||
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
|
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
|
||||||
ret.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
|
ret.set_cookie(
|
||||||
|
'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
|
||||||
|
)
|
||||||
ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
|
ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
else:
|
else:
|
||||||
if 'username' in self.request.POST:
|
if 'username' in self.request.POST:
|
||||||
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
|
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
|
||||||
ret.status_code = 401
|
ret.status_code = 401
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
class LoggedLogoutView(auth_views.LogoutView):
|
class LoggedLogoutView(auth_views.LogoutView):
|
||||||
|
|
||||||
|
success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
|
||||||
|
|
||||||
def dispatch(self, request, *args, **kwargs):
|
def dispatch(self, request, *args, **kwargs):
|
||||||
original_user = getattr(request, 'user', None)
|
original_user = getattr(request, 'user', None)
|
||||||
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
|
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
|
||||||
@@ -142,22 +155,23 @@ class APIView(views.APIView):
|
|||||||
Store the Django REST Framework Request object as an attribute on the
|
Store the Django REST Framework Request object as an attribute on the
|
||||||
normal Django request, store time the request started.
|
normal Django request, store time the request started.
|
||||||
"""
|
"""
|
||||||
|
remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
|
||||||
|
|
||||||
self.time_started = time.time()
|
self.time_started = time.time()
|
||||||
if getattr(settings, 'SQL_DEBUG', False):
|
if getattr(settings, 'SQL_DEBUG', False):
|
||||||
self.queries_before = len(connection.queries)
|
self.queries_before = len(connection.queries)
|
||||||
|
|
||||||
|
if 'HTTP_X_TRUSTED_PROXY' in request.environ:
|
||||||
|
if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
|
||||||
|
remote_headers = settings.REMOTE_HOST_HEADERS
|
||||||
|
else:
|
||||||
|
logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
|
||||||
|
|
||||||
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
|
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
|
||||||
# they respect the allowed proxy list
|
# they respect the allowed proxy list
|
||||||
if all(
|
if settings.PROXY_IP_ALLOWED_LIST:
|
||||||
[
|
if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
|
||||||
settings.PROXY_IP_ALLOWED_LIST,
|
delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
|
||||||
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
|
|
||||||
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
|
|
||||||
]
|
|
||||||
):
|
|
||||||
for custom_header in settings.REMOTE_HOST_HEADERS:
|
|
||||||
if custom_header.startswith('HTTP_'):
|
|
||||||
request.environ.pop(custom_header, None)
|
|
||||||
|
|
||||||
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
|
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
|
||||||
request.drf_request = drf_request
|
request.drf_request = drf_request
|
||||||
@@ -202,17 +216,21 @@ class APIView(views.APIView):
|
|||||||
return response
|
return response
|
||||||
|
|
||||||
if response.status_code >= 400:
|
if response.status_code >= 400:
|
||||||
|
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
|
||||||
msg_data = {
|
msg_data = {
|
||||||
'status_code': response.status_code,
|
'status_code': response.status_code,
|
||||||
'user_name': request.user,
|
'user_name': request.user,
|
||||||
'url_path': request.path,
|
'url_path': request.path,
|
||||||
'remote_addr': request.META.get('REMOTE_ADDR', None),
|
'remote_addr': ip,
|
||||||
}
|
}
|
||||||
|
|
||||||
if type(response.data) is dict:
|
if type(response.data) is dict:
|
||||||
msg_data['error'] = response.data.get('error', response.status_text)
|
msg_data['error'] = response.data.get('error', response.status_text)
|
||||||
elif type(response.data) is list:
|
elif type(response.data) is list:
|
||||||
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
|
if len(response.data) > 0 and isinstance(response.data[0], str):
|
||||||
|
msg_data['error'] = str(response.data[0])
|
||||||
|
else:
|
||||||
|
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
|
||||||
else:
|
else:
|
||||||
msg_data['error'] = response.status_text
|
msg_data['error'] = response.status_text
|
||||||
|
|
||||||
@@ -472,7 +490,11 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
|
|||||||
|
|
||||||
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
|
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
|
||||||
# Base class for a list view that allows creating new objects.
|
# Base class for a list view that allows creating new objects.
|
||||||
pass
|
def perform_create(self, serializer):
|
||||||
|
super().perform_create(serializer)
|
||||||
|
if serializer.Meta.model in permission_registry.all_registered_models:
|
||||||
|
if self.request and self.request.user:
|
||||||
|
give_creator_permissions(self.request.user, serializer.instance)
|
||||||
|
|
||||||
|
|
||||||
class ParentMixin(object):
|
class ParentMixin(object):
|
||||||
@@ -792,6 +814,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class ResourceAccessList(ParentMixin, ListAPIView):
|
class ResourceAccessList(ParentMixin, ListAPIView):
|
||||||
|
deprecated = True
|
||||||
serializer_class = ResourceAccessListElementSerializer
|
serializer_class = ResourceAccessListElementSerializer
|
||||||
ordering = ('username',)
|
ordering = ('username',)
|
||||||
|
|
||||||
@@ -799,6 +822,15 @@ class ResourceAccessList(ParentMixin, ListAPIView):
|
|||||||
obj = self.get_parent_object()
|
obj = self.get_parent_object()
|
||||||
|
|
||||||
content_type = ContentType.objects.get_for_model(obj)
|
content_type = ContentType.objects.get_for_model(obj)
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
|
||||||
|
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
|
||||||
|
auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
|
||||||
|
if auditor_role:
|
||||||
|
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
|
||||||
|
return qs.distinct()
|
||||||
|
|
||||||
roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))
|
roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))
|
||||||
|
|
||||||
ancestors = set()
|
ancestors = set()
|
||||||
@@ -958,7 +990,7 @@ class CopyAPIView(GenericAPIView):
|
|||||||
None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
|
None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
|
||||||
)
|
)
|
||||||
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
|
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
|
||||||
new_obj.admin_role.members.add(request.user)
|
give_creator_permissions(request.user, new_obj)
|
||||||
if sub_objs:
|
if sub_objs:
|
||||||
permission_check_func = None
|
permission_check_func = None
|
||||||
if hasattr(type(self), 'deep_copy_permission_check_func'):
|
if hasattr(type(self), 'deep_copy_permission_check_func'):
|
||||||
|
|||||||
@@ -103,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
|
|||||||
default = field.get_default()
|
default = field.get_default()
|
||||||
if type(default) is UUID:
|
if type(default) is UUID:
|
||||||
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
|
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
|
||||||
if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
|
if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
|
||||||
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
|
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
|
||||||
field_info['default'] = default
|
field_info['default'] = default
|
||||||
except serializers.SkipField:
|
except serializers.SkipField:
|
||||||
|
|||||||
@@ -43,11 +43,14 @@ from rest_framework.utils.serializer_helpers import ReturnList
|
|||||||
# Django-Polymorphic
|
# Django-Polymorphic
|
||||||
from polymorphic.models import PolymorphicModel
|
from polymorphic.models import PolymorphicModel
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
from ansible_base.lib.utils.models import get_type_for_model
|
from ansible_base.lib.utils.models import get_type_for_model
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.access import get_user_capabilities
|
from awx.main.access import get_user_capabilities
|
||||||
from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE
|
from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission
|
||||||
from awx.main.models import (
|
from awx.main.models import (
|
||||||
ActivityStream,
|
ActivityStream,
|
||||||
AdHocCommand,
|
AdHocCommand,
|
||||||
@@ -102,7 +105,7 @@ from awx.main.models import (
|
|||||||
CLOUD_INVENTORY_SOURCES,
|
CLOUD_INVENTORY_SOURCES,
|
||||||
)
|
)
|
||||||
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
|
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
|
||||||
from awx.main.models.rbac import role_summary_fields_generator, RoleAncestorEntry
|
from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role
|
||||||
from awx.main.fields import ImplicitRoleField
|
from awx.main.fields import ImplicitRoleField
|
||||||
from awx.main.utils import (
|
from awx.main.utils import (
|
||||||
get_model_for_type,
|
get_model_for_type,
|
||||||
@@ -2763,13 +2766,26 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
team_content_type = ContentType.objects.get_for_model(Team)
|
team_content_type = ContentType.objects.get_for_model(Team)
|
||||||
content_type = ContentType.objects.get_for_model(obj)
|
content_type = ContentType.objects.get_for_model(obj)
|
||||||
|
|
||||||
def get_roles_on_resource(parent_role):
|
reversed_org_map = {}
|
||||||
"Returns a string list of the roles a parent_role has for current obj."
|
for k, v in org_role_to_permission.items():
|
||||||
return list(
|
reversed_org_map[v] = k
|
||||||
RoleAncestorEntry.objects.filter(ancestor=parent_role, content_type_id=content_type.id, object_id=obj.id)
|
reversed_role_map = {}
|
||||||
.values_list('role_field', flat=True)
|
for k, v in to_permissions.items():
|
||||||
.distinct()
|
reversed_role_map[v] = k
|
||||||
)
|
|
||||||
|
def get_roles_from_perms(perm_list):
|
||||||
|
"""given a list of permission codenames return a list of role names"""
|
||||||
|
role_names = set()
|
||||||
|
for codename in perm_list:
|
||||||
|
action = codename.split('_', 1)[0]
|
||||||
|
if action in reversed_role_map:
|
||||||
|
role_names.add(reversed_role_map[action])
|
||||||
|
elif codename in reversed_org_map:
|
||||||
|
if isinstance(obj, Organization):
|
||||||
|
role_names.add(reversed_org_map[codename])
|
||||||
|
if 'view_organization' not in role_names:
|
||||||
|
role_names.add('read_role')
|
||||||
|
return list(role_names)
|
||||||
|
|
||||||
def format_role_perm(role):
|
def format_role_perm(role):
|
||||||
role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
|
role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
|
||||||
@@ -2786,13 +2802,21 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
else:
|
else:
|
||||||
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
||||||
role_dict['user_capabilities'] = {'unattach': False}
|
role_dict['user_capabilities'] = {'unattach': False}
|
||||||
return {'role': role_dict, 'descendant_roles': get_roles_on_resource(role)}
|
|
||||||
|
model_name = content_type.model
|
||||||
|
if isinstance(obj, Organization):
|
||||||
|
descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name) or codename.startswith('add_')]
|
||||||
|
else:
|
||||||
|
descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name)]
|
||||||
|
|
||||||
|
return {'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)}
|
||||||
|
|
||||||
def format_team_role_perm(naive_team_role, permissive_role_ids):
|
def format_team_role_perm(naive_team_role, permissive_role_ids):
|
||||||
ret = []
|
ret = []
|
||||||
|
team = naive_team_role.content_object
|
||||||
team_role = naive_team_role
|
team_role = naive_team_role
|
||||||
if naive_team_role.role_field == 'admin_role':
|
if naive_team_role.role_field == 'admin_role':
|
||||||
team_role = naive_team_role.content_object.member_role
|
team_role = team.member_role
|
||||||
for role in team_role.children.filter(id__in=permissive_role_ids).all():
|
for role in team_role.children.filter(id__in=permissive_role_ids).all():
|
||||||
role_dict = {
|
role_dict = {
|
||||||
'id': role.id,
|
'id': role.id,
|
||||||
@@ -2812,10 +2836,87 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
|||||||
else:
|
else:
|
||||||
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
# Singleton roles should not be managed from this view, as per copy/edit rework spec
|
||||||
role_dict['user_capabilities'] = {'unattach': False}
|
role_dict['user_capabilities'] = {'unattach': False}
|
||||||
ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(team_role)})
|
|
||||||
|
descendant_perms = list(
|
||||||
|
RoleEvaluation.objects.filter(role__in=team.has_roles.all(), object_id=obj.id, content_type_id=content_type.id)
|
||||||
|
.values_list('codename', flat=True)
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
|
ret.append({'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)})
|
||||||
|
return ret
|
||||||
|
|
||||||
|
gfk_kwargs = dict(content_type_id=content_type.id, object_id=obj.id)
|
||||||
|
direct_permissive_role_ids = Role.objects.filter(**gfk_kwargs).values_list('id', flat=True)
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
ret['summary_fields']['direct_access'] = []
|
||||||
|
ret['summary_fields']['indirect_access'] = []
|
||||||
|
|
||||||
|
new_roles_seen = set()
|
||||||
|
all_team_roles = set()
|
||||||
|
all_permissive_role_ids = set()
|
||||||
|
for evaluation in RoleEvaluation.objects.filter(role__in=user.has_roles.all(), **gfk_kwargs).prefetch_related('role'):
|
||||||
|
new_role = evaluation.role
|
||||||
|
if new_role.id in new_roles_seen:
|
||||||
|
continue
|
||||||
|
new_roles_seen.add(new_role.id)
|
||||||
|
old_role = get_role_from_object_role(new_role)
|
||||||
|
all_permissive_role_ids.add(old_role.id)
|
||||||
|
|
||||||
|
if int(new_role.object_id) == obj.id and new_role.content_type_id == content_type.id:
|
||||||
|
ret['summary_fields']['direct_access'].append(format_role_perm(old_role))
|
||||||
|
elif new_role.content_type_id == team_content_type.id:
|
||||||
|
all_team_roles.add(old_role)
|
||||||
|
else:
|
||||||
|
ret['summary_fields']['indirect_access'].append(format_role_perm(old_role))
|
||||||
|
|
||||||
|
# Lazy role creation gives us a big problem, where some intermediate roles are not easy to find
|
||||||
|
# like when a team has indirect permission, so here we get all roles the users teams have
|
||||||
|
# these contribute to all potential permission-granting roles of the object
|
||||||
|
user_teams_qs = permission_registry.team_model.objects.filter(member_roles__in=ObjectRole.objects.filter(users=user))
|
||||||
|
team_obj_roles = ObjectRole.objects.filter(teams__in=user_teams_qs)
|
||||||
|
for evaluation in RoleEvaluation.objects.filter(role__in=team_obj_roles, **gfk_kwargs).prefetch_related('role'):
|
||||||
|
new_role = evaluation.role
|
||||||
|
if new_role.id in new_roles_seen:
|
||||||
|
continue
|
||||||
|
new_roles_seen.add(new_role.id)
|
||||||
|
old_role = get_role_from_object_role(new_role)
|
||||||
|
all_permissive_role_ids.add(old_role.id)
|
||||||
|
|
||||||
|
# In DAB RBAC, superuser is strictly a user flag, and global roles are not in the RoleEvaluation table
|
||||||
|
if user.is_superuser:
|
||||||
|
ret['summary_fields'].setdefault('indirect_access', [])
|
||||||
|
all_role_names = [field.name for field in obj._meta.get_fields() if isinstance(field, ImplicitRoleField)]
|
||||||
|
ret['summary_fields']['indirect_access'].append(
|
||||||
|
{
|
||||||
|
"role": {
|
||||||
|
"id": None,
|
||||||
|
"name": _("System Administrator"),
|
||||||
|
"description": _("Can manage all aspects of the system"),
|
||||||
|
"user_capabilities": {"unattach": False},
|
||||||
|
},
|
||||||
|
"descendant_roles": all_role_names,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
elif user.is_system_auditor:
|
||||||
|
ret['summary_fields'].setdefault('indirect_access', [])
|
||||||
|
ret['summary_fields']['indirect_access'].append(
|
||||||
|
{
|
||||||
|
"role": {
|
||||||
|
"id": None,
|
||||||
|
"name": _("System Auditor"),
|
||||||
|
"description": _("Can view all aspects of the system"),
|
||||||
|
"user_capabilities": {"unattach": False},
|
||||||
|
},
|
||||||
|
"descendant_roles": ["read_role"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
ret['summary_fields']['direct_access'].extend([y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in all_team_roles) for y in x])
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
|
|
||||||
all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
|
all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
|
||||||
|
|
||||||
direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
|
direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
|
||||||
@@ -3084,7 +3185,7 @@ class CredentialSerializerCreate(CredentialSerializer):
|
|||||||
credential = super(CredentialSerializerCreate, self).create(validated_data)
|
credential = super(CredentialSerializerCreate, self).create(validated_data)
|
||||||
|
|
||||||
if user:
|
if user:
|
||||||
credential.admin_role.members.add(user)
|
give_creator_permissions(user, credential)
|
||||||
if team:
|
if team:
|
||||||
if not credential.organization or team.organization.id != credential.organization.id:
|
if not credential.organization or team.organization.id != credential.organization.id:
|
||||||
raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
|
raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
|
||||||
@@ -5280,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def get_body(self, obj):
|
def get_body(self, obj):
|
||||||
if obj.notification_type in ('webhook', 'pagerduty'):
|
if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
|
||||||
if isinstance(obj.body, dict):
|
if isinstance(obj.body, dict):
|
||||||
if 'body' in obj.body:
|
if 'body' in obj.body:
|
||||||
return obj.body['body']
|
return obj.body['body']
|
||||||
@@ -5302,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
|
|||||||
def to_representation(self, obj):
|
def to_representation(self, obj):
|
||||||
ret = super(NotificationSerializer, self).to_representation(obj)
|
ret = super(NotificationSerializer, self).to_representation(obj)
|
||||||
|
|
||||||
if obj.notification_type == 'webhook':
|
if obj.notification_type in ('webhook', 'awssns'):
|
||||||
ret.pop('subject')
|
ret.pop('subject')
|
||||||
if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
|
if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
|
||||||
ret.pop('body')
|
ret.pop('body')
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|||||||
@@ -2,6 +2,12 @@
|
|||||||
- hosts: all
|
- hosts: all
|
||||||
become: yes
|
become: yes
|
||||||
tasks:
|
tasks:
|
||||||
|
- name: Create the receptor group
|
||||||
|
group:
|
||||||
|
{% verbatim %}
|
||||||
|
name: "{{ receptor_group }}"
|
||||||
|
{% endverbatim %}
|
||||||
|
state: present
|
||||||
- name: Create the receptor user
|
- name: Create the receptor user
|
||||||
user:
|
user:
|
||||||
{% verbatim %}
|
{% verbatim %}
|
||||||
|
|||||||
@@ -29,9 +29,7 @@ def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra
|
|||||||
kwargs = {}
|
kwargs = {}
|
||||||
if 'version' not in kwargs:
|
if 'version' not in kwargs:
|
||||||
kwargs['version'] = settings.REST_FRAMEWORK['DEFAULT_VERSION']
|
kwargs['version'] = settings.REST_FRAMEWORK['DEFAULT_VERSION']
|
||||||
url = drf_reverse(viewname, args, kwargs, request, format, **extra)
|
return drf_reverse(viewname, args, kwargs, request, format, **extra)
|
||||||
|
|
||||||
return transform_optional_api_urlpattern_prefix_url(request, url)
|
|
||||||
|
|
||||||
|
|
||||||
class URLPathVersioning(BaseVersioning):
|
class URLPathVersioning(BaseVersioning):
|
||||||
|
|||||||
@@ -60,6 +60,11 @@ from oauth2_provider.models import get_access_token_model
|
|||||||
import pytz
|
import pytz
|
||||||
from wsgiref.util import FileWrapper
|
from wsgiref.util import FileWrapper
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.lib.utils.requests import get_remote_hosts
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
|
||||||
|
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
|
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
|
||||||
from awx.main.access import get_user_queryset
|
from awx.main.access import get_user_queryset
|
||||||
@@ -87,6 +92,7 @@ from awx.api.generics import (
|
|||||||
from awx.api.views.labels import LabelSubListCreateAttachDetachView
|
from awx.api.views.labels import LabelSubListCreateAttachDetachView
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
from awx.main import models
|
from awx.main import models
|
||||||
|
from awx.main.models.rbac import get_role_definition
|
||||||
from awx.main.utils import (
|
from awx.main.utils import (
|
||||||
camelcase_to_underscore,
|
camelcase_to_underscore,
|
||||||
extract_ansible_vars,
|
extract_ansible_vars,
|
||||||
@@ -124,6 +130,7 @@ from awx.api.views.mixin import (
|
|||||||
from awx.api.pagination import UnifiedJobEventPagination
|
from awx.api.pagination import UnifiedJobEventPagination
|
||||||
from awx.main.utils import set_environ
|
from awx.main.utils import set_environ
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.api.views')
|
logger = logging.getLogger('awx.api.views')
|
||||||
|
|
||||||
|
|
||||||
@@ -536,6 +543,7 @@ class InstanceGroupAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class InstanceGroupObjectRolesList(SubListAPIView):
|
class InstanceGroupObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.InstanceGroup
|
parent_model = models.InstanceGroup
|
||||||
@@ -705,16 +713,81 @@ class AuthView(APIView):
|
|||||||
return Response(data)
|
return Response(data)
|
||||||
|
|
||||||
|
|
||||||
|
def immutablesharedfields(cls):
|
||||||
|
'''
|
||||||
|
Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
|
||||||
|
|
||||||
|
Works by overriding these view methods:
|
||||||
|
- create
|
||||||
|
- delete
|
||||||
|
- perform_update
|
||||||
|
create and delete are overridden to raise a PermissionDenied exception.
|
||||||
|
perform_update is overridden to check if any shared fields are being modified,
|
||||||
|
and raise a PermissionDenied exception if so.
|
||||||
|
'''
|
||||||
|
# create instead of perform_create because some of our views
|
||||||
|
# override create instead of perform_create
|
||||||
|
if hasattr(cls, 'create'):
|
||||||
|
cls.original_create = cls.create
|
||||||
|
|
||||||
|
@functools.wraps(cls.create)
|
||||||
|
def create_wrapper(*args, **kwargs):
|
||||||
|
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
return cls.original_create(*args, **kwargs)
|
||||||
|
raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
|
||||||
|
|
||||||
|
cls.create = create_wrapper
|
||||||
|
|
||||||
|
if hasattr(cls, 'delete'):
|
||||||
|
cls.original_delete = cls.delete
|
||||||
|
|
||||||
|
@functools.wraps(cls.delete)
|
||||||
|
def delete_wrapper(*args, **kwargs):
|
||||||
|
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
return cls.original_delete(*args, **kwargs)
|
||||||
|
raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
|
||||||
|
|
||||||
|
cls.delete = delete_wrapper
|
||||||
|
|
||||||
|
if hasattr(cls, 'perform_update'):
|
||||||
|
cls.original_perform_update = cls.perform_update
|
||||||
|
|
||||||
|
@functools.wraps(cls.perform_update)
|
||||||
|
def update_wrapper(*args, **kwargs):
|
||||||
|
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
view, serializer = args
|
||||||
|
instance = view.get_object()
|
||||||
|
if instance:
|
||||||
|
if isinstance(instance, models.Organization):
|
||||||
|
shared_fields = OrganizationType._declared_fields.keys()
|
||||||
|
elif isinstance(instance, models.User):
|
||||||
|
shared_fields = UserType._declared_fields.keys()
|
||||||
|
elif isinstance(instance, models.Team):
|
||||||
|
shared_fields = TeamType._declared_fields.keys()
|
||||||
|
attrs = serializer.validated_data
|
||||||
|
for field in shared_fields:
|
||||||
|
if field in attrs and getattr(instance, field) != attrs[field]:
|
||||||
|
raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
|
||||||
|
return cls.original_perform_update(*args, **kwargs)
|
||||||
|
|
||||||
|
cls.perform_update = update_wrapper
|
||||||
|
|
||||||
|
return cls
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class TeamList(ListCreateAPIView):
|
class TeamList(ListCreateAPIView):
|
||||||
model = models.Team
|
model = models.Team
|
||||||
serializer_class = serializers.TeamSerializer
|
serializer_class = serializers.TeamSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class TeamDetail(RetrieveUpdateDestroyAPIView):
|
class TeamDetail(RetrieveUpdateDestroyAPIView):
|
||||||
model = models.Team
|
model = models.Team
|
||||||
serializer_class = serializers.TeamSerializer
|
serializer_class = serializers.TeamSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class TeamUsersList(BaseUsersList):
|
class TeamUsersList(BaseUsersList):
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
@@ -724,6 +797,7 @@ class TeamUsersList(BaseUsersList):
|
|||||||
|
|
||||||
|
|
||||||
class TeamRolesList(SubListAttachDetachAPIView):
|
class TeamRolesList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializerWithParentAccess
|
serializer_class = serializers.RoleSerializerWithParentAccess
|
||||||
metadata_class = RoleMetadata
|
metadata_class = RoleMetadata
|
||||||
@@ -763,10 +837,12 @@ class TeamRolesList(SubListAttachDetachAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class TeamObjectRolesList(SubListAPIView):
|
class TeamObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Team
|
parent_model = models.Team
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -784,8 +860,15 @@ class TeamProjectsList(SubListAPIView):
|
|||||||
self.check_parent_access(team)
|
self.check_parent_access(team)
|
||||||
model_ct = ContentType.objects.get_for_model(self.model)
|
model_ct = ContentType.objects.get_for_model(self.model)
|
||||||
parent_ct = ContentType.objects.get_for_model(self.parent_model)
|
parent_ct = ContentType.objects.get_for_model(self.parent_model)
|
||||||
proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
|
|
||||||
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
|
rd = get_role_definition(team.member_role)
|
||||||
|
role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
|
||||||
|
if role is None:
|
||||||
|
# Team has no permissions, therefore team has no projects
|
||||||
|
return self.model.objects.none()
|
||||||
|
else:
|
||||||
|
project_qs = self.model.accessible_objects(self.request.user, 'read_role')
|
||||||
|
return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
|
||||||
|
|
||||||
|
|
||||||
class TeamActivityStreamList(SubListAPIView):
|
class TeamActivityStreamList(SubListAPIView):
|
||||||
@@ -800,10 +883,23 @@ class TeamActivityStreamList(SubListAPIView):
|
|||||||
self.check_parent_access(parent)
|
self.check_parent_access(parent)
|
||||||
|
|
||||||
qs = self.request.user.get_queryset(self.model)
|
qs = self.request.user.get_queryset(self.model)
|
||||||
|
|
||||||
return qs.filter(
|
return qs.filter(
|
||||||
Q(team=parent)
|
Q(team=parent)
|
||||||
| Q(project__in=models.Project.accessible_objects(parent.member_role, 'read_role'))
|
| Q(
|
||||||
| Q(credential__in=models.Credential.accessible_objects(parent.member_role, 'read_role'))
|
project__in=RoleEvaluation.objects.filter(
|
||||||
|
role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Project).id, codename='view_project'
|
||||||
|
)
|
||||||
|
.values_list('object_id')
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
| Q(
|
||||||
|
credential__in=RoleEvaluation.objects.filter(
|
||||||
|
role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Credential).id, codename='view_credential'
|
||||||
|
)
|
||||||
|
.values_list('object_id')
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -1055,10 +1151,12 @@ class ProjectAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class ProjectObjectRolesList(SubListAPIView):
|
class ProjectObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Project
|
parent_model = models.Project
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -1071,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
|
|||||||
copy_return_serializer_class = serializers.ProjectSerializer
|
copy_return_serializer_class = serializers.ProjectSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class UserList(ListCreateAPIView):
|
class UserList(ListCreateAPIView):
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
@@ -1216,6 +1315,7 @@ class UserTeamsList(SubListAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class UserRolesList(SubListAttachDetachAPIView):
|
class UserRolesList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializerWithParentAccess
|
serializer_class = serializers.RoleSerializerWithParentAccess
|
||||||
metadata_class = RoleMetadata
|
metadata_class = RoleMetadata
|
||||||
@@ -1240,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
|
|||||||
user = get_object_or_400(models.User, pk=self.kwargs['pk'])
|
user = get_object_or_400(models.User, pk=self.kwargs['pk'])
|
||||||
role = get_object_or_400(models.Role, pk=sub_id)
|
role = get_object_or_400(models.Role, pk=sub_id)
|
||||||
|
|
||||||
credential_content_type = ContentType.objects.get_for_model(models.Credential)
|
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential) # dict of {model: content_type}
|
||||||
|
# Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
|
||||||
|
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
for model in [models.Organization, models.Team]:
|
||||||
|
ct = content_types[model]
|
||||||
|
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
|
||||||
|
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
|
||||||
|
return Response(data, status=status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
credential_content_type = content_types[models.Credential]
|
||||||
if role.content_type == credential_content_type:
|
if role.content_type == credential_content_type:
|
||||||
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
||||||
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
||||||
@@ -1312,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
|
|||||||
return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
|
return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class UserDetail(RetrieveUpdateDestroyAPIView):
|
class UserDetail(RetrieveUpdateDestroyAPIView):
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
@@ -1490,10 +1600,12 @@ class CredentialAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class CredentialObjectRolesList(SubListAPIView):
|
class CredentialObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Credential
|
parent_model = models.Credential
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -2280,12 +2392,13 @@ class JobTemplateList(ListCreateAPIView):
|
|||||||
serializer_class = serializers.JobTemplateSerializer
|
serializer_class = serializers.JobTemplateSerializer
|
||||||
always_allow_superuser = False
|
always_allow_superuser = False
|
||||||
|
|
||||||
def post(self, request, *args, **kwargs):
|
def check_permissions(self, request):
|
||||||
ret = super(JobTemplateList, self).post(request, *args, **kwargs)
|
if request.method == 'POST':
|
||||||
if ret.status_code == 201:
|
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
|
||||||
job_template = models.JobTemplate.objects.get(id=ret.data['id'])
|
if not can_access:
|
||||||
job_template.admin_role.members.add(request.user)
|
self.permission_denied(request, message=messages)
|
||||||
return ret
|
|
||||||
|
super(JobTemplateList, self).check_permissions(request)
|
||||||
|
|
||||||
|
|
||||||
class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||||
@@ -2666,12 +2779,7 @@ class JobTemplateCallback(GenericAPIView):
|
|||||||
host for the current request.
|
host for the current request.
|
||||||
"""
|
"""
|
||||||
# Find the list of remote host names/IPs to check.
|
# Find the list of remote host names/IPs to check.
|
||||||
remote_hosts = set()
|
remote_hosts = set(get_remote_hosts(self.request))
|
||||||
for header in settings.REMOTE_HOST_HEADERS:
|
|
||||||
for value in self.request.META.get(header, '').split(','):
|
|
||||||
value = value.strip()
|
|
||||||
if value:
|
|
||||||
remote_hosts.add(value)
|
|
||||||
# Add the reverse lookup of IP addresses.
|
# Add the reverse lookup of IP addresses.
|
||||||
for rh in list(remote_hosts):
|
for rh in list(remote_hosts):
|
||||||
try:
|
try:
|
||||||
@@ -2832,10 +2940,12 @@ class JobTemplateAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class JobTemplateObjectRolesList(SubListAPIView):
|
class JobTemplateObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.JobTemplate
|
parent_model = models.JobTemplate
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -3009,6 +3119,14 @@ class WorkflowJobTemplateList(ListCreateAPIView):
|
|||||||
serializer_class = serializers.WorkflowJobTemplateSerializer
|
serializer_class = serializers.WorkflowJobTemplateSerializer
|
||||||
always_allow_superuser = False
|
always_allow_superuser = False
|
||||||
|
|
||||||
|
def check_permissions(self, request):
|
||||||
|
if request.method == 'POST':
|
||||||
|
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
|
||||||
|
if not can_access:
|
||||||
|
self.permission_denied(request, message=messages)
|
||||||
|
|
||||||
|
super(WorkflowJobTemplateList, self).check_permissions(request)
|
||||||
|
|
||||||
|
|
||||||
class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||||
model = models.WorkflowJobTemplate
|
model = models.WorkflowJobTemplate
|
||||||
@@ -3218,10 +3336,12 @@ class WorkflowJobTemplateAccessList(ResourceAccessList):
|
|||||||
|
|
||||||
|
|
||||||
class WorkflowJobTemplateObjectRolesList(SubListAPIView):
|
class WorkflowJobTemplateObjectRolesList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.WorkflowJobTemplate
|
parent_model = models.WorkflowJobTemplate
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
@@ -4230,6 +4350,7 @@ class ActivityStreamDetail(RetrieveAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleList(ListAPIView):
|
class RoleList(ListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
permission_classes = (IsAuthenticated,)
|
permission_classes = (IsAuthenticated,)
|
||||||
@@ -4237,11 +4358,13 @@ class RoleList(ListAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleDetail(RetrieveAPIView):
|
class RoleDetail(RetrieveAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
|
|
||||||
|
|
||||||
class RoleUsersList(SubListAttachDetachAPIView):
|
class RoleUsersList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.User
|
model = models.User
|
||||||
serializer_class = serializers.UserSerializer
|
serializer_class = serializers.UserSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
@@ -4262,7 +4385,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
|
|||||||
user = get_object_or_400(models.User, pk=sub_id)
|
user = get_object_or_400(models.User, pk=sub_id)
|
||||||
role = self.get_parent_object()
|
role = self.get_parent_object()
|
||||||
|
|
||||||
credential_content_type = ContentType.objects.get_for_model(models.Credential)
|
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential) # dict of {model: content_type}
|
||||||
|
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
|
||||||
|
for model in [models.Organization, models.Team]:
|
||||||
|
ct = content_types[model]
|
||||||
|
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
|
||||||
|
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
|
||||||
|
return Response(data, status=status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
credential_content_type = content_types[models.Credential]
|
||||||
if role.content_type == credential_content_type:
|
if role.content_type == credential_content_type:
|
||||||
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
|
||||||
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
|
||||||
@@ -4276,6 +4407,7 @@ class RoleUsersList(SubListAttachDetachAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleTeamsList(SubListAttachDetachAPIView):
|
class RoleTeamsList(SubListAttachDetachAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Team
|
model = models.Team
|
||||||
serializer_class = serializers.TeamSerializer
|
serializer_class = serializers.TeamSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
@@ -4320,10 +4452,12 @@ class RoleTeamsList(SubListAttachDetachAPIView):
|
|||||||
team.member_role.children.remove(role)
|
team.member_role.children.remove(role)
|
||||||
else:
|
else:
|
||||||
team.member_role.children.add(role)
|
team.member_role.children.add(role)
|
||||||
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class RoleParentsList(SubListAPIView):
|
class RoleParentsList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
@@ -4337,6 +4471,7 @@ class RoleParentsList(SubListAPIView):
|
|||||||
|
|
||||||
|
|
||||||
class RoleChildrenList(SubListAPIView):
|
class RoleChildrenList(SubListAPIView):
|
||||||
|
deprecated = True
|
||||||
model = models.Role
|
model = models.Role
|
||||||
serializer_class = serializers.RoleSerializer
|
serializer_class = serializers.RoleSerializer
|
||||||
parent_model = models.Role
|
parent_model = models.Role
|
||||||
|
|||||||
@@ -152,6 +152,7 @@ class InventoryObjectRolesList(SubListAPIView):
|
|||||||
serializer_class = RoleSerializer
|
serializer_class = RoleSerializer
|
||||||
parent_model = Inventory
|
parent_model = Inventory
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
|
|||||||
@@ -53,15 +53,18 @@ from awx.api.serializers import (
|
|||||||
CredentialSerializer,
|
CredentialSerializer,
|
||||||
)
|
)
|
||||||
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
|
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
|
||||||
|
from awx.api.views import immutablesharedfields
|
||||||
|
|
||||||
logger = logging.getLogger('awx.api.views.organization')
|
logger = logging.getLogger('awx.api.views.organization')
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
||||||
model = Organization
|
model = Organization
|
||||||
serializer_class = OrganizationSerializer
|
serializer_class = OrganizationSerializer
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||||
model = Organization
|
model = Organization
|
||||||
serializer_class = OrganizationSerializer
|
serializer_class = OrganizationSerializer
|
||||||
@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
|
|||||||
relationship = 'inventories'
|
relationship = 'inventories'
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationUsersList(BaseUsersList):
|
class OrganizationUsersList(BaseUsersList):
|
||||||
model = User
|
model = User
|
||||||
serializer_class = UserSerializer
|
serializer_class = UserSerializer
|
||||||
@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
|
|||||||
ordering = ('username',)
|
ordering = ('username',)
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationAdminsList(BaseUsersList):
|
class OrganizationAdminsList(BaseUsersList):
|
||||||
model = User
|
model = User
|
||||||
serializer_class = UserSerializer
|
serializer_class = UserSerializer
|
||||||
@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
|
|||||||
parent_key = 'organization'
|
parent_key = 'organization'
|
||||||
|
|
||||||
|
|
||||||
|
@immutablesharedfields
|
||||||
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
||||||
model = Team
|
model = Team
|
||||||
serializer_class = TeamSerializer
|
serializer_class = TeamSerializer
|
||||||
@@ -226,6 +232,7 @@ class OrganizationObjectRolesList(SubListAPIView):
|
|||||||
serializer_class = RoleSerializer
|
serializer_class = RoleSerializer
|
||||||
parent_model = Organization
|
parent_model = Organization
|
||||||
search_fields = ('role_field', 'content_type__model')
|
search_fields = ('role_field', 'content_type__model')
|
||||||
|
deprecated = True
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
po = self.get_parent_object()
|
po = self.get_parent_object()
|
||||||
|
|||||||
@@ -132,6 +132,9 @@ class ApiVersionRootView(APIView):
|
|||||||
data['bulk'] = reverse('api:bulk', request=request)
|
data['bulk'] = reverse('api:bulk', request=request)
|
||||||
data['analytics'] = reverse('api:analytics_root_view', request=request)
|
data['analytics'] = reverse('api:analytics_root_view', request=request)
|
||||||
data['service_index'] = django_reverse('service-index-root')
|
data['service_index'] = django_reverse('service-index-root')
|
||||||
|
data['role_definitions'] = django_reverse('roledefinition-list')
|
||||||
|
data['role_user_assignments'] = django_reverse('roleuserassignment-list')
|
||||||
|
data['role_team_assignments'] = django_reverse('roleteamassignment-list')
|
||||||
return Response(data)
|
return Response(data)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -61,6 +61,10 @@ class StringListBooleanField(ListField):
|
|||||||
|
|
||||||
def to_representation(self, value):
|
def to_representation(self, value):
|
||||||
try:
|
try:
|
||||||
|
if isinstance(value, str):
|
||||||
|
# https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
|
||||||
|
# DRF changed truthy and falsy lists to be capitalized
|
||||||
|
value = value.lower()
|
||||||
if isinstance(value, (list, tuple)):
|
if isinstance(value, (list, tuple)):
|
||||||
return super(StringListBooleanField, self).to_representation(value)
|
return super(StringListBooleanField, self).to_representation(value)
|
||||||
elif value in BooleanField.TRUE_VALUES:
|
elif value in BooleanField.TRUE_VALUES:
|
||||||
@@ -78,6 +82,8 @@ class StringListBooleanField(ListField):
|
|||||||
|
|
||||||
def to_internal_value(self, data):
|
def to_internal_value(self, data):
|
||||||
try:
|
try:
|
||||||
|
if isinstance(data, str):
|
||||||
|
data = data.lower()
|
||||||
if isinstance(data, (list, tuple)):
|
if isinstance(data, (list, tuple)):
|
||||||
return super(StringListBooleanField, self).to_internal_value(data)
|
return super(StringListBooleanField, self).to_internal_value(data)
|
||||||
elif data in BooleanField.TRUE_VALUES:
|
elif data in BooleanField.TRUE_VALUES:
|
||||||
|
|||||||
@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||||
|
|
||||||
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
|
||||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||||
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
|
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||||
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||||
|
|
||||||
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
|
||||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||||
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
|
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
|
||||||
|
|
||||||
|
|
||||||
def test_empty_setting(settings, mocker):
|
def test_empty_setting(settings, mocker):
|
||||||
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
||||||
|
|
||||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
|
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||||
with pytest.raises(AttributeError):
|
with pytest.raises(AttributeError):
|
||||||
settings.AWX_SOME_SETTING
|
settings.AWX_SOME_SETTING
|
||||||
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
|
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
|
||||||
|
|
||||||
|
|
||||||
def test_setting_from_db(settings, mocker):
|
def test_setting_from_db(settings, mocker):
|
||||||
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):
|
|||||||
|
|
||||||
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||||
assert settings.AWX_SOME_SETTING == 'FROM_DB'
|
assert settings.AWX_SOME_SETTING == 'FROM_DB'
|
||||||
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
|
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||||
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):
|
|||||||
|
|
||||||
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||||
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
|
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
|
||||||
settings.AWX_SOME_SETTING = 'NEW-VALUE'
|
settings.AWX_SOME_SETTING = 'NEW-VALUE'
|
||||||
|
|
||||||
assert existing_setting.value == 'NEW-VALUE'
|
assert existing_setting.value == 'NEW-VALUE'
|
||||||
existing_setting.save.assert_called_with(update_fields=['value'])
|
existing_setting.save.assert_called_with(update_fields=['value'])
|
||||||
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
|
|||||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
|
||||||
|
|
||||||
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
|
||||||
del settings.AWX_SOME_SETTING
|
del settings.AWX_SOME_SETTING
|
||||||
|
|
||||||
assert existing_setting.delete.call_count == 1
|
assert existing_setting.delete.call_count == 1
|
||||||
|
|
||||||
@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
|
|||||||
# use its primary key as part of the encryption key
|
# use its primary key as part of the encryption key
|
||||||
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
|
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
|
||||||
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
|
||||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
|
||||||
cache.set('AWX_ENCRYPTED', 'SECRET!')
|
cache.set('AWX_ENCRYPTED', 'SECRET!')
|
||||||
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
|
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
|
||||||
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
|
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
|
||||||
|
|
||||||
|
|
||||||
def test_readonly_sensitive_cache_data_is_encrypted(settings):
|
def test_readonly_sensitive_cache_data_is_encrypted(settings):
|
||||||
|
|||||||
@@ -20,7 +20,10 @@ from rest_framework.exceptions import ParseError, PermissionDenied
|
|||||||
# Django OAuth Toolkit
|
# Django OAuth Toolkit
|
||||||
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken
|
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
from ansible_base.lib.utils.validation import to_python_boolean
|
from ansible_base.lib.utils.validation import to_python_boolean
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.utils import (
|
from awx.main.utils import (
|
||||||
@@ -72,8 +75,6 @@ from awx.main.models import (
|
|||||||
WorkflowJobTemplateNode,
|
WorkflowJobTemplateNode,
|
||||||
WorkflowApproval,
|
WorkflowApproval,
|
||||||
WorkflowApprovalTemplate,
|
WorkflowApprovalTemplate,
|
||||||
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
|
||||||
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
|
||||||
)
|
)
|
||||||
from awx.main.models.mixins import ResourceMixin
|
from awx.main.models.mixins import ResourceMixin
|
||||||
|
|
||||||
@@ -264,7 +265,11 @@ class BaseAccess(object):
|
|||||||
return self.can_change(obj, data)
|
return self.can_change(obj, data)
|
||||||
|
|
||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
return self.user.is_superuser
|
if self.user.is_superuser:
|
||||||
|
return True
|
||||||
|
if obj._meta.model_name in [cls._meta.model_name for cls in permission_registry.all_registered_models]:
|
||||||
|
return self.user.has_obj_perm(obj, 'delete')
|
||||||
|
return False
|
||||||
|
|
||||||
def can_copy(self, obj):
|
def can_copy(self, obj):
|
||||||
return self.can_add({'reference_obj': obj})
|
return self.can_add({'reference_obj': obj})
|
||||||
@@ -593,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
|
|||||||
- a superuser
|
- a superuser
|
||||||
- admin role on the Instance group
|
- admin role on the Instance group
|
||||||
I can add/delete Instance Groups:
|
I can add/delete Instance Groups:
|
||||||
- a superuser(system administrator)
|
- a superuser(system administrator), because these are not org-scoped
|
||||||
I can use Instance Groups when I have:
|
I can use Instance Groups when I have:
|
||||||
- use_role on the instance group
|
- use_role on the instance group
|
||||||
"""
|
"""
|
||||||
@@ -622,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
|
|||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
|
if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
|
||||||
return False
|
return False
|
||||||
return self.user.is_superuser
|
return self.user.has_obj_perm(obj, 'delete')
|
||||||
|
|
||||||
|
|
||||||
class UserAccess(BaseAccess):
|
class UserAccess(BaseAccess):
|
||||||
@@ -651,9 +656,7 @@ class UserAccess(BaseAccess):
|
|||||||
qs = (
|
qs = (
|
||||||
User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
|
User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
|
||||||
| User.objects.filter(pk=self.user.id)
|
| User.objects.filter(pk=self.user.id)
|
||||||
| User.objects.filter(
|
| User.objects.filter(is_superuser=True)
|
||||||
pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
|
|
||||||
)
|
|
||||||
).distinct()
|
).distinct()
|
||||||
return qs
|
return qs
|
||||||
|
|
||||||
@@ -711,6 +714,15 @@ class UserAccess(BaseAccess):
|
|||||||
if not allow_orphans:
|
if not allow_orphans:
|
||||||
# in these cases only superusers can modify orphan users
|
# in these cases only superusers can modify orphan users
|
||||||
return False
|
return False
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
# Permission granted if the user has all permissions that the target user has
|
||||||
|
target_perms = set(
|
||||||
|
RoleEvaluation.objects.filter(role__in=obj.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
|
||||||
|
)
|
||||||
|
user_perms = set(
|
||||||
|
RoleEvaluation.objects.filter(role__in=self.user.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
|
||||||
|
)
|
||||||
|
return not (target_perms - user_perms)
|
||||||
return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
|
return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
|
||||||
else:
|
else:
|
||||||
return self.is_all_org_admin(obj)
|
return self.is_all_org_admin(obj)
|
||||||
@@ -949,9 +961,6 @@ class InventoryAccess(BaseAccess):
|
|||||||
def can_update(self, obj):
|
def can_update(self, obj):
|
||||||
return self.user in obj.update_role
|
return self.user in obj.update_role
|
||||||
|
|
||||||
def can_delete(self, obj):
|
|
||||||
return self.can_admin(obj, None)
|
|
||||||
|
|
||||||
def can_run_ad_hoc_commands(self, obj):
|
def can_run_ad_hoc_commands(self, obj):
|
||||||
return self.user in obj.adhoc_role
|
return self.user in obj.adhoc_role
|
||||||
|
|
||||||
@@ -1378,12 +1387,11 @@ class TeamAccess(BaseAccess):
|
|||||||
class ExecutionEnvironmentAccess(BaseAccess):
|
class ExecutionEnvironmentAccess(BaseAccess):
|
||||||
"""
|
"""
|
||||||
I can see an execution environment when:
|
I can see an execution environment when:
|
||||||
- I'm a superuser
|
- I can see its organization
|
||||||
- I'm a member of the same organization
|
- It is a global ExecutionEnvironment
|
||||||
- it is a global ExecutionEnvironment
|
|
||||||
I can create/change an execution environment when:
|
I can create/change an execution environment when:
|
||||||
- I'm a superuser
|
- I'm a superuser
|
||||||
- I'm an admin for the organization(s)
|
- I have an organization or object role that gives access
|
||||||
"""
|
"""
|
||||||
|
|
||||||
model = ExecutionEnvironment
|
model = ExecutionEnvironment
|
||||||
@@ -1392,7 +1400,9 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
|||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
return ExecutionEnvironment.objects.filter(
|
return ExecutionEnvironment.objects.filter(
|
||||||
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
|
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
|
||||||
|
| Q(organization__isnull=True)
|
||||||
|
| Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
|
||||||
).distinct()
|
).distinct()
|
||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
@@ -1405,13 +1415,19 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
|||||||
def can_change(self, obj, data):
|
def can_change(self, obj, data):
|
||||||
if obj and obj.organization_id is None:
|
if obj and obj.organization_id is None:
|
||||||
raise PermissionDenied
|
raise PermissionDenied
|
||||||
if self.user not in obj.organization.execution_environment_admin_role:
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
raise PermissionDenied
|
if not self.user.has_obj_perm(obj, 'change'):
|
||||||
if data and 'organization' in data:
|
|
||||||
new_org = get_object_from_data('organization', Organization, data, obj=obj)
|
|
||||||
if not new_org or self.user not in new_org.execution_environment_admin_role:
|
|
||||||
return False
|
return False
|
||||||
return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
|
else:
|
||||||
|
if self.user not in obj.organization.execution_environment_admin_role:
|
||||||
|
raise PermissionDenied
|
||||||
|
if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
|
||||||
|
return False
|
||||||
|
# Special case that check_related does not catch, org users can not remove the organization from the EE
|
||||||
|
if data and ('organization' in data or 'organization_id' in data):
|
||||||
|
if (not data.get('organization')) and (not data.get('organization_id')):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
def can_delete(self, obj):
|
def can_delete(self, obj):
|
||||||
if obj.managed:
|
if obj.managed:
|
||||||
@@ -1583,6 +1599,8 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
|
|||||||
inventory = get_value(Inventory, 'inventory')
|
inventory = get_value(Inventory, 'inventory')
|
||||||
if inventory:
|
if inventory:
|
||||||
if self.user not in inventory.use_role:
|
if self.user not in inventory.use_role:
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['inventory'] = [_('You do not have use permission on Inventory')]
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
||||||
@@ -1591,11 +1609,16 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
|
|||||||
project = get_value(Project, 'project')
|
project = get_value(Project, 'project')
|
||||||
# If the user has admin access to the project (as an org admin), should
|
# If the user has admin access to the project (as an org admin), should
|
||||||
# be able to proceed without additional checks.
|
# be able to proceed without additional checks.
|
||||||
if project:
|
if not project:
|
||||||
return self.user in project.use_role
|
|
||||||
else:
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if self.user not in project.use_role:
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['project'] = [_('You do not have use permission on Project')]
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
def can_copy_related(self, obj):
|
def can_copy_related(self, obj):
|
||||||
"""
|
"""
|
||||||
@@ -2079,11 +2102,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
|
|||||||
if not data: # So the browseable API will work
|
if not data: # So the browseable API will work
|
||||||
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
|
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
|
||||||
|
|
||||||
return bool(
|
if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
|
||||||
self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
|
if data.get('organization', None) is None:
|
||||||
and self.check_related('inventory', Inventory, data, role_field='use_role')
|
if self.save_messages:
|
||||||
and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
|
self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
|
||||||
)
|
return False
|
||||||
|
|
||||||
|
if not self.check_related('inventory', Inventory, data, role_field='use_role'):
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['inventory'] = [_('You do not have use_role to the inventory')]
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
||||||
|
if self.save_messages:
|
||||||
|
self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
def can_copy(self, obj):
|
def can_copy(self, obj):
|
||||||
if self.save_messages:
|
if self.save_messages:
|
||||||
@@ -2592,6 +2627,8 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
|
|||||||
if not JobLaunchConfigAccess(self.user).can_add(data):
|
if not JobLaunchConfigAccess(self.user).can_add(data):
|
||||||
return False
|
return False
|
||||||
if not data:
|
if not data:
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return self.user.has_roles.filter(permission_partials__codename__in=['execute_jobtemplate', 'update_project', 'update_inventory']).exists()
|
||||||
return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
|
return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
|
||||||
|
|
||||||
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
|
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
|
||||||
@@ -2613,13 +2650,15 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
|
|||||||
|
|
||||||
class NotificationTemplateAccess(BaseAccess):
|
class NotificationTemplateAccess(BaseAccess):
|
||||||
"""
|
"""
|
||||||
I can see/use a notification_template if I have permission to
|
Run standard logic from DAB RBAC
|
||||||
"""
|
"""
|
||||||
|
|
||||||
model = NotificationTemplate
|
model = NotificationTemplate
|
||||||
prefetch_related = ('created_by', 'modified_by', 'organization')
|
prefetch_related = ('created_by', 'modified_by', 'organization')
|
||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return self.model.access_qs(self.user, 'view')
|
||||||
return self.model.objects.filter(
|
return self.model.objects.filter(
|
||||||
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
|
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
|
||||||
).distinct()
|
).distinct()
|
||||||
@@ -2632,10 +2671,7 @@ class NotificationTemplateAccess(BaseAccess):
|
|||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
def can_change(self, obj, data):
|
def can_change(self, obj, data):
|
||||||
if obj.organization is None:
|
return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')
|
||||||
# only superusers are allowed to edit orphan notification templates
|
|
||||||
return False
|
|
||||||
return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
|
|
||||||
|
|
||||||
def can_admin(self, obj, data):
|
def can_admin(self, obj, data):
|
||||||
return self.can_change(obj, data)
|
return self.can_change(obj, data)
|
||||||
@@ -2645,9 +2681,7 @@ class NotificationTemplateAccess(BaseAccess):
|
|||||||
|
|
||||||
@check_superuser
|
@check_superuser
|
||||||
def can_start(self, obj, validate_license=True):
|
def can_start(self, obj, validate_license=True):
|
||||||
if obj.organization is None:
|
return self.can_change(obj, None)
|
||||||
return False
|
|
||||||
return self.user in obj.organization.notification_admin_role
|
|
||||||
|
|
||||||
|
|
||||||
class NotificationAccess(BaseAccess):
|
class NotificationAccess(BaseAccess):
|
||||||
@@ -2788,7 +2822,7 @@ class ActivityStreamAccess(BaseAccess):
|
|||||||
| Q(notification_template__organization__in=auditing_orgs)
|
| Q(notification_template__organization__in=auditing_orgs)
|
||||||
| Q(notification__notification_template__organization__in=auditing_orgs)
|
| Q(notification__notification_template__organization__in=auditing_orgs)
|
||||||
| Q(label__organization__in=auditing_orgs)
|
| Q(label__organization__in=auditing_orgs)
|
||||||
| Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
|
| Q(role__in=Role.visible_roles(self.user) if auditing_orgs else [])
|
||||||
)
|
)
|
||||||
|
|
||||||
project_set = Project.accessible_pk_qs(self.user, 'read_role')
|
project_set = Project.accessible_pk_qs(self.user, 'read_role')
|
||||||
@@ -2845,13 +2879,10 @@ class RoleAccess(BaseAccess):
|
|||||||
|
|
||||||
def filtered_queryset(self):
|
def filtered_queryset(self):
|
||||||
result = Role.visible_roles(self.user)
|
result = Role.visible_roles(self.user)
|
||||||
# Sanity check: is the requesting user an orphaned non-admin/auditor?
|
# Make system admin/auditor mandatorily visible.
|
||||||
# if yes, make system admin/auditor mandatorily visible.
|
mandatories = ('system_administrator', 'system_auditor')
|
||||||
if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
|
super_qs = Role.objects.filter(singleton_name__in=mandatories)
|
||||||
mandatories = ('system_administrator', 'system_auditor')
|
return result | super_qs
|
||||||
super_qs = Role.objects.filter(singleton_name__in=mandatories)
|
|
||||||
result = result | super_qs
|
|
||||||
return result
|
|
||||||
|
|
||||||
def can_add(self, obj, data):
|
def can_add(self, obj, data):
|
||||||
# Unsupported for now
|
# Unsupported for now
|
||||||
|
|||||||
@@ -66,10 +66,8 @@ class FixedSlidingWindow:
|
|||||||
|
|
||||||
|
|
||||||
class RelayWebsocketStatsManager:
|
class RelayWebsocketStatsManager:
|
||||||
def __init__(self, event_loop, local_hostname):
|
def __init__(self, local_hostname):
|
||||||
self._local_hostname = local_hostname
|
self._local_hostname = local_hostname
|
||||||
|
|
||||||
self._event_loop = event_loop
|
|
||||||
self._stats = dict()
|
self._stats = dict()
|
||||||
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
|
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
|
||||||
|
|
||||||
@@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
|
|||||||
self.start()
|
self.start()
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
self.async_task = self._event_loop.create_task(self.run_loop())
|
self.async_task = asyncio.get_running_loop().create_task(
|
||||||
|
self.run_loop(),
|
||||||
|
name='RelayWebsocketStatsManager.run_loop',
|
||||||
|
)
|
||||||
return self.async_task
|
return self.async_task
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -1,7 +1,40 @@
|
|||||||
from django.apps import AppConfig
|
from django.apps import AppConfig
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from awx.main.utils.named_url_graph import _customize_graph, generate_graph
|
||||||
|
from awx.conf import register, fields
|
||||||
|
|
||||||
|
|
||||||
class MainConfig(AppConfig):
|
class MainConfig(AppConfig):
|
||||||
name = 'awx.main'
|
name = 'awx.main'
|
||||||
verbose_name = _('Main')
|
verbose_name = _('Main')
|
||||||
|
|
||||||
|
def load_named_url_feature(self):
|
||||||
|
models = [m for m in self.get_models() if hasattr(m, 'get_absolute_url')]
|
||||||
|
generate_graph(models)
|
||||||
|
_customize_graph()
|
||||||
|
register(
|
||||||
|
'NAMED_URL_FORMATS',
|
||||||
|
field_class=fields.DictField,
|
||||||
|
read_only=True,
|
||||||
|
label=_('Formats of all available named urls'),
|
||||||
|
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
|
||||||
|
category=_('Named URL'),
|
||||||
|
category_slug='named-url',
|
||||||
|
)
|
||||||
|
register(
|
||||||
|
'NAMED_URL_GRAPH_NODES',
|
||||||
|
field_class=fields.DictField,
|
||||||
|
read_only=True,
|
||||||
|
label=_('List of all named url graph nodes.'),
|
||||||
|
help_text=_(
|
||||||
|
'Read-only list of key-value pairs that exposes named URL graph topology.'
|
||||||
|
' Use this list to programmatically generate named URLs for resources'
|
||||||
|
),
|
||||||
|
category=_('Named URL'),
|
||||||
|
category_slug='named-url',
|
||||||
|
)
|
||||||
|
|
||||||
|
def ready(self):
|
||||||
|
super().ready()
|
||||||
|
|
||||||
|
self.load_named_url_feature()
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
|
from django.core.checks import Error
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
# Django REST Framework
|
# Django REST Framework
|
||||||
@@ -928,6 +929,16 @@ register(
|
|||||||
category_slug='debug',
|
category_slug='debug',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
register(
|
||||||
|
'RECEPTOR_KEEP_WORK_ON_ERROR',
|
||||||
|
field_class=fields.BooleanField,
|
||||||
|
label=_('Keep receptor work on error'),
|
||||||
|
default=False,
|
||||||
|
help_text=_('Prevent receptor work from being released on when error is detected'),
|
||||||
|
category=('Debug'),
|
||||||
|
category_slug='debug',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def logging_validate(serializer, attrs):
|
def logging_validate(serializer, attrs):
|
||||||
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
|
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
|
||||||
@@ -954,3 +965,27 @@ def logging_validate(serializer, attrs):
|
|||||||
|
|
||||||
|
|
||||||
register_validate('logging', logging_validate)
|
register_validate('logging', logging_validate)
|
||||||
|
|
||||||
|
|
||||||
|
def csrf_trusted_origins_validate(serializer, attrs):
|
||||||
|
if not serializer.instance or not hasattr(serializer.instance, 'CSRF_TRUSTED_ORIGINS'):
|
||||||
|
return attrs
|
||||||
|
if 'CSRF_TRUSTED_ORIGINS' not in attrs:
|
||||||
|
return attrs
|
||||||
|
errors = []
|
||||||
|
for origin in attrs['CSRF_TRUSTED_ORIGINS']:
|
||||||
|
if "://" not in origin:
|
||||||
|
errors.append(
|
||||||
|
Error(
|
||||||
|
"As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
|
||||||
|
"setting must start with a scheme (usually http:// or "
|
||||||
|
"https://) but found %s. See the release notes for details." % origin,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if errors:
|
||||||
|
error_messages = [error.msg for error in errors]
|
||||||
|
raise serializers.ValidationError(_('\n'.join(error_messages)))
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
register_validate('system', csrf_trusted_origins_validate)
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ __all__ = [
|
|||||||
'STANDARD_INVENTORY_UPDATE_ENV',
|
'STANDARD_INVENTORY_UPDATE_ENV',
|
||||||
]
|
]
|
||||||
|
|
||||||
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
|
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
|
||||||
PRIVILEGE_ESCALATION_METHODS = [
|
PRIVILEGE_ESCALATION_METHODS = [
|
||||||
('sudo', _('Sudo')),
|
('sudo', _('Sudo')),
|
||||||
('su', _('Su')),
|
('su', _('Su')),
|
||||||
@@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
|
|||||||
}
|
}
|
||||||
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
|
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
|
||||||
ACTIVE_STATES = CAN_CANCEL
|
ACTIVE_STATES = CAN_CANCEL
|
||||||
|
ERROR_STATES = ('error',)
|
||||||
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
|
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
|
||||||
CENSOR_VALUE = '************'
|
CENSOR_VALUE = '************'
|
||||||
ENV_BLOCKLIST = frozenset(
|
ENV_BLOCKLIST = frozenset(
|
||||||
@@ -114,3 +115,28 @@ SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS = 'unique_managed_hosts'
|
|||||||
|
|
||||||
# Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
|
# Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
|
||||||
HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
|
HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
|
||||||
|
|
||||||
|
# Data for RBAC compatibility layer
|
||||||
|
role_name_to_perm_mapping = {
|
||||||
|
'adhoc_role': ['adhoc_'],
|
||||||
|
'approval_role': ['approve_'],
|
||||||
|
'auditor_role': ['audit_'],
|
||||||
|
'admin_role': ['change_', 'add_', 'delete_'],
|
||||||
|
'execute_role': ['execute_'],
|
||||||
|
'read_role': ['view_'],
|
||||||
|
'update_role': ['update_'],
|
||||||
|
'member_role': ['member_'],
|
||||||
|
'use_role': ['use_'],
|
||||||
|
}
|
||||||
|
|
||||||
|
org_role_to_permission = {
|
||||||
|
'notification_admin_role': 'add_notificationtemplate',
|
||||||
|
'project_admin_role': 'add_project',
|
||||||
|
'execute_role': 'execute_jobtemplate',
|
||||||
|
'inventory_admin_role': 'add_inventory',
|
||||||
|
'credential_admin_role': 'add_credential',
|
||||||
|
'workflow_admin_role': 'add_workflowjobtemplate',
|
||||||
|
'job_template_admin_role': 'change_jobtemplate', # TODO: this doesnt really work, solution not clear
|
||||||
|
'execution_environment_admin_role': 'add_executionenvironment',
|
||||||
|
'auditor_role': 'view_project', # TODO: also doesnt really work
|
||||||
|
}
|
||||||
|
|||||||
@@ -102,7 +102,8 @@ def create_listener_connection():
|
|||||||
|
|
||||||
# Apply overrides specifically for the listener connection
|
# Apply overrides specifically for the listener connection
|
||||||
for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
|
for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
|
||||||
conf[k] = v
|
if k != 'OPTIONS':
|
||||||
|
conf[k] = v
|
||||||
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
|
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
|
||||||
conf['OPTIONS'][k] = v
|
conf['OPTIONS'][k] = v
|
||||||
|
|
||||||
|
|||||||
@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
|
|||||||
kwargs.setdefault('related_name', '+')
|
kwargs.setdefault('related_name', '+')
|
||||||
kwargs.setdefault('null', 'True')
|
kwargs.setdefault('null', 'True')
|
||||||
kwargs.setdefault('editable', False)
|
kwargs.setdefault('editable', False)
|
||||||
kwargs.setdefault('on_delete', models.CASCADE)
|
kwargs.setdefault('on_delete', models.SET_NULL)
|
||||||
super(ImplicitRoleField, self).__init__(*args, **kwargs)
|
super(ImplicitRoleField, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
def deconstruct(self):
|
def deconstruct(self):
|
||||||
|
|||||||
12
awx/main/management/commands/check_instance_ready.py
Normal file
12
awx/main/management/commands/check_instance_ready.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from awx.main.models.ha import Instance
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = 'Check if the task manager instance is ready throw error if not ready, can be use as readiness probe for k8s.'
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
if Instance.objects.me().node_state != Instance.States.READY:
|
||||||
|
raise CommandError('Instance is not ready') # so that return code is not 0
|
||||||
|
|
||||||
|
return
|
||||||
@@ -2,6 +2,7 @@
|
|||||||
# All Rights Reserved
|
# All Rights Reserved
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.db import transaction
|
||||||
from crum import impersonate
|
from crum import impersonate
|
||||||
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
|
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
|
||||||
from awx.main.signals import disable_computed_fields
|
from awx.main.signals import disable_computed_fields
|
||||||
@@ -13,6 +14,12 @@ class Command(BaseCommand):
|
|||||||
help = 'Creates a preload tower data if there is none.'
|
help = 'Creates a preload tower data if there is none.'
|
||||||
|
|
||||||
def handle(self, *args, **kwargs):
|
def handle(self, *args, **kwargs):
|
||||||
|
# Wrap the operation in an atomic block, so we do not on accident
|
||||||
|
# create the organization but not create the project, etc.
|
||||||
|
with transaction.atomic():
|
||||||
|
self._handle()
|
||||||
|
|
||||||
|
def _handle(self):
|
||||||
changed = False
|
changed = False
|
||||||
|
|
||||||
# Create a default organization as the first superuser found.
|
# Create a default organization as the first superuser found.
|
||||||
@@ -43,10 +50,11 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
||||||
c, _ = Credential.objects.get_or_create(
|
c, _ = Credential.objects.get_or_create(
|
||||||
credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
|
credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser
|
||||||
)
|
)
|
||||||
|
|
||||||
c.admin_role.members.add(superuser)
|
if superuser:
|
||||||
|
c.admin_role.members.add(superuser)
|
||||||
|
|
||||||
public_galaxy_credential, _ = Credential.objects.get_or_create(
|
public_galaxy_credential, _ = Credential.objects.get_or_create(
|
||||||
name='Ansible Galaxy',
|
name='Ansible Galaxy',
|
||||||
|
|||||||
@@ -2,10 +2,11 @@ import json
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
|
||||||
from awx.conf import settings_registry
|
from awx.conf import settings_registry
|
||||||
|
|
||||||
|
|
||||||
@@ -40,6 +41,15 @@ class Command(BaseCommand):
|
|||||||
"USER_SEARCH": False,
|
"USER_SEARCH": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def is_enabled(self, settings, keys):
|
||||||
|
missing_fields = []
|
||||||
|
for key, required in keys.items():
|
||||||
|
if required and not settings.get(key):
|
||||||
|
missing_fields.append(key)
|
||||||
|
if missing_fields:
|
||||||
|
return False, missing_fields
|
||||||
|
return True, None
|
||||||
|
|
||||||
def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
|
def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
|
||||||
awx_ldap_settings = {}
|
awx_ldap_settings = {}
|
||||||
|
|
||||||
@@ -64,15 +74,17 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
if new_key == "SERVER_URI" and value:
|
if new_key == "SERVER_URI" and value:
|
||||||
value = value.split(", ")
|
value = value.split(", ")
|
||||||
|
grouped_settings[index][new_key] = value
|
||||||
|
|
||||||
|
if type(value).__name__ == "LDAPSearch":
|
||||||
|
data = []
|
||||||
|
data.append(value.base_dn)
|
||||||
|
data.append("SCOPE_SUBTREE")
|
||||||
|
data.append(value.filterstr)
|
||||||
|
grouped_settings[index][new_key] = data
|
||||||
|
|
||||||
return grouped_settings
|
return grouped_settings
|
||||||
|
|
||||||
def is_enabled(self, settings, keys):
|
|
||||||
for key, required in keys.items():
|
|
||||||
if required and not settings.get(key):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def get_awx_saml_settings(self) -> dict[str, Any]:
|
def get_awx_saml_settings(self) -> dict[str, Any]:
|
||||||
awx_saml_settings = {}
|
awx_saml_settings = {}
|
||||||
for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
|
for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
|
||||||
@@ -82,7 +94,7 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
def format_config_data(self, enabled, awx_settings, type, keys, name):
|
def format_config_data(self, enabled, awx_settings, type, keys, name):
|
||||||
config = {
|
config = {
|
||||||
"type": f"awx.authentication.authenticator_plugins.{type}",
|
"type": f"ansible_base.authentication.authenticator_plugins.{type}",
|
||||||
"name": name,
|
"name": name,
|
||||||
"enabled": enabled,
|
"enabled": enabled,
|
||||||
"create_objects": True,
|
"create_objects": True,
|
||||||
@@ -130,7 +142,7 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
# dump SAML settings
|
# dump SAML settings
|
||||||
awx_saml_settings = self.get_awx_saml_settings()
|
awx_saml_settings = self.get_awx_saml_settings()
|
||||||
awx_saml_enabled = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
|
awx_saml_enabled, saml_missing_fields = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
|
||||||
if awx_saml_enabled:
|
if awx_saml_enabled:
|
||||||
awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
|
awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
|
||||||
data.append(
|
data.append(
|
||||||
@@ -142,21 +154,25 @@ class Command(BaseCommand):
|
|||||||
awx_saml_name,
|
awx_saml_name,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
data.append({"SAML_missing_fields": saml_missing_fields})
|
||||||
|
|
||||||
# dump LDAP settings
|
# dump LDAP settings
|
||||||
awx_ldap_group_settings = self.get_awx_ldap_settings()
|
awx_ldap_group_settings = self.get_awx_ldap_settings()
|
||||||
for awx_ldap_name, awx_ldap_settings in enumerate(awx_ldap_group_settings.values()):
|
for awx_ldap_name, awx_ldap_settings in awx_ldap_group_settings.items():
|
||||||
enabled = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
|
awx_ldap_enabled, ldap_missing_fields = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
|
||||||
if enabled:
|
if awx_ldap_enabled:
|
||||||
data.append(
|
data.append(
|
||||||
self.format_config_data(
|
self.format_config_data(
|
||||||
enabled,
|
awx_ldap_enabled,
|
||||||
awx_ldap_settings,
|
awx_ldap_settings,
|
||||||
"ldap",
|
"ldap",
|
||||||
self.DAB_LDAP_AUTHENTICATOR_KEYS,
|
self.DAB_LDAP_AUTHENTICATOR_KEYS,
|
||||||
str(awx_ldap_name),
|
f"LDAP_{awx_ldap_name}",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
data.append({f"LDAP_{awx_ldap_name}_missing_fields": ldap_missing_fields})
|
||||||
|
|
||||||
# write to file if requested
|
# write to file if requested
|
||||||
if options["output_file"]:
|
if options["output_file"]:
|
||||||
|
|||||||
151
awx/main/management/commands/job_performance_rollup.py
Normal file
151
awx/main/management/commands/job_performance_rollup.py
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
# Copyright (c) 2015 Ansible, Inc.
|
||||||
|
# All Rights Reserved
|
||||||
|
|
||||||
|
# Django
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.db import connection
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""
|
||||||
|
Emits some simple statistics suitable for external monitoring
|
||||||
|
"""
|
||||||
|
|
||||||
|
help = 'Run queries that provide an overview of the performance of the system over a given period of time'
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument('--since', action='store', dest='days', type=str, default="1", help='Max days to look back to for data')
|
||||||
|
parser.add_argument('--limit', action='store', dest='limit', type=str, default="10", help='Max number of records for database queries (LIMIT)')
|
||||||
|
|
||||||
|
def execute_query(self, query):
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
cursor.execute(query)
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
return rows
|
||||||
|
|
||||||
|
def jsonify(self, title, keys, values, query):
|
||||||
|
result = []
|
||||||
|
query = re.sub('\n', ' ', query)
|
||||||
|
query = re.sub('\s{2,}', ' ', query)
|
||||||
|
for value in values:
|
||||||
|
result.append(dict(zip(keys, value)))
|
||||||
|
return {title: result, 'count': len(values), 'query': query}
|
||||||
|
|
||||||
|
def jobs_pending_duration(self, days, limit):
|
||||||
|
"""Return list of jobs sorted by time in pending within configured number of days (within limit)"""
|
||||||
|
query = f"""
|
||||||
|
SELECT name, id AS job_id, unified_job_template_id, created, started - created AS pending_duration
|
||||||
|
FROM main_unifiedjob
|
||||||
|
WHERE finished IS NOT null
|
||||||
|
AND started IS NOT null
|
||||||
|
AND cancel_flag IS NOT true
|
||||||
|
AND created > NOW() - INTERVAL '{days} days'
|
||||||
|
AND started - created > INTERVAL '0 seconds'
|
||||||
|
ORDER BY pending_duration DESC
|
||||||
|
LIMIT {limit};"""
|
||||||
|
values = self.execute_query(query)
|
||||||
|
return self.jsonify(
|
||||||
|
title='completed_or_started_jobs_by_pending_duration',
|
||||||
|
keys=('job_name', 'job_id', 'unified_job_template_id', 'job_created', 'pending_duration'),
|
||||||
|
values=values,
|
||||||
|
query=query,
|
||||||
|
)
|
||||||
|
|
||||||
|
def times_of_day_pending_more_than_X_min(self, days, limit, minutes_pending):
|
||||||
|
"""Return list of jobs sorted by time in pending within configured number of days (within limit)"""
|
||||||
|
query = f"""
|
||||||
|
SELECT
|
||||||
|
date_trunc('hour', created) as day_and_hour,
|
||||||
|
COUNT(created) as count_jobs_pending_greater_than_{minutes_pending}_min
|
||||||
|
FROM main_unifiedjob
|
||||||
|
WHERE started IS NOT NULL
|
||||||
|
AND started - created > INTERVAL '{minutes_pending} minutes'
|
||||||
|
AND created > NOW() - INTERVAL '{days} days'
|
||||||
|
GROUP BY date_trunc('hour', created)
|
||||||
|
ORDER BY count_jobs_pending_greater_than_{minutes_pending}_min DESC
|
||||||
|
LIMIT {limit};"""
|
||||||
|
values = self.execute_query(query)
|
||||||
|
return self.jsonify(
|
||||||
|
title=f'times_of_day_pending_more_than_{minutes_pending}',
|
||||||
|
keys=('day_and_hour', f'count_jobs_pending_more_than_{minutes_pending}_min'),
|
||||||
|
values=values,
|
||||||
|
query=query,
|
||||||
|
)
|
||||||
|
|
||||||
|
def pending_jobs_details(self, days, limit):
|
||||||
|
"""Return list of jobs that are in pending and list details such as reasons they may be blocked, within configured number of days and limit."""
|
||||||
|
query = f"""
|
||||||
|
SELECT DISTINCT ON(A.id) A.name, A.id, A.unified_job_template_id, A.created, NOW() - A.created as pending_duration, F.allow_simultaneous, B.current_job_id as current_ujt_job, I.to_unifiedjob_id as dependency_job_id, A.dependencies_processed
|
||||||
|
FROM main_unifiedjob A
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT C.id, C.current_job_id FROM main_unifiedjobtemplate as C
|
||||||
|
) B
|
||||||
|
ON A.unified_job_template_id = B.id
|
||||||
|
LEFT JOIN main_job F ON A.id = F.unifiedjob_ptr_id
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT * FROM main_unifiedjob_dependent_jobs as G
|
||||||
|
RIGHT JOIN main_unifiedjob H ON G.to_unifiedjob_id = H.id
|
||||||
|
) I
|
||||||
|
ON A.id = I.from_unifiedjob_id
|
||||||
|
WHERE A.status = 'pending'
|
||||||
|
AND A.created > NOW() - INTERVAL '{days} days'
|
||||||
|
ORDER BY id DESC
|
||||||
|
LIMIT {limit};"""
|
||||||
|
values = self.execute_query(query)
|
||||||
|
return self.jsonify(
|
||||||
|
title='pending_jobs_details',
|
||||||
|
keys=(
|
||||||
|
'job_name',
|
||||||
|
'job_id',
|
||||||
|
'unified_job_template_id',
|
||||||
|
'job_created',
|
||||||
|
'pending_duration',
|
||||||
|
'allow_simultaneous',
|
||||||
|
'current_ujt_job',
|
||||||
|
'dependency_job_id',
|
||||||
|
'dependencies_processed',
|
||||||
|
),
|
||||||
|
values=values,
|
||||||
|
query=query,
|
||||||
|
)
|
||||||
|
|
||||||
|
def jobs_by_FUNC_event_processing_time(self, func, days, limit):
|
||||||
|
"""Return list of jobs sorted by MAX job event procesing time within configured number of days (within limit)"""
|
||||||
|
if func not in ('MAX', 'MIN', 'AVG', 'SUM'):
|
||||||
|
raise RuntimeError('Only able to asses job events grouped by job with MAX, MIN, AVG, SUM functions')
|
||||||
|
|
||||||
|
query = f"""SELECT job_id, {func}(A.modified - A.created) as job_event_processing_delay_{func}, B.name, B.created, B.finished, B.controller_node, B.execution_node
|
||||||
|
FROM main_jobevent A
|
||||||
|
RIGHT JOIN (
|
||||||
|
SELECT id, created, name, finished, controller_node, execution_node FROM
|
||||||
|
main_unifiedjob
|
||||||
|
WHERE created > NOW() - INTERVAL '{days} days'
|
||||||
|
AND created IS NOT null
|
||||||
|
AND finished IS NOT null
|
||||||
|
AND id IS NOT null
|
||||||
|
AND name IS NOT null
|
||||||
|
) B
|
||||||
|
ON A.job_id=B.id
|
||||||
|
WHERE A.job_id is not null
|
||||||
|
GROUP BY job_id, B.name, B.created, B.finished, B.controller_node, B.execution_node
|
||||||
|
ORDER BY job_event_processing_delay_{func} DESC
|
||||||
|
LIMIT {limit};"""
|
||||||
|
values = self.execute_query(query)
|
||||||
|
return self.jsonify(
|
||||||
|
title=f'jobs_by_{func}_event_processing',
|
||||||
|
keys=('job_id', f'{func}_job_event_processing_delay', 'job_name', 'job_created_time', 'job_finished_time', 'controller_node', 'execution_node'),
|
||||||
|
values=values,
|
||||||
|
query=query,
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
items = []
|
||||||
|
for func in ('MAX', 'MIN', 'AVG'):
|
||||||
|
items.append(self.jobs_by_FUNC_event_processing_time(func, options['days'], options['limit']))
|
||||||
|
items.append(self.jobs_pending_duration(options['days'], options['limit']))
|
||||||
|
items.append(self.pending_jobs_details(options['days'], options['limit']))
|
||||||
|
items.append(self.times_of_day_pending_more_than_X_min(options['days'], options['limit'], minutes_pending=10))
|
||||||
|
self.stdout.write(json.dumps(items, indent=4, sort_keys=True, default=str))
|
||||||
@@ -101,8 +101,9 @@ class Command(BaseCommand):
|
|||||||
migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
|
migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
|
||||||
connection.close() # Because of async nature, main loop will use new connection, so close this
|
connection.close() # Because of async nature, main loop will use new connection, so close this
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
|
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
|
||||||
time.sleep(10)
|
# sleeping before logging because logging rely on setting which require database connection...
|
||||||
|
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
|
||||||
return
|
return
|
||||||
|
|
||||||
# In containerized deployments, migrations happen in the task container,
|
# In containerized deployments, migrations happen in the task container,
|
||||||
@@ -121,13 +122,14 @@ class Command(BaseCommand):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
my_hostname = Instance.objects.my_hostname()
|
my_hostname = Instance.objects.my_hostname() # This relies on settings.CLUSTER_HOST_ID which requires database connection
|
||||||
logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
|
logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
|
||||||
except RuntimeError as e:
|
except RuntimeError as e:
|
||||||
# the CLUSTER_HOST_ID in the task, and web instance must match and
|
# the CLUSTER_HOST_ID in the task, and web instance must match and
|
||||||
# ensure network connectivity between the task and web instance
|
# ensure network connectivity between the task and web instance
|
||||||
logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
|
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
|
||||||
time.sleep(5)
|
# sleeping before logging because logging rely on setting which require database connection...
|
||||||
|
logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
|
||||||
return
|
return
|
||||||
|
|
||||||
if options.get('status'):
|
if options.get('status'):
|
||||||
@@ -165,14 +167,15 @@ class Command(BaseCommand):
|
|||||||
return
|
return
|
||||||
|
|
||||||
WebsocketsMetricsServer().start()
|
WebsocketsMetricsServer().start()
|
||||||
websocket_relay_manager = WebSocketRelayManager()
|
|
||||||
|
|
||||||
while True:
|
try:
|
||||||
try:
|
logger.info('Starting Websocket Relayer...')
|
||||||
asyncio.run(websocket_relay_manager.run())
|
websocket_relay_manager = WebSocketRelayManager()
|
||||||
except KeyboardInterrupt:
|
asyncio.run(websocket_relay_manager.run())
|
||||||
logger.info('Shutting down Websocket Relayer')
|
except KeyboardInterrupt:
|
||||||
break
|
logger.info('Terminating Websocket Relayer')
|
||||||
except Exception as e:
|
except BaseException as e: # BaseException is used to catch all exceptions including asyncio.CancelledError
|
||||||
logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
|
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
|
||||||
time.sleep(10)
|
# sleeping before logging because logging rely on setting which require database connection...
|
||||||
|
logger.warning(f"Encounter error while running Websocket Relayer {e}, slept for 10s...")
|
||||||
|
return
|
||||||
|
|||||||
@@ -1,28 +1,25 @@
|
|||||||
# Copyright (c) 2015 Ansible, Inc.
|
# Copyright (c) 2015 Ansible, Inc.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
|
||||||
|
import functools
|
||||||
import logging
|
import logging
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from pathlib import Path
|
from pathlib import Path, PurePosixPath
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib.auth import logout
|
from django.contrib.auth import logout
|
||||||
from django.contrib.auth.models import User
|
|
||||||
from django.db.migrations.recorder import MigrationRecorder
|
from django.db.migrations.recorder import MigrationRecorder
|
||||||
from django.db import connection
|
from django.db import connection
|
||||||
from django.shortcuts import redirect
|
from django.shortcuts import redirect
|
||||||
from django.apps import apps
|
|
||||||
from django.utils.deprecation import MiddlewareMixin
|
from django.utils.deprecation import MiddlewareMixin
|
||||||
from django.utils.translation import gettext_lazy as _
|
|
||||||
from django.urls import reverse, resolve
|
from django.urls import reverse, resolve
|
||||||
|
|
||||||
from awx.main import migrations
|
from awx.main import migrations
|
||||||
from awx.main.utils.named_url_graph import generate_graph, GraphNode
|
|
||||||
from awx.conf import fields, register
|
|
||||||
from awx.main.utils.profiling import AWXProfiler
|
from awx.main.utils.profiling import AWXProfiler
|
||||||
from awx.main.utils.common import memoize
|
from awx.main.utils.common import memoize
|
||||||
|
from awx.urls import get_urlpatterns
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.middleware')
|
logger = logging.getLogger('awx.main.middleware')
|
||||||
@@ -100,49 +97,7 @@ class DisableLocalAuthMiddleware(MiddlewareMixin):
|
|||||||
logout(request)
|
logout(request)
|
||||||
|
|
||||||
|
|
||||||
def _customize_graph():
|
|
||||||
from awx.main.models import Instance, Schedule, UnifiedJobTemplate
|
|
||||||
|
|
||||||
for model in [Schedule, UnifiedJobTemplate]:
|
|
||||||
if model in settings.NAMED_URL_GRAPH:
|
|
||||||
settings.NAMED_URL_GRAPH[model].remove_bindings()
|
|
||||||
settings.NAMED_URL_GRAPH.pop(model)
|
|
||||||
if User not in settings.NAMED_URL_GRAPH:
|
|
||||||
settings.NAMED_URL_GRAPH[User] = GraphNode(User, ['username'], [])
|
|
||||||
settings.NAMED_URL_GRAPH[User].add_bindings()
|
|
||||||
if Instance not in settings.NAMED_URL_GRAPH:
|
|
||||||
settings.NAMED_URL_GRAPH[Instance] = GraphNode(Instance, ['hostname'], [])
|
|
||||||
settings.NAMED_URL_GRAPH[Instance].add_bindings()
|
|
||||||
|
|
||||||
|
|
||||||
class URLModificationMiddleware(MiddlewareMixin):
|
class URLModificationMiddleware(MiddlewareMixin):
|
||||||
def __init__(self, get_response):
|
|
||||||
models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
|
|
||||||
generate_graph(models)
|
|
||||||
_customize_graph()
|
|
||||||
register(
|
|
||||||
'NAMED_URL_FORMATS',
|
|
||||||
field_class=fields.DictField,
|
|
||||||
read_only=True,
|
|
||||||
label=_('Formats of all available named urls'),
|
|
||||||
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
|
|
||||||
category=_('Named URL'),
|
|
||||||
category_slug='named-url',
|
|
||||||
)
|
|
||||||
register(
|
|
||||||
'NAMED_URL_GRAPH_NODES',
|
|
||||||
field_class=fields.DictField,
|
|
||||||
read_only=True,
|
|
||||||
label=_('List of all named url graph nodes.'),
|
|
||||||
help_text=_(
|
|
||||||
'Read-only list of key-value pairs that exposes named URL graph topology.'
|
|
||||||
' Use this list to programmatically generate named URLs for resources'
|
|
||||||
),
|
|
||||||
category=_('Named URL'),
|
|
||||||
category_slug='named-url',
|
|
||||||
)
|
|
||||||
super().__init__(get_response)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _hijack_for_old_jt_name(node, kwargs, named_url):
|
def _hijack_for_old_jt_name(node, kwargs, named_url):
|
||||||
try:
|
try:
|
||||||
@@ -183,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _convert_named_url(cls, url_path):
|
def _convert_named_url(cls, url_path):
|
||||||
url_units = url_path.split('/')
|
default_prefix = PurePosixPath('/api/v2/')
|
||||||
# If the identifier is an empty string, it is always invalid.
|
optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
|
||||||
if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
|
|
||||||
return url_path
|
url_path_original = url_path
|
||||||
resource = url_units[3]
|
url_path = PurePosixPath(url_path)
|
||||||
|
|
||||||
|
if set(optional_prefix.parts).issubset(set(url_path.parts)):
|
||||||
|
url_prefix = optional_prefix
|
||||||
|
elif set(default_prefix.parts).issubset(set(url_path.parts)):
|
||||||
|
url_prefix = default_prefix
|
||||||
|
else:
|
||||||
|
return url_path_original
|
||||||
|
|
||||||
|
# Remove prefix
|
||||||
|
url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
|
||||||
|
try:
|
||||||
|
resource_path = PurePosixPath(url_path.parts[0])
|
||||||
|
name = url_path.parts[1]
|
||||||
|
url_suffix = PurePosixPath(*url_path.parts[2:]) # remove name and resource
|
||||||
|
except IndexError:
|
||||||
|
return url_path_original
|
||||||
|
|
||||||
|
resource = resource_path.parts[0]
|
||||||
if resource in settings.NAMED_URL_MAPPINGS:
|
if resource in settings.NAMED_URL_MAPPINGS:
|
||||||
url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
|
pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
|
||||||
return '/'.join(url_units)
|
else:
|
||||||
|
return url_path_original
|
||||||
|
|
||||||
|
parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
|
||||||
|
return PurePosixPath(*parts).as_posix() + '/'
|
||||||
|
|
||||||
def process_request(self, request):
|
def process_request(self, request):
|
||||||
old_path = request.path_info
|
old_path = request.path_info
|
||||||
@@ -220,3 +197,27 @@ class MigrationRanCheckMiddleware(MiddlewareMixin):
|
|||||||
def process_request(self, request):
|
def process_request(self, request):
|
||||||
if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
|
if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
|
||||||
return redirect(reverse("ui:migrations_notran"))
|
return redirect(reverse("ui:migrations_notran"))
|
||||||
|
|
||||||
|
|
||||||
|
class OptionalURLPrefixPath(MiddlewareMixin):
|
||||||
|
@functools.lru_cache
|
||||||
|
def _url_optional(self, prefix):
|
||||||
|
# Relavant Django code path https://github.com/django/django/blob/stable/4.2.x/django/core/handlers/base.py#L300
|
||||||
|
#
|
||||||
|
# resolve_request(request)
|
||||||
|
# get_resolver(request.urlconf)
|
||||||
|
# _get_cached_resolver(request.urlconf) <-- cached via @functools.cache
|
||||||
|
#
|
||||||
|
# Django will attempt to cache the value(s) of request.urlconf
|
||||||
|
# Being hashable is a prerequisit for being cachable.
|
||||||
|
# tuple() is hashable list() is not.
|
||||||
|
# Hence the tuple(list()) wrap.
|
||||||
|
return tuple(get_urlpatterns(prefix=prefix))
|
||||||
|
|
||||||
|
def process_request(self, request):
|
||||||
|
prefix = settings.OPTIONAL_API_URLPATTERN_PREFIX
|
||||||
|
|
||||||
|
if request.path.startswith(f"/api/{prefix}"):
|
||||||
|
request.urlconf = self._url_optional(prefix)
|
||||||
|
else:
|
||||||
|
request.urlconf = 'awx.urls'
|
||||||
|
|||||||
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='execute_role',
|
name='execute_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='job_template_admin_role',
|
name='job_template_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='credential_admin_role',
|
name='credential_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='inventory_admin_role',
|
name='inventory_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='project_admin_role',
|
name='project_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='workflow_admin_role',
|
name='workflow_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='notification_admin_role',
|
name='notification_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
|
|||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
|
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='inventory',
|
model_name='inventory',
|
||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
|
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
|
|||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
|
parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
|
|||||||
name='admin_role',
|
name='admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
|
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
|
|||||||
name='execute_role',
|
name='execute_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['admin_role', 'organization.execute_role'],
|
parent_role=['admin_role', 'organization.execute_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
|
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
|
|||||||
name='execute_role',
|
name='execute_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
|
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'admin_role',
|
'admin_role',
|
||||||
'execute_role',
|
'execute_role',
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='member_role',
|
name='member_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'member_role',
|
'member_role',
|
||||||
'auditor_role',
|
'auditor_role',
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='approval_role',
|
name='approval_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
preserve_default='True',
|
preserve_default='True',
|
||||||
),
|
),
|
||||||
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.approval_role', 'admin_role'],
|
parent_role=['organization.approval_role', 'admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'member_role',
|
'member_role',
|
||||||
'auditor_role',
|
'auditor_role',
|
||||||
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
|
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.job_template_admin_role'],
|
parent_role=['organization.job_template_admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['admin_role', 'organization.execute_role'],
|
parent_role=['admin_role', 'organization.execute_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
|
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.Role',
|
to='main.Role',
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='organization',
|
model_name='organization',
|
||||||
name='execution_environment_admin_role',
|
name='execution_environment_admin_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
|
||||||
),
|
),
|
||||||
preserve_default='True',
|
preserve_default='True',
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=[
|
parent_role=[
|
||||||
'member_role',
|
'member_role',
|
||||||
'auditor_role',
|
'auditor_role',
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_administrator'],
|
parent_role=['singleton:system_administrator'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.role',
|
to='main.role',
|
||||||
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
|
|||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False,
|
editable=False,
|
||||||
null='True',
|
null='True',
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
|
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
|
||||||
related_name='+',
|
related_name='+',
|
||||||
to='main.role',
|
to='main.role',
|
||||||
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
|
|||||||
model_name='instancegroup',
|
model_name='instancegroup',
|
||||||
name='use_role',
|
name='use_role',
|
||||||
field=awx.main.fields.ImplicitRoleField(
|
field=awx.main.fields.ImplicitRoleField(
|
||||||
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
|
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
|
||||||
),
|
),
|
||||||
preserve_default='True',
|
preserve_default='True',
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('main', '0189_inbound_hop_nodes'),
|
('main', '0189_inbound_hop_nodes'),
|
||||||
]
|
]
|
||||||
|
|||||||
85
awx/main/migrations/0191_add_django_permissions.py
Normal file
85
awx/main/migrations/0191_add_django_permissions.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2023-11-13 20:10
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
('main', '0190_alter_inventorysource_source_and_more'),
|
||||||
|
('dab_rbac', '__first__'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
# Add custom permissions for all special actions, like update, use, adhoc, and so on
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='credential',
|
||||||
|
options={'ordering': ('name',), 'permissions': [('use_credential', 'Can use credential in a job or related resource')]},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='instancegroup',
|
||||||
|
options={'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')]},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='inventory',
|
||||||
|
options={
|
||||||
|
'ordering': ('name',),
|
||||||
|
'permissions': [
|
||||||
|
('use_inventory', 'Can use inventory in a job template'),
|
||||||
|
('adhoc_inventory', 'Can run ad hoc commands'),
|
||||||
|
('update_inventory', 'Can update inventory sources in inventory'),
|
||||||
|
],
|
||||||
|
'verbose_name_plural': 'inventories',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='jobtemplate',
|
||||||
|
options={'ordering': ('name',), 'permissions': [('execute_jobtemplate', 'Can run this job template')]},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='project',
|
||||||
|
options={
|
||||||
|
'ordering': ('id',),
|
||||||
|
'permissions': [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='workflowjobtemplate',
|
||||||
|
options={
|
||||||
|
'permissions': [
|
||||||
|
('execute_workflowjobtemplate', 'Can run this workflow job template'),
|
||||||
|
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
|
||||||
|
]
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='organization',
|
||||||
|
options={
|
||||||
|
'default_permissions': ('change', 'delete', 'view'),
|
||||||
|
'ordering': ('name',),
|
||||||
|
'permissions': [
|
||||||
|
('member_organization', 'Basic participation permissions for organization'),
|
||||||
|
('audit_organization', 'Audit everything inside the organization'),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='team',
|
||||||
|
options={'ordering': ('organization__name', 'name'), 'permissions': [('member_team', 'Inherit all roles assigned to this team')]},
|
||||||
|
),
|
||||||
|
# Remove add default permission for a few models
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='jobtemplate',
|
||||||
|
options={
|
||||||
|
'default_permissions': ('change', 'delete', 'view'),
|
||||||
|
'ordering': ('name',),
|
||||||
|
'permissions': [('execute_jobtemplate', 'Can run this job template')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name='instancegroup',
|
||||||
|
options={
|
||||||
|
'default_permissions': ('change', 'delete', 'view'),
|
||||||
|
'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
20
awx/main/migrations/0192_custom_roles.py
Normal file
20
awx/main/migrations/0192_custom_roles.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2023-11-21 02:06
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permissions_as_operation, setup_managed_role_definitions
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
('main', '0191_add_django_permissions'),
|
||||||
|
('dab_rbac', '__first__'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
# make sure permissions and content types have been created by now
|
||||||
|
# these normally run in a post_migrate signal but we need them for our logic
|
||||||
|
migrations.RunPython(create_permissions_as_operation, migrations.RunPython.noop),
|
||||||
|
migrations.RunPython(setup_managed_role_definitions, migrations.RunPython.noop),
|
||||||
|
migrations.RunPython(migrate_to_new_rbac, migrations.RunPython.noop),
|
||||||
|
]
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2024-05-08 07:29
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Auto-generated (Django 4.2.6): refresh the ``notification_type`` choice
    list on Notification and NotificationTemplate.

    The new list includes 'awssns' (AWS SNS) — presumably the addition that
    motivated this migration; confirm against the previous migration state.
    """

    dependencies = [
        ('main', '0192_custom_roles'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notification',
            name='notification_type',
            field=models.CharField(
                choices=[
                    ('awssns', 'AWS SNS'),
                    ('email', 'Email'),
                    ('grafana', 'Grafana'),
                    ('irc', 'IRC'),
                    ('mattermost', 'Mattermost'),
                    ('pagerduty', 'Pagerduty'),
                    ('rocketchat', 'Rocket.Chat'),
                    ('slack', 'Slack'),
                    ('twilio', 'Twilio'),
                    ('webhook', 'Webhook'),
                ],
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='notificationtemplate',
            name='notification_type',
            field=models.CharField(
                choices=[
                    ('awssns', 'AWS SNS'),
                    ('email', 'Email'),
                    ('grafana', 'Grafana'),
                    ('irc', 'IRC'),
                    ('mattermost', 'Mattermost'),
                    ('pagerduty', 'Pagerduty'),
                    ('rocketchat', 'Rocket.Chat'),
                    ('slack', 'Slack'),
                    ('twilio', 'Twilio'),
                    ('webhook', 'Webhook'),
                ],
                max_length=32,
            ),
        ),
    ]
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
# Generated by Django 4.2.10 on 2024-06-12 19:59
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Auto-generated (Django 4.2.10): refresh the inventory ``source`` choice
    list on InventorySource and InventoryUpdate.

    The new list includes 'openshift_virtualization' — presumably the addition
    that motivated this migration; confirm against the previous migration state.
    """

    dependencies = [
        ('main', '0193_alter_notification_notification_type_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                    ('openshift_virtualization', 'OpenShift Virtualization'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                    ('openshift_virtualization', 'OpenShift Virtualization'),
                ],
                default=None,
                max_length=32,
            ),
        ),
    ]
|
||||||
26
awx/main/migrations/0195_EE_permissions.py
Normal file
26
awx/main/migrations/0195_EE_permissions.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
# Generated by Django 4.2.6 on 2024-06-20 15:55
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def delete_execution_environment_read_role(apps, schema_editor):
    """Delete the 'view_executionenvironment' permission row, if present,
    from both the Django auth Permission table and the DAB RBAC
    DABPermission table."""
    for app_label, model_name in (('auth', 'Permission'), ('dab_rbac', 'DABPermission')):
        perm_model = apps.get_model(app_label, model_name)
        existing = perm_model.objects.filter(codename='view_executionenvironment').first()
        if existing:
            existing.delete()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Drop the per-object read (view) permission for ExecutionEnvironment.

    Narrows the model's default_permissions to ('add', 'change', 'delete')
    and removes any previously-created 'view_executionenvironment' rows from
    both the auth and DAB permission tables.
    """

    dependencies = [
        ('main', '0194_alter_inventorysource_source_and_more'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='executionenvironment',
            options={'default_permissions': ('add', 'change', 'delete'), 'ordering': ('-created',)},
        ),
        migrations.RunPython(delete_execution_environment_read_role, migrations.RunPython.noop),
    ]
|
||||||
402
awx/main/migrations/_dab_rbac.py
Normal file
402
awx/main/migrations/_dab_rbac.py
Normal file
@@ -0,0 +1,402 @@
|
|||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from django.apps import apps as global_apps
|
||||||
|
from django.db.models import ForeignKey
|
||||||
|
from django.conf import settings
|
||||||
|
from ansible_base.rbac.migrations._utils import give_permissions
|
||||||
|
from ansible_base.rbac.management import create_dab_permissions
|
||||||
|
|
||||||
|
from awx.main.fields import ImplicitRoleField
|
||||||
|
from awx.main.constants import role_name_to_perm_mapping
|
||||||
|
|
||||||
|
from ansible_base.rbac.permission_registry import permission_registry
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger('awx.main.migrations._dab_rbac')
|
||||||
|
|
||||||
|
|
||||||
|
def create_permissions_as_operation(apps, schema_editor):
    # Create DABPermission rows for the "main" app's models immediately.
    # This normally runs in a post_migrate signal, but later data migrations
    # in this chain need the permission rows to already exist.
    create_dab_permissions(global_apps.get_app_config("main"), apps=apps)
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Data structures and methods for the migration of old Role model to ObjectRole
|
||||||
|
"""
|
||||||
|
|
||||||
|
system_admin = ImplicitRoleField(name='system_administrator')
|
||||||
|
system_auditor = ImplicitRoleField(name='system_auditor')
|
||||||
|
system_admin.model = None
|
||||||
|
system_auditor.model = None
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_parent_role(f, role_path):
    """
    Given a field and a path declared in parent_role from the field definition, like
    execute_role = ImplicitRoleField(parent_role='admin_role')
    This expects to be passed in (execute_role object, "admin_role")
    It should return the admin_role from that object
    """
    if role_path == 'singleton:system_administrator':
        return system_admin
    elif role_path == 'singleton:system_auditor':
        return system_auditor
    else:
        related_field = f
        current_model = f.model
        # Walk dotted paths such as 'organization.admin_role', hopping across
        # ForeignKeys to related models until the final role field is reached.
        for related_field_name in role_path.split('.'):
            related_field = current_model._meta.get_field(related_field_name)
            if isinstance(related_field, ForeignKey) and not isinstance(related_field, ImplicitRoleField):
                current_model = related_field.related_model
        return related_field
|
||||||
|
|
||||||
|
|
||||||
|
def build_role_map(apps):
    """
    For the old Role model, this builds and returns dictionaries (children, parents)
    which give a global mapping of the ImplicitRoleField instances according to the graph.

    Returns:
        (parents, children): two dicts mapping an ImplicitRoleField to a list
        of its parent / child ImplicitRoleField objects respectively.
    """
    models = set(apps.get_app_config('main').get_models())

    all_fields = set()
    parents = {}
    children = {}

    # include the two singleton-system sentinels in the graph
    all_fields.add(system_admin)
    all_fields.add(system_auditor)

    for cls in models:
        for f in cls._meta.get_fields():
            if isinstance(f, ImplicitRoleField):
                all_fields.add(f)

    for f in all_fields:
        if f.parent_role is not None:
            if isinstance(f.parent_role, str):
                parent_roles = [f.parent_role]
            else:
                # BUGFIX: copy the list. Aliasing f.parent_role meant the
                # append below permanently mutated the shared list stored on
                # the ImplicitRoleField definition itself.
                parent_roles = list(f.parent_role)

            # SPECIAL CASE: organization auditor_role is not a child of admin_role
            # this makes no practical sense and conflicts with expected managed role
            # so we put it in as a hack here
            if f.name == 'auditor_role' and f.model._meta.model_name == 'organization':
                parent_roles.append('admin_role')

            parent_list = []
            for rel_name in parent_roles:
                parent_list.append(resolve_parent_role(f, rel_name))

            parents[f] = parent_list

    # build children lookup from parents lookup
    for child_field, parent_list in parents.items():
        for parent_field in parent_list:
            children.setdefault(parent_field, [])
            children[parent_field].append(child_field)

    return (parents, children)
|
||||||
|
|
||||||
|
|
||||||
|
def get_descendents(f, children_map):
    """
    Given ImplicitRoleField F and the children mapping, returns all descendents
    of that field, as a set of other fields, including itself.
    """
    result = {f}
    for child in children_map.get(f, ()):
        result |= get_descendents(child, children_map)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_permissions_for_role(role_field, children_map, apps):
    """
    Compute the list of DABPermission objects that the old role (role_field)
    effectively granted: walk role_field plus all of its descendent role
    fields and map role names to permission codename prefixes, then apply the
    historical special cases for org-level notification/EE/auditor roles.
    """
    Permission = apps.get_model('dab_rbac', 'DABPermission')
    ContentType = apps.get_model('contenttypes', 'ContentType')

    perm_list = []
    for child_field in get_descendents(role_field, children_map):
        if child_field.name in role_name_to_perm_mapping:
            for perm_name in role_name_to_perm_mapping[child_field.name]:
                if perm_name == 'add_' and role_field.model._meta.model_name != 'organization':
                    continue  # only organizations can contain add permissions
                # prefix match: e.g. 'use_' finds 'use_credential' for the Credential type
                perm = Permission.objects.filter(content_type=ContentType.objects.get_for_model(child_field.model), codename__startswith=perm_name).first()
                if perm is not None and perm not in perm_list:
                    perm_list.append(perm)

    # special case for two models that have object roles but no organization roles in old system
    if role_field.name == 'notification_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
        ct = ContentType.objects.get_for_model(apps.get_model('main', 'NotificationTemplate'))
        perm_list.extend(list(Permission.objects.filter(content_type=ct)))
    if role_field.name == 'execution_environment_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
        ct = ContentType.objects.get_for_model(apps.get_model('main', 'ExecutionEnvironment'))
        perm_list.extend(list(Permission.objects.filter(content_type=ct)))

    # more special cases for those same above special org-level roles
    if role_field.name == 'auditor_role':
        perm_list.append(Permission.objects.get(codename='view_notificationtemplate'))

    return perm_list
|
||||||
|
|
||||||
|
|
||||||
|
def model_class(ct, apps):
    """
    Resolve the model class for a ContentType row using the migration-state
    `apps` registry. Model methods (ContentType.model_class) cannot be used
    inside migrations, so this duplicates that behavior, returning None when
    the model no longer exists.
    """
    try:
        resolved = apps.get_model(ct.app_label, ct.model)
    except LookupError:
        return None
    return resolved
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_to_new_rbac(apps, schema_editor):
    """
    This method moves the assigned permissions from the old rbac.py models
    to the new RoleDefinition and ObjectRole models.

    For each non-singleton old Role that has members or team grants, it finds
    or creates an equivalent RoleDefinition and assigns it, then migrates the
    old 'system_auditor' singleton membership to a new global role.
    """
    Role = apps.get_model('main', 'Role')
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
    Permission = apps.get_model('dab_rbac', 'DABPermission')

    # remove add permissions that are not valid for migrations from old versions
    for perm_str in ('add_organization', 'add_jobtemplate'):
        perm = Permission.objects.filter(codename=perm_str).first()
        if perm:
            perm.delete()

    # index managed role definitions by their frozen permission-id set so an
    # old role granting exactly those permissions can reuse the managed definition
    managed_definitions = dict()
    for role_definition in RoleDefinition.objects.filter(managed=True):
        permissions = frozenset(role_definition.permissions.values_list('id', flat=True))
        managed_definitions[permissions] = role_definition

    # Build map of old role model
    parents, children = build_role_map(apps)

    # NOTE: this import is expected to break at some point, and then just move the data here
    from awx.main.models.rbac import role_descriptions

    for role in Role.objects.prefetch_related('members', 'parents').iterator():
        if role.singleton_name:
            continue  # only bothering to migrate object roles

        # non-implicit parents are team roles that were granted this role
        team_roles = []
        for parent in role.parents.all():
            if parent.id not in json.loads(role.implicit_parents):
                team_roles.append(parent)

        # we will not create any roles that do not have any users or teams
        if not (role.members.all() or team_roles):
            logger.debug(f'Skipping role {role.role_field} for {role.content_type.model}-{role.object_id} due to no members')
            continue

        # get a list of permissions that the old role would grant
        object_cls = apps.get_model(f'main.{role.content_type.model}')
        object = object_cls.objects.get(pk=role.object_id)  # WORKAROUND, role.content_object does not work in migrations
        f = object._meta.get_field(role.role_field)  # should be ImplicitRoleField
        perm_list = get_permissions_for_role(f, children, apps)

        permissions = frozenset(perm.id for perm in perm_list)

        # With the needed permissions established, obtain the RoleDefinition this will need, priorities:
        # 1. If it exists as a managed RoleDefinition then obviously use that
        # 2. If we already created this for a prior role, use that
        # 3. Create a new RoleDefinition that lists those permissions
        if permissions in managed_definitions:
            role_definition = managed_definitions[permissions]
        else:
            action = role.role_field.rsplit('_', 1)[0]  # strip the trailing '_role' from the field name
            role_definition_name = f'{model_class(role.content_type, apps).__name__} {action.title()}'

            description = role_descriptions[role.role_field]
            if isinstance(description, dict):
                if role.content_type.model in description:
                    description = description.get(role.content_type.model)
                else:
                    description = description.get('default')
            if '%s' in description:
                description = description % role.content_type.model

            role_definition, created = RoleDefinition.objects.get_or_create(
                name=role_definition_name,
                defaults={'description': description, 'content_type_id': role.content_type_id},
            )

            if created:
                logger.info(f'Created custom Role Definition {role_definition_name}, pk={role_definition.pk}')
                role_definition.permissions.set(perm_list)

        # Create the object role and add users to it
        give_permissions(
            apps,
            role_definition,
            users=role.members.all(),
            teams=[tr.object_id for tr in team_roles],
            object_id=role.object_id,
            content_type_id=role.content_type_id,
        )

    # Create new replacement system auditor role
    new_system_auditor, created = RoleDefinition.objects.get_or_create(
        name='System Auditor',
        defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
    )
    new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))

    # migrate is_system_auditor flag, because it is no longer handled by a system role
    old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first()
    if old_system_auditor:
        # if the system auditor role is not present, this is a new install and no users should exist
        ct = 0
        # BUGFIX: iterate the members of the old singleton auditor role; the
        # previous code iterated `role`, the leftover loop variable from the
        # object-role loop above, migrating the wrong users (or none).
        for user in old_system_auditor.members.all():
            RoleUserAssignment.objects.create(user=user, role_definition=new_system_auditor)
            ct += 1
        if ct:
            logger.info(f'Migrated {ct} users to new system auditor flag')
|
||||||
|
|
||||||
|
|
||||||
|
def get_or_create_managed(name, description, ct, permissions, RoleDefinition):
    """
    Fetch or create a managed RoleDefinition by name, sync its permission set
    to exactly `permissions`, and ensure the managed flag is set (repairing
    pre-existing rows that lost it).

    Returns the RoleDefinition instance.
    """
    role_definition, created = RoleDefinition.objects.get_or_create(name=name, defaults={'managed': True, 'description': description, 'content_type': ct})
    role_definition.permissions.set(list(permissions))

    if not role_definition.managed:
        role_definition.managed = True
        role_definition.save(update_fields=['managed'])

    if created:
        # BUGFIX: log the primary key; the old message interpolated the whole
        # object (its repr) where the pk was intended.
        logger.info(f'Created RoleDefinition {role_definition.name} pk={role_definition.pk} with {len(permissions)} permissions')

    return role_definition
|
||||||
|
|
||||||
|
|
||||||
|
def setup_managed_role_definitions(apps, schema_editor):
    """
    Idempotent method to create or sync the managed role definitions.

    Builds per-object admin roles, per-organization child-object admin roles,
    "special" action roles (e.g. execute, use, adhoc), the Organization Admin
    role, and the fixed Organization Audit/Execute/Approval roles; any managed
    RoleDefinition not produced by this run is deleted as stale.
    """
    # template names keyed by category; a category is only built if present here
    to_create = {
        'object_admin': '{cls.__name__} Admin',
        'org_admin': 'Organization Admin',
        'org_children': 'Organization {cls.__name__} Admin',
        'special': '{cls.__name__} {action}',
    }

    ContentType = apps.get_model('contenttypes', 'ContentType')
    Permission = apps.get_model('dab_rbac', 'DABPermission')
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    Organization = apps.get_model(settings.ANSIBLE_BASE_ORGANIZATION_MODEL)
    org_ct = ContentType.objects.get_for_model(Organization)
    managed_role_definitions = []

    org_perms = set()
    for cls in permission_registry.all_registered_models:
        ct = ContentType.objects.get_for_model(cls)
        cls_name = cls._meta.model_name
        object_perms = set(Permission.objects.filter(content_type=ct))
        # Special case for InstanceGroup which has an organization field, but is not an organization child object
        if cls_name != 'instancegroup':
            org_perms.update(object_perms)

        if 'object_admin' in to_create and cls_name != 'organization':
            # object-level admin: all permissions for this type except add_*
            indiv_perms = object_perms.copy()
            add_perms = [perm for perm in indiv_perms if perm.codename.startswith('add_')]
            if add_perms:
                for perm in add_perms:
                    indiv_perms.remove(perm)

            managed_role_definitions.append(
                get_or_create_managed(
                    to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
                )
            )

        if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
            # org-scoped child admin: all of the type's permissions plus view_organization
            org_child_perms = object_perms.copy()
            org_child_perms.add(Permission.objects.get(codename='view_organization'))

            managed_role_definitions.append(
                get_or_create_managed(
                    to_create['org_children'].format(cls=cls),
                    f'Has all permissions to {cls._meta.verbose_name_plural} within an organization',
                    org_ct,
                    org_child_perms,
                    RoleDefinition,
                )
            )

        if 'special' in to_create:
            # "special" actions are any codename prefixes beyond CRUD/audit
            special_perms = []
            for perm in object_perms:
                # Organization auditor is handled separately
                if perm.codename.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit'):
                    special_perms.append(perm)
            for perm in special_perms:
                action = perm.codename.split('_')[0]
                view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
                perm_list = [perm, view_perm]
                # Handle special-case where adhoc role also listed use permission
                if action == 'adhoc':
                    for other_perm in object_perms:
                        if other_perm.codename == 'use_inventory':
                            perm_list.append(other_perm)
                            break
                managed_role_definitions.append(
                    get_or_create_managed(
                        to_create['special'].format(cls=cls, action=action.title()),
                        f'Has {action} permissions to a single {cls._meta.verbose_name}',
                        ct,
                        perm_list,
                        RoleDefinition,
                    )
                )

    if 'org_admin' in to_create:
        managed_role_definitions.append(
            get_or_create_managed(
                to_create['org_admin'].format(cls=Organization),
                'Has all permissions to a single organization and all objects inside of it',
                org_ct,
                org_perms,
                RoleDefinition,
            )
        )

    # Special "organization action" roles
    audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
    audit_permissions.append(Permission.objects.get(codename='audit_organization'))
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Audit',
            'Has permission to view all objects inside of a single organization',
            org_ct,
            audit_permissions,
            RoleDefinition,
        )
    )

    org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Execute',
            'Has permission to execute all runnable objects in the organization',
            org_ct,
            [perm for perm in org_perms if perm.codename in org_execute_permissions],
            RoleDefinition,
        )
    )

    org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
    managed_role_definitions.append(
        get_or_create_managed(
            'Organization Approval',
            'Has permission to approve any workflow steps within a single organization',
            org_ct,
            [perm for perm in org_perms if perm.codename in org_approval_permissions],
            RoleDefinition,
        )
    )

    # any managed definitions we did not produce this run are stale: delete them
    unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
    for role_definition in unexpected_role_definitions:
        logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
        role_definition.delete()
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
# Copyright (c) 2015 Ansible, Inc.
|
# Copyright (c) 2015 Ansible, Inc.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.conf import settings # noqa
|
from django.conf import settings # noqa
|
||||||
from django.db import connection
|
from django.db import connection
|
||||||
@@ -8,7 +10,10 @@ from django.db.models.signals import pre_delete # noqa
|
|||||||
|
|
||||||
# django-ansible-base
|
# django-ansible-base
|
||||||
from ansible_base.resource_registry.fields import AnsibleResourceField
|
from ansible_base.resource_registry.fields import AnsibleResourceField
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment
|
||||||
from ansible_base.lib.utils.models import prevent_search
|
from ansible_base.lib.utils.models import prevent_search
|
||||||
|
from ansible_base.lib.utils.models import user_summary_fields
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa
|
from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa
|
||||||
@@ -102,6 +107,7 @@ User.add_to_class('get_queryset', get_user_queryset)
|
|||||||
User.add_to_class('can_access', check_user_access)
|
User.add_to_class('can_access', check_user_access)
|
||||||
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
|
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
|
||||||
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))
|
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))
|
||||||
|
User.add_to_class('summary_fields', user_summary_fields)
|
||||||
|
|
||||||
|
|
||||||
def convert_jsonfields():
|
def convert_jsonfields():
|
||||||
@@ -170,17 +176,17 @@ pre_delete.connect(cleanup_created_modified_by, sender=User)
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def user_get_organizations(user):
|
def user_get_organizations(user):
|
||||||
return Organization.objects.filter(member_role__members=user)
|
return Organization.access_qs(user, 'member')
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user_get_admin_of_organizations(user):
|
def user_get_admin_of_organizations(user):
|
||||||
return Organization.objects.filter(admin_role__members=user)
|
return Organization.access_qs(user, 'change')
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user_get_auditor_of_organizations(user):
|
def user_get_auditor_of_organizations(user):
|
||||||
return Organization.objects.filter(auditor_role__members=user)
|
return Organization.access_qs(user, 'audit')
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -194,11 +200,21 @@ User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
|
|||||||
User.add_to_class('created', created)
|
User.add_to_class('created', created)
|
||||||
|
|
||||||
|
|
||||||
|
def get_system_auditor_role():
    """
    Return the 'System Auditor' RoleDefinition, creating it on first use.

    A newly created definition is populated with every registered permission
    whose codename starts with 'view'.
    """
    role_def, was_created = RoleDefinition.objects.get_or_create(
        name='System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
    )
    if was_created:
        view_perms = list(permission_registry.permission_qs.filter(codename__startswith='view'))
        role_def.permissions.add(*view_perms)
    return role_def
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user_is_system_auditor(user):
|
def user_is_system_auditor(user):
|
||||||
if not hasattr(user, '_is_system_auditor'):
|
if not hasattr(user, '_is_system_auditor'):
|
||||||
if user.pk:
|
if user.pk:
|
||||||
user._is_system_auditor = user.roles.filter(singleton_name='system_auditor', role_field='system_auditor').exists()
|
rd = get_system_auditor_role()
|
||||||
|
user._is_system_auditor = RoleUserAssignment.objects.filter(user=user, role_definition=rd).exists()
|
||||||
else:
|
else:
|
||||||
# Odd case where user is unsaved, this should never be relied on
|
# Odd case where user is unsaved, this should never be relied on
|
||||||
return False
|
return False
|
||||||
@@ -212,17 +228,17 @@ def user_is_system_auditor(user, tf):
|
|||||||
# time they've logged in, and we've just created the new User in this
|
# time they've logged in, and we've just created the new User in this
|
||||||
# request), we need one to set up the system auditor role
|
# request), we need one to set up the system auditor role
|
||||||
user.save()
|
user.save()
|
||||||
if tf:
|
rd = get_system_auditor_role()
|
||||||
role = Role.singleton('system_auditor')
|
assignment = RoleUserAssignment.objects.filter(user=user, role_definition=rd).first()
|
||||||
# must check if member to not duplicate activity stream
|
prior_value = bool(assignment)
|
||||||
if user not in role.members.all():
|
if prior_value != bool(tf):
|
||||||
role.members.add(user)
|
if assignment:
|
||||||
user._is_system_auditor = True
|
assignment.delete()
|
||||||
else:
|
else:
|
||||||
role = Role.singleton('system_auditor')
|
rd.give_global_permission(user)
|
||||||
if user in role.members.all():
|
user._is_system_auditor = bool(tf)
|
||||||
role.members.remove(user)
|
entry = ActivityStream.objects.create(changes=json.dumps({"is_system_auditor": [prior_value, bool(tf)]}), object1='user', operation='update')
|
||||||
user._is_system_auditor = False
|
entry.user.add(user)
|
||||||
|
|
||||||
|
|
||||||
User.add_to_class('is_system_auditor', user_is_system_auditor)
|
User.add_to_class('is_system_auditor', user_is_system_auditor)
|
||||||
@@ -290,6 +306,10 @@ activity_stream_registrar.connect(WorkflowApprovalTemplate)
|
|||||||
activity_stream_registrar.connect(OAuth2Application)
|
activity_stream_registrar.connect(OAuth2Application)
|
||||||
activity_stream_registrar.connect(OAuth2AccessToken)
|
activity_stream_registrar.connect(OAuth2AccessToken)
|
||||||
|
|
||||||
|
# Register models
|
||||||
|
permission_registry.register(Project, Team, WorkflowJobTemplate, JobTemplate, Inventory, Organization, Credential, NotificationTemplate, ExecutionEnvironment)
|
||||||
|
permission_registry.register(InstanceGroup, parent_field_name=None) # Not part of an organization
|
||||||
|
|
||||||
# prevent API filtering on certain Django-supplied sensitive fields
|
# prevent API filtering on certain Django-supplied sensitive fields
|
||||||
prevent_search(User._meta.get_field('password'))
|
prevent_search(User._meta.get_field('password'))
|
||||||
prevent_search(OAuth2AccessToken._meta.get_field('token'))
|
prevent_search(OAuth2AccessToken._meta.get_field('token'))
|
||||||
|
|||||||
@@ -7,6 +7,9 @@ from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.lib.utils.models import get_type_for_model
|
||||||
|
|
||||||
# Django-CRUM
|
# Django-CRUM
|
||||||
from crum import get_current_user
|
from crum import get_current_user
|
||||||
|
|
||||||
@@ -139,6 +142,23 @@ class BaseModel(models.Model):
|
|||||||
self.save(update_fields=update_fields)
|
self.save(update_fields=update_fields)
|
||||||
return update_fields
|
return update_fields
|
||||||
|
|
||||||
|
def summary_fields(self):
|
||||||
|
"""
|
||||||
|
This exists for use by django-ansible-base,
|
||||||
|
which has standard patterns that differ from AWX, but we enable views from DAB
|
||||||
|
for those views to list summary_fields for AWX models, those models need to provide this
|
||||||
|
"""
|
||||||
|
from awx.api.serializers import SUMMARIZABLE_FK_FIELDS
|
||||||
|
|
||||||
|
model_name = get_type_for_model(self)
|
||||||
|
related_fields = SUMMARIZABLE_FK_FIELDS.get(model_name, {})
|
||||||
|
summary_data = {}
|
||||||
|
for field_name in related_fields:
|
||||||
|
fval = getattr(self, field_name, None)
|
||||||
|
if fval is not None:
|
||||||
|
summary_data[field_name] = fval
|
||||||
|
return summary_data
|
||||||
|
|
||||||
|
|
||||||
class CreatedModifiedModel(BaseModel):
|
class CreatedModifiedModel(BaseModel):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -21,6 +21,10 @@ from django.conf import settings
|
|||||||
from django.utils.encoding import force_str
|
from django.utils.encoding import force_str
|
||||||
from django.utils.functional import cached_property
|
from django.utils.functional import cached_property
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
|
from django.contrib.auth.models import User
|
||||||
|
|
||||||
|
# DRF
|
||||||
|
from rest_framework.serializers import ValidationError as DRFValidationError
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
@@ -41,6 +45,7 @@ from awx.main.models.rbac import (
|
|||||||
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
|
||||||
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
||||||
)
|
)
|
||||||
|
from awx.main.models import Team, Organization
|
||||||
from awx.main.utils import encrypt_field
|
from awx.main.utils import encrypt_field
|
||||||
from . import injectors as builtin_injectors
|
from . import injectors as builtin_injectors
|
||||||
|
|
||||||
@@ -83,6 +88,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
|||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
unique_together = ('organization', 'name', 'credential_type')
|
unique_together = ('organization', 'name', 'credential_type')
|
||||||
|
permissions = [('use_credential', 'Can use credential in a job or related resource')]
|
||||||
|
|
||||||
PASSWORD_FIELDS = ['inputs']
|
PASSWORD_FIELDS = ['inputs']
|
||||||
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
|
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
|
||||||
@@ -314,6 +320,16 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
|||||||
else:
|
else:
|
||||||
raise ValueError('{} is not a dynamic input field'.format(field_name))
|
raise ValueError('{} is not a dynamic input field'.format(field_name))
|
||||||
|
|
||||||
|
def validate_role_assignment(self, actor, role_definition):
|
||||||
|
if self.organization:
|
||||||
|
if isinstance(actor, User):
|
||||||
|
if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
|
||||||
|
return
|
||||||
|
if isinstance(actor, Team):
|
||||||
|
if actor.organization == self.organization:
|
||||||
|
return
|
||||||
|
raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
|
||||||
|
|
||||||
|
|
||||||
class CredentialType(CommonModelNameNotUnique):
|
class CredentialType(CommonModelNameNotUnique):
|
||||||
"""
|
"""
|
||||||
@@ -1231,6 +1247,14 @@ ManagedCredentialType(
|
|||||||
'multiline': True,
|
'multiline': True,
|
||||||
'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'),
|
'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'),
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
'id': 'gce_credentials',
|
||||||
|
'label': gettext_noop('Google Cloud Platform account credentials'),
|
||||||
|
'type': 'string',
|
||||||
|
'secret': True,
|
||||||
|
'multiline': True,
|
||||||
|
'help_text': gettext_noop('Google Cloud Platform account credentials in JSON format.'),
|
||||||
|
},
|
||||||
],
|
],
|
||||||
'required': ['configuration'],
|
'required': ['configuration'],
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -130,3 +130,10 @@ def terraform(cred, env, private_data_dir):
|
|||||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||||
f.write(cred.get_input('configuration'))
|
f.write(cred.get_input('configuration'))
|
||||||
env['TF_BACKEND_CONFIG_FILE'] = to_container_path(path, private_data_dir)
|
env['TF_BACKEND_CONFIG_FILE'] = to_container_path(path, private_data_dir)
|
||||||
|
# Handle env variables for GCP account credentials
|
||||||
|
if 'gce_credentials' in cred.inputs:
|
||||||
|
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||||
|
with os.fdopen(handle, 'w') as f:
|
||||||
|
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||||
|
f.write(cred.get_input('gce_credentials'))
|
||||||
|
env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(path, private_data_dir)
|
||||||
|
|||||||
@@ -4,11 +4,12 @@ import datetime
|
|||||||
from datetime import timezone
|
from datetime import timezone
|
||||||
import logging
|
import logging
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
import itertools
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import ObjectDoesNotExist
|
from django.core.exceptions import ObjectDoesNotExist
|
||||||
from django.db import models, DatabaseError
|
from django.db import models, DatabaseError, transaction
|
||||||
from django.db.models.functions import Cast
|
from django.db.models.functions import Cast
|
||||||
from django.utils.dateparse import parse_datetime
|
from django.utils.dateparse import parse_datetime
|
||||||
from django.utils.text import Truncator
|
from django.utils.text import Truncator
|
||||||
@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
|
|||||||
def _update_host_metrics(updated_hosts_list):
|
def _update_host_metrics(updated_hosts_list):
|
||||||
from awx.main.models import HostMetric # circular import
|
from awx.main.models import HostMetric # circular import
|
||||||
|
|
||||||
# bulk-create
|
|
||||||
current_time = now()
|
current_time = now()
|
||||||
HostMetric.objects.bulk_create(
|
|
||||||
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
|
# FUTURE:
|
||||||
)
|
# - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
|
||||||
# bulk-update
|
# - Ability to do ORM upserts *may* have been introduced in Django 5.0.
|
||||||
batch_start, batch_size = 0, 1000
|
# See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
|
||||||
while batch_start <= len(updated_hosts_list):
|
# Hopefully this will be fully ready for batch use by 5.2 LTS.
|
||||||
batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
|
|
||||||
HostMetric.objects.filter(hostname__in=batched_host_list).update(
|
args = [iter(updated_hosts_list)] * 500
|
||||||
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
|
for hosts in itertools.zip_longest(*args):
|
||||||
)
|
with transaction.atomic():
|
||||||
batch_start += batch_size
|
HostMetric.objects.bulk_create(
|
||||||
|
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
|
||||||
|
)
|
||||||
|
HostMetric.objects.filter(hostname__in=hosts).update(
|
||||||
|
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def job_verbosity(self):
|
def job_verbosity(self):
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
from django.db import models
|
from django.db import models
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
from rest_framework.exceptions import ValidationError
|
||||||
|
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
from awx.main.models.base import CommonModel
|
from awx.main.models.base import CommonModel
|
||||||
from awx.main.validators import validate_container_image_name
|
from awx.main.validators import validate_container_image_name
|
||||||
@@ -12,6 +14,8 @@ __all__ = ['ExecutionEnvironment']
|
|||||||
class ExecutionEnvironment(CommonModel):
|
class ExecutionEnvironment(CommonModel):
|
||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('-created',)
|
ordering = ('-created',)
|
||||||
|
# Remove view permission, as a temporary solution, defer to organization read permission
|
||||||
|
default_permissions = ('add', 'change', 'delete')
|
||||||
|
|
||||||
PULL_CHOICES = [
|
PULL_CHOICES = [
|
||||||
('always', _("Always pull container before running.")),
|
('always', _("Always pull container before running.")),
|
||||||
@@ -53,3 +57,12 @@ class ExecutionEnvironment(CommonModel):
|
|||||||
|
|
||||||
def get_absolute_url(self, request=None):
|
def get_absolute_url(self, request=None):
|
||||||
return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
|
return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
|
||||||
|
|
||||||
|
def validate_role_assignment(self, actor, role_definition):
|
||||||
|
if self.managed:
|
||||||
|
raise ValidationError({'object_id': _('Can not assign object roles to managed Execution Environments')})
|
||||||
|
if self.organization_id is None:
|
||||||
|
raise ValidationError({'object_id': _('Can not assign object roles to global Execution Environments')})
|
||||||
|
|
||||||
|
if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
|
||||||
|
raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})
|
||||||
|
|||||||
@@ -485,6 +485,9 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin, ResourceMi
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
|
permissions = [('use_instancegroup', 'Can use instance group in a preference list of a resource')]
|
||||||
|
# Since this has no direct organization field only superuser can add, so remove add permission
|
||||||
|
default_permissions = ('change', 'delete', 'view')
|
||||||
|
|
||||||
def set_default_policy_fields(self):
|
def set_default_policy_fields(self):
|
||||||
self.policy_instance_list = []
|
self.policy_instance_list = []
|
||||||
|
|||||||
@@ -11,6 +11,8 @@ import os.path
|
|||||||
from urllib.parse import urljoin
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
import tempfile
|
||||||
|
import stat
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@@ -89,6 +91,11 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
|||||||
verbose_name_plural = _('inventories')
|
verbose_name_plural = _('inventories')
|
||||||
unique_together = [('name', 'organization')]
|
unique_together = [('name', 'organization')]
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
permissions = [
|
||||||
|
('use_inventory', 'Can use inventory in a job template'),
|
||||||
|
('adhoc_inventory', 'Can run ad hoc commands'),
|
||||||
|
('update_inventory', 'Can update inventory sources in inventory'),
|
||||||
|
]
|
||||||
|
|
||||||
organization = models.ForeignKey(
|
organization = models.ForeignKey(
|
||||||
'Organization',
|
'Organization',
|
||||||
@@ -926,6 +933,7 @@ class InventorySourceOptions(BaseModel):
|
|||||||
('controller', _('Red Hat Ansible Automation Platform')),
|
('controller', _('Red Hat Ansible Automation Platform')),
|
||||||
('insights', _('Red Hat Insights')),
|
('insights', _('Red Hat Insights')),
|
||||||
('terraform', _('Terraform State')),
|
('terraform', _('Terraform State')),
|
||||||
|
('openshift_virtualization', _('OpenShift Virtualization')),
|
||||||
]
|
]
|
||||||
|
|
||||||
# From the options of the Django management base command
|
# From the options of the Django management base command
|
||||||
@@ -1035,7 +1043,7 @@ class InventorySourceOptions(BaseModel):
|
|||||||
def cloud_credential_validation(source, cred):
|
def cloud_credential_validation(source, cred):
|
||||||
if not source:
|
if not source:
|
||||||
return None
|
return None
|
||||||
if cred and source not in ('custom', 'scm'):
|
if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
|
||||||
# If a credential was provided, it's important that it matches
|
# If a credential was provided, it's important that it matches
|
||||||
# the actual inventory source being used (Amazon requires Amazon
|
# the actual inventory source being used (Amazon requires Amazon
|
||||||
# credentials; Rackspace requires Rackspace credentials; etc...)
|
# credentials; Rackspace requires Rackspace credentials; etc...)
|
||||||
@@ -1044,12 +1052,14 @@ class InventorySourceOptions(BaseModel):
|
|||||||
# Allow an EC2 source to omit the credential. If Tower is running on
|
# Allow an EC2 source to omit the credential. If Tower is running on
|
||||||
# an EC2 instance with an IAM Role assigned, boto will use credentials
|
# an EC2 instance with an IAM Role assigned, boto will use credentials
|
||||||
# from the instance metadata instead of those explicitly provided.
|
# from the instance metadata instead of those explicitly provided.
|
||||||
elif source in CLOUD_PROVIDERS and source != 'ec2':
|
elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
|
||||||
return _('Credential is required for a cloud source.')
|
return _('Credential is required for a cloud source.')
|
||||||
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
|
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
|
||||||
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
|
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
|
||||||
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
|
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
|
||||||
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
|
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
|
||||||
|
elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
|
||||||
|
return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_cloud_credential(self):
|
def get_cloud_credential(self):
|
||||||
@@ -1400,7 +1410,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
|
|||||||
return selected_groups
|
return selected_groups
|
||||||
|
|
||||||
|
|
||||||
class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
|
class CustomInventoryScript(CommonModelNameNotUnique):
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
@@ -1633,17 +1643,39 @@ class satellite6(PluginFileInjector):
|
|||||||
|
|
||||||
class terraform(PluginFileInjector):
|
class terraform(PluginFileInjector):
|
||||||
plugin_name = 'terraform_state'
|
plugin_name = 'terraform_state'
|
||||||
base_injector = 'managed'
|
|
||||||
namespace = 'cloud'
|
namespace = 'cloud'
|
||||||
collection = 'terraform'
|
collection = 'terraform'
|
||||||
use_fqcn = True
|
use_fqcn = True
|
||||||
|
|
||||||
def inventory_as_dict(self, inventory_update, private_data_dir):
|
def inventory_as_dict(self, inventory_update, private_data_dir):
|
||||||
env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, None)
|
|
||||||
ret = super().inventory_as_dict(inventory_update, private_data_dir)
|
ret = super().inventory_as_dict(inventory_update, private_data_dir)
|
||||||
ret['backend_config_files'] = env["TF_BACKEND_CONFIG_FILE"]
|
credential = inventory_update.get_cloud_credential()
|
||||||
|
config_cred = credential.get_input('configuration')
|
||||||
|
if config_cred:
|
||||||
|
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||||
|
with os.fdopen(handle, 'w') as f:
|
||||||
|
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||||
|
f.write(config_cred)
|
||||||
|
ret['backend_config_files'] = to_container_path(path, private_data_dir)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
def build_plugin_private_data(self, inventory_update, private_data_dir):
|
||||||
|
credential = inventory_update.get_cloud_credential()
|
||||||
|
|
||||||
|
private_data = {'credentials': {}}
|
||||||
|
gce_cred = credential.get_input('gce_credentials', default=None)
|
||||||
|
if gce_cred:
|
||||||
|
private_data['credentials'][credential] = gce_cred
|
||||||
|
return private_data
|
||||||
|
|
||||||
|
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
|
||||||
|
env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
|
||||||
|
credential = inventory_update.get_cloud_credential()
|
||||||
|
cred_data = private_data_files['credentials']
|
||||||
|
if credential in cred_data:
|
||||||
|
env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
|
||||||
|
return env
|
||||||
|
|
||||||
|
|
||||||
class controller(PluginFileInjector):
|
class controller(PluginFileInjector):
|
||||||
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
|
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
|
||||||
@@ -1664,6 +1696,16 @@ class insights(PluginFileInjector):
|
|||||||
use_fqcn = True
|
use_fqcn = True
|
||||||
|
|
||||||
|
|
||||||
|
class openshift_virtualization(PluginFileInjector):
|
||||||
|
plugin_name = 'kubevirt'
|
||||||
|
base_injector = 'template'
|
||||||
|
namespace = 'kubevirt'
|
||||||
|
collection = 'core'
|
||||||
|
downstream_namespace = 'redhat'
|
||||||
|
downstream_collection = 'openshift_virtualization'
|
||||||
|
use_fqcn = True
|
||||||
|
|
||||||
|
|
||||||
class constructed(PluginFileInjector):
|
class constructed(PluginFileInjector):
|
||||||
plugin_name = 'constructed'
|
plugin_name = 'constructed'
|
||||||
namespace = 'ansible'
|
namespace = 'ansible'
|
||||||
|
|||||||
@@ -205,6 +205,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
|
|||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
permissions = [('execute_jobtemplate', 'Can run this job template')]
|
||||||
|
# Remove add permission, ability to add comes from use permission for inventory, project, credentials
|
||||||
|
default_permissions = ('change', 'delete', 'view')
|
||||||
|
|
||||||
job_type = models.CharField(
|
job_type = models.CharField(
|
||||||
max_length=64,
|
max_length=64,
|
||||||
|
|||||||
@@ -19,13 +19,14 @@ from django.utils.translation import gettext_lazy as _
|
|||||||
from ansible_base.lib.utils.models import prevent_search
|
from ansible_base.lib.utils.models import prevent_search
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.rbac import Role, RoleAncestorEntry
|
|
||||||
|
from awx.main.models.rbac import Role, RoleAncestorEntry, to_permissions
|
||||||
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
|
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
|
||||||
from awx.main.utils.execution_environments import get_default_execution_environment
|
from awx.main.utils.execution_environments import get_default_execution_environment
|
||||||
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
|
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
|
||||||
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
|
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
|
||||||
from awx.main.fields import AskForField
|
from awx.main.fields import AskForField
|
||||||
from awx.main.constants import ACTIVE_STATES
|
from awx.main.constants import ACTIVE_STATES, org_role_to_permission
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.models.mixins')
|
logger = logging.getLogger('awx.main.models.mixins')
|
||||||
@@ -64,6 +65,18 @@ class ResourceMixin(models.Model):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
|
def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
if cls._meta.model_name == 'organization' and role_field in org_role_to_permission:
|
||||||
|
# Organization roles can not use the DAB RBAC shortcuts
|
||||||
|
# like Organization.access_qs(user, 'change_jobtemplate') is needed
|
||||||
|
# not just Organization.access_qs(user, 'change') is needed
|
||||||
|
if accessor.is_superuser:
|
||||||
|
return cls.objects.values_list('id')
|
||||||
|
|
||||||
|
codename = org_role_to_permission[role_field]
|
||||||
|
|
||||||
|
return cls.access_ids_qs(accessor, codename, content_types=content_types)
|
||||||
|
return cls.access_ids_qs(accessor, to_permissions[role_field], content_types=content_types)
|
||||||
if accessor._meta.model_name == 'user':
|
if accessor._meta.model_name == 'user':
|
||||||
ancestor_roles = accessor.roles.all()
|
ancestor_roles = accessor.roles.all()
|
||||||
elif type(accessor) == Role:
|
elif type(accessor) == Role:
|
||||||
|
|||||||
@@ -31,6 +31,7 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
|
|||||||
from awx.main.notifications.grafana_backend import GrafanaBackend
|
from awx.main.notifications.grafana_backend import GrafanaBackend
|
||||||
from awx.main.notifications.rocketchat_backend import RocketChatBackend
|
from awx.main.notifications.rocketchat_backend import RocketChatBackend
|
||||||
from awx.main.notifications.irc_backend import IrcBackend
|
from awx.main.notifications.irc_backend import IrcBackend
|
||||||
|
from awx.main.notifications.awssns_backend import AWSSNSBackend
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('awx.main.models.notifications')
|
logger = logging.getLogger('awx.main.models.notifications')
|
||||||
@@ -40,6 +41,7 @@ __all__ = ['NotificationTemplate', 'Notification']
|
|||||||
|
|
||||||
class NotificationTemplate(CommonModelNameNotUnique):
|
class NotificationTemplate(CommonModelNameNotUnique):
|
||||||
NOTIFICATION_TYPES = [
|
NOTIFICATION_TYPES = [
|
||||||
|
('awssns', _('AWS SNS'), AWSSNSBackend),
|
||||||
('email', _('Email'), CustomEmailBackend),
|
('email', _('Email'), CustomEmailBackend),
|
||||||
('slack', _('Slack'), SlackBackend),
|
('slack', _('Slack'), SlackBackend),
|
||||||
('twilio', _('Twilio'), TwilioBackend),
|
('twilio', _('Twilio'), TwilioBackend),
|
||||||
@@ -394,11 +396,11 @@ class JobNotificationMixin(object):
|
|||||||
'verbosity': 0,
|
'verbosity': 0,
|
||||||
},
|
},
|
||||||
'job_friendly_name': 'Job',
|
'job_friendly_name': 'Job',
|
||||||
'url': 'https://towerhost/#/jobs/playbook/1010',
|
'url': 'https://platformhost/#/jobs/playbook/1010',
|
||||||
'approval_status': 'approved',
|
'approval_status': 'approved',
|
||||||
'approval_node_name': 'Approve Me',
|
'approval_node_name': 'Approve Me',
|
||||||
'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
|
'workflow_url': 'https://platformhost/#/jobs/workflow/1010',
|
||||||
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
|
'job_metadata': """{'url': 'https://platformhost/$/jobs/playbook/13',
|
||||||
'traceback': '',
|
'traceback': '',
|
||||||
'status': 'running',
|
'status': 'running',
|
||||||
'started': '2019-08-07T21:46:38.362630+00:00',
|
'started': '2019-08-07T21:46:38.362630+00:00',
|
||||||
|
|||||||
@@ -35,6 +35,12 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
|
|||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('name',)
|
ordering = ('name',)
|
||||||
|
permissions = [
|
||||||
|
('member_organization', 'Basic participation permissions for organization'),
|
||||||
|
('audit_organization', 'Audit everything inside the organization'),
|
||||||
|
]
|
||||||
|
# Remove add permission, only superuser can add
|
||||||
|
default_permissions = ('change', 'delete', 'view')
|
||||||
|
|
||||||
instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
|
instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
|
||||||
galaxy_credentials = OrderedManyToManyField(
|
galaxy_credentials = OrderedManyToManyField(
|
||||||
@@ -137,6 +143,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
|
|||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
unique_together = [('organization', 'name')]
|
unique_together = [('organization', 'name')]
|
||||||
ordering = ('organization__name', 'name')
|
ordering = ('organization__name', 'name')
|
||||||
|
permissions = [('member_team', 'Inherit all roles assigned to this team')]
|
||||||
|
|
||||||
organization = models.ForeignKey(
|
organization = models.ForeignKey(
|
||||||
'Organization',
|
'Organization',
|
||||||
|
|||||||
@@ -259,6 +259,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
|
|||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
ordering = ('id',)
|
ordering = ('id',)
|
||||||
|
permissions = [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')]
|
||||||
|
|
||||||
default_environment = models.ForeignKey(
|
default_environment = models.ForeignKey(
|
||||||
'ExecutionEnvironment',
|
'ExecutionEnvironment',
|
||||||
|
|||||||
@@ -7,14 +7,30 @@ import threading
|
|||||||
import contextlib
|
import contextlib
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
# django-rest-framework
|
||||||
|
from rest_framework.serializers import ValidationError
|
||||||
|
|
||||||
|
# crum to impersonate users
|
||||||
|
from crum import impersonate
|
||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.db import models, transaction, connection
|
from django.db import models, transaction, connection
|
||||||
|
from django.db.models.signals import m2m_changed
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
from django.contrib.contenttypes.models import ContentType
|
from django.contrib.contenttypes.models import ContentType
|
||||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django.apps import apps
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
# Ansible_base app
|
||||||
|
from ansible_base.rbac.models import RoleDefinition
|
||||||
|
from ansible_base.lib.utils.models import get_type_for_model
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
|
from awx.main.migrations._dab_rbac import build_role_map, get_permissions_for_role
|
||||||
|
from awx.main.constants import role_name_to_perm_mapping, org_role_to_permission
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
'Role',
|
'Role',
|
||||||
@@ -75,6 +91,11 @@ role_descriptions = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
to_permissions = {}
|
||||||
|
for k, v in role_name_to_perm_mapping.items():
|
||||||
|
to_permissions[k] = v[0].strip('_')
|
||||||
|
|
||||||
|
|
||||||
tls = threading.local() # thread local storage
|
tls = threading.local() # thread local storage
|
||||||
|
|
||||||
|
|
||||||
@@ -86,10 +107,8 @@ def check_singleton(func):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def wrapper(*args, **kwargs):
|
def wrapper(*args, **kwargs):
|
||||||
sys_admin = Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
|
|
||||||
sys_audit = Role.singleton(ROLE_SINGLETON_SYSTEM_AUDITOR)
|
|
||||||
user = args[0]
|
user = args[0]
|
||||||
if user in sys_admin or user in sys_audit:
|
if user.is_superuser or user.is_system_auditor:
|
||||||
if len(args) == 2:
|
if len(args) == 2:
|
||||||
return args[1]
|
return args[1]
|
||||||
return Role.objects.all()
|
return Role.objects.all()
|
||||||
@@ -169,6 +188,24 @@ class Role(models.Model):
|
|||||||
|
|
||||||
def __contains__(self, accessor):
|
def __contains__(self, accessor):
|
||||||
if accessor._meta.model_name == 'user':
|
if accessor._meta.model_name == 'user':
|
||||||
|
if accessor.is_superuser:
|
||||||
|
return True
|
||||||
|
if self.role_field == 'system_administrator':
|
||||||
|
return accessor.is_superuser
|
||||||
|
elif self.role_field == 'system_auditor':
|
||||||
|
return accessor.is_system_auditor
|
||||||
|
elif self.role_field in ('read_role', 'auditor_role') and accessor.is_system_auditor:
|
||||||
|
return True
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
if self.content_object and self.content_object._meta.model_name == 'organization' and self.role_field in org_role_to_permission:
|
||||||
|
codename = org_role_to_permission[self.role_field]
|
||||||
|
|
||||||
|
return accessor.has_obj_perm(self.content_object, codename)
|
||||||
|
|
||||||
|
if self.role_field not in to_permissions:
|
||||||
|
raise Exception(f'{self.role_field} evaluated but not a translatable permission')
|
||||||
|
return accessor.has_obj_perm(self.content_object, to_permissions[self.role_field])
|
||||||
return self.ancestors.filter(members=accessor).exists()
|
return self.ancestors.filter(members=accessor).exists()
|
||||||
else:
|
else:
|
||||||
raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')
|
raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')
|
||||||
@@ -280,6 +317,9 @@ class Role(models.Model):
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return
|
||||||
|
|
||||||
if len(additions) == 0 and len(removals) == 0:
|
if len(additions) == 0 and len(removals) == 0:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -412,6 +452,12 @@ class Role(models.Model):
|
|||||||
in their organization, but some of those roles descend from
|
in their organization, but some of those roles descend from
|
||||||
organization admin_role, but not auditor_role.
|
organization admin_role, but not auditor_role.
|
||||||
"""
|
"""
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
from ansible_base.rbac.models import RoleEvaluation
|
||||||
|
|
||||||
|
q = RoleEvaluation.objects.filter(role__in=user.has_roles.all()).values_list('object_id', 'content_type_id').query
|
||||||
|
return roles_qs.extra(where=[f'(object_id,content_type_id) in ({q})'])
|
||||||
|
|
||||||
return roles_qs.filter(
|
return roles_qs.filter(
|
||||||
id__in=RoleAncestorEntry.objects.filter(
|
id__in=RoleAncestorEntry.objects.filter(
|
||||||
descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
|
descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
|
||||||
@@ -434,6 +480,13 @@ class Role(models.Model):
|
|||||||
return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]
|
return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]
|
||||||
|
|
||||||
|
|
||||||
|
class AncestorManager(models.Manager):
|
||||||
|
def get_queryset(self):
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
raise RuntimeError('The old RBAC system has been disabled, this should never be called')
|
||||||
|
return super(AncestorManager, self).get_queryset()
|
||||||
|
|
||||||
|
|
||||||
class RoleAncestorEntry(models.Model):
|
class RoleAncestorEntry(models.Model):
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
@@ -451,6 +504,8 @@ class RoleAncestorEntry(models.Model):
|
|||||||
content_type_id = models.PositiveIntegerField(null=False)
|
content_type_id = models.PositiveIntegerField(null=False)
|
||||||
object_id = models.PositiveIntegerField(null=False)
|
object_id = models.PositiveIntegerField(null=False)
|
||||||
|
|
||||||
|
objects = AncestorManager()
|
||||||
|
|
||||||
|
|
||||||
def role_summary_fields_generator(content_object, role_field):
|
def role_summary_fields_generator(content_object, role_field):
|
||||||
global role_descriptions
|
global role_descriptions
|
||||||
@@ -479,3 +534,185 @@ def role_summary_fields_generator(content_object, role_field):
|
|||||||
summary['name'] = role_names[role_field]
|
summary['name'] = role_names[role_field]
|
||||||
summary['id'] = getattr(content_object, '{}_id'.format(role_field))
|
summary['id'] = getattr(content_object, '{}_id'.format(role_field))
|
||||||
return summary
|
return summary
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------- Custom Role Compatibility -------------------------
|
||||||
|
# The following are methods to connect this (old) RBAC system to the new
|
||||||
|
# system which allows custom roles
|
||||||
|
# this follows the ORM interface layer documented in docs/rbac.md
|
||||||
|
def get_role_codenames(role):
|
||||||
|
obj = role.content_object
|
||||||
|
if obj is None:
|
||||||
|
return
|
||||||
|
f = obj._meta.get_field(role.role_field)
|
||||||
|
parents, children = build_role_map(apps)
|
||||||
|
return [perm.codename for perm in get_permissions_for_role(f, children, apps)]
|
||||||
|
|
||||||
|
|
||||||
|
def get_role_definition(role):
|
||||||
|
"""Given a old-style role, this gives a role definition in the new RBAC system for it"""
|
||||||
|
obj = role.content_object
|
||||||
|
if obj is None:
|
||||||
|
return
|
||||||
|
f = obj._meta.get_field(role.role_field)
|
||||||
|
action_name = f.name.rsplit("_", 1)[0]
|
||||||
|
model_print = type(obj).__name__
|
||||||
|
rd_name = f'{model_print} {action_name.title()} Compat'
|
||||||
|
perm_list = get_role_codenames(role)
|
||||||
|
defaults = {
|
||||||
|
'content_type_id': role.content_type_id,
|
||||||
|
'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
|
||||||
|
}
|
||||||
|
with impersonate(None):
|
||||||
|
try:
|
||||||
|
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
|
||||||
|
except ValidationError:
|
||||||
|
# This is a tricky case - practically speaking, users should not be allowed to create team roles
|
||||||
|
# or roles that include the team member permission.
|
||||||
|
# If we need to create this for compatibility purposes then we will create it as a managed non-editable role
|
||||||
|
defaults['managed'] = True
|
||||||
|
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
|
def get_role_from_object_role(object_role):
|
||||||
|
"""
|
||||||
|
Given an object role from the new system, return the corresponding role from the old system
|
||||||
|
reverses naming from get_role_definition, and the ANSIBLE_BASE_ROLE_PRECREATE setting.
|
||||||
|
"""
|
||||||
|
rd = object_role.role_definition
|
||||||
|
if rd.name.endswith(' Compat'):
|
||||||
|
model_name, role_name, _ = rd.name.split()
|
||||||
|
role_name = role_name.lower()
|
||||||
|
role_name += '_role'
|
||||||
|
elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2:
|
||||||
|
# cases like "Organization Project Admin"
|
||||||
|
model_name, target_model_name, role_name = rd.name.split()
|
||||||
|
role_name = role_name.lower()
|
||||||
|
model_cls = apps.get_model('main', target_model_name)
|
||||||
|
target_model_name = get_type_for_model(model_cls)
|
||||||
|
|
||||||
|
# exception cases completely specific to one model naming convention
|
||||||
|
if target_model_name == 'notification_template':
|
||||||
|
target_model_name = 'notification'
|
||||||
|
elif target_model_name == 'workflow_job_template':
|
||||||
|
target_model_name = 'workflow'
|
||||||
|
|
||||||
|
role_name = f'{target_model_name}_admin_role'
|
||||||
|
elif rd.name.endswith(' Admin'):
|
||||||
|
# cases like "project-admin"
|
||||||
|
role_name = 'admin_role'
|
||||||
|
elif rd.name == 'Organization Audit':
|
||||||
|
role_name = 'auditor_role'
|
||||||
|
else:
|
||||||
|
model_name, role_name = rd.name.split()
|
||||||
|
role_name = role_name.lower()
|
||||||
|
role_name += '_role'
|
||||||
|
return getattr(object_role.content_object, role_name)
|
||||||
|
|
||||||
|
|
||||||
|
def give_or_remove_permission(role, actor, giving=True):
|
||||||
|
obj = role.content_object
|
||||||
|
if obj is None:
|
||||||
|
return
|
||||||
|
rd = get_role_definition(role)
|
||||||
|
rd.give_or_remove_permission(actor, obj, giving=giving)
|
||||||
|
|
||||||
|
|
||||||
|
class SyncEnabled(threading.local):
|
||||||
|
def __init__(self):
|
||||||
|
self.enabled = True
|
||||||
|
|
||||||
|
|
||||||
|
rbac_sync_enabled = SyncEnabled()
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def disable_rbac_sync():
|
||||||
|
try:
|
||||||
|
previous_value = rbac_sync_enabled.enabled
|
||||||
|
rbac_sync_enabled.enabled = False
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
rbac_sync_enabled.enabled = previous_value
|
||||||
|
|
||||||
|
|
||||||
|
def give_creator_permissions(user, obj):
|
||||||
|
assignment = RoleDefinition.objects.give_creator_permissions(user, obj)
|
||||||
|
if assignment:
|
||||||
|
with disable_rbac_sync():
|
||||||
|
old_role = get_role_from_object_role(assignment.object_role)
|
||||||
|
old_role.members.add(user)
|
||||||
|
|
||||||
|
|
||||||
|
def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
|
||||||
|
if action.startswith('pre_'):
|
||||||
|
return
|
||||||
|
if not rbac_sync_enabled.enabled:
|
||||||
|
return
|
||||||
|
|
||||||
|
if action == 'post_add':
|
||||||
|
is_giving = True
|
||||||
|
elif action == 'post_remove':
|
||||||
|
is_giving = False
|
||||||
|
elif action == 'post_clear':
|
||||||
|
raise RuntimeError('Clearing of role members not supported')
|
||||||
|
|
||||||
|
if reverse:
|
||||||
|
user = instance
|
||||||
|
else:
|
||||||
|
role = instance
|
||||||
|
|
||||||
|
for user_or_role_id in pk_set:
|
||||||
|
if reverse:
|
||||||
|
role = Role.objects.get(pk=user_or_role_id)
|
||||||
|
else:
|
||||||
|
user = get_user_model().objects.get(pk=user_or_role_id)
|
||||||
|
give_or_remove_permission(role, user, giving=is_giving)
|
||||||
|
|
||||||
|
|
||||||
|
def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
|
||||||
|
if action.startswith('pre_'):
|
||||||
|
return
|
||||||
|
|
||||||
|
if action == 'post_add':
|
||||||
|
is_giving = True
|
||||||
|
elif action == 'post_remove':
|
||||||
|
is_giving = False
|
||||||
|
elif action == 'post_clear':
|
||||||
|
raise RuntimeError('Clearing of role members not supported')
|
||||||
|
|
||||||
|
if reverse:
|
||||||
|
parent_role = instance
|
||||||
|
else:
|
||||||
|
child_role = instance
|
||||||
|
|
||||||
|
for role_id in pk_set:
|
||||||
|
if reverse:
|
||||||
|
try:
|
||||||
|
child_role = Role.objects.get(id=role_id)
|
||||||
|
except Role.DoesNotExist:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
parent_role = Role.objects.get(id=role_id)
|
||||||
|
except Role.DoesNotExist:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# To a fault, we want to avoid running this if triggered from implicit_parents management
|
||||||
|
# we only want to do anything if we know for sure this is a non-implicit team role
|
||||||
|
if parent_role.role_field == 'member_role' and parent_role.content_type.model == 'team':
|
||||||
|
# Team internal parents are member_role->read_role and admin_role->member_role
|
||||||
|
# for the same object, this parenting will also be implicit_parents management
|
||||||
|
# do nothing for internal parents, but OTHER teams may still be assigned permissions to a team
|
||||||
|
if (child_role.content_type_id == parent_role.content_type_id) and (child_role.object_id == parent_role.object_id):
|
||||||
|
return
|
||||||
|
|
||||||
|
from awx.main.models.organization import Team
|
||||||
|
|
||||||
|
team = Team.objects.get(pk=parent_role.object_id)
|
||||||
|
give_or_remove_permission(child_role, team, giving=is_giving)
|
||||||
|
|
||||||
|
|
||||||
|
m2m_changed.connect(sync_members_to_new_rbac, Role.members.through)
|
||||||
|
m2m_changed.connect(sync_parents_to_new_rbac, Role.parents.through)
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ from collections import OrderedDict
|
|||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import models, connection
|
from django.db import models, connection, transaction
|
||||||
from django.core.exceptions import NON_FIELD_ERRORS
|
from django.core.exceptions import NON_FIELD_ERRORS
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
@@ -31,13 +31,15 @@ from rest_framework.exceptions import ParseError
|
|||||||
from polymorphic.models import PolymorphicModel
|
from polymorphic.models import PolymorphicModel
|
||||||
|
|
||||||
from ansible_base.lib.utils.models import prevent_search, get_type_for_model
|
from ansible_base.lib.utils.models import prevent_search, get_type_for_model
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
|
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
|
||||||
from awx.main.dispatch import get_task_queuename
|
from awx.main.dispatch import get_task_queuename
|
||||||
from awx.main.dispatch.control import Control as ControlDispatcher
|
from awx.main.dispatch.control import Control as ControlDispatcher
|
||||||
from awx.main.registrar import activity_stream_registrar
|
from awx.main.registrar import activity_stream_registrar
|
||||||
from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
|
from awx.main.models.mixins import TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
|
||||||
|
from awx.main.models.rbac import to_permissions
|
||||||
from awx.main.utils.common import (
|
from awx.main.utils.common import (
|
||||||
camelcase_to_underscore,
|
camelcase_to_underscore,
|
||||||
get_model_for_type,
|
get_model_for_type,
|
||||||
@@ -196,9 +198,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _submodels_with_roles(cls):
|
def _submodels_with_roles(cls):
|
||||||
ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
|
return [c for c in cls.__subclasses__() if permission_registry.is_registered(c)]
|
||||||
ct_dict = ContentType.objects.get_for_models(*ujt_classes)
|
|
||||||
return [ct.id for ct in ct_dict.values()]
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def accessible_pk_qs(cls, accessor, role_field):
|
def accessible_pk_qs(cls, accessor, role_field):
|
||||||
@@ -210,7 +210,23 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
|||||||
# do not use this if in a subclass
|
# do not use this if in a subclass
|
||||||
if cls != UnifiedJobTemplate:
|
if cls != UnifiedJobTemplate:
|
||||||
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
|
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
|
||||||
return ResourceMixin._accessible_pk_qs(cls, accessor, role_field, content_types=cls._submodels_with_roles())
|
from ansible_base.rbac.models import RoleEvaluation
|
||||||
|
|
||||||
|
action = to_permissions[role_field]
|
||||||
|
|
||||||
|
# Special condition for super auditor
|
||||||
|
role_subclasses = cls._submodels_with_roles()
|
||||||
|
role_cts = ContentType.objects.get_for_models(*role_subclasses).values()
|
||||||
|
all_codenames = {f'{action}_{cls._meta.model_name}' for cls in role_subclasses}
|
||||||
|
if not (all_codenames - accessor.singleton_permissions()):
|
||||||
|
qs = cls.objects.filter(polymorphic_ctype__in=role_cts)
|
||||||
|
return qs.values_list('id', flat=True)
|
||||||
|
|
||||||
|
return (
|
||||||
|
RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__in=all_codenames, content_type_id__in=[ct.id for ct in role_cts])
|
||||||
|
.values_list('object_id')
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
def _perform_unique_checks(self, unique_checks):
|
def _perform_unique_checks(self, unique_checks):
|
||||||
# Handle the list of unique fields returned above. Replace with an
|
# Handle the list of unique fields returned above. Replace with an
|
||||||
@@ -264,7 +280,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
|||||||
if new_next_schedule:
|
if new_next_schedule:
|
||||||
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
|
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
|
||||||
return # no-op, common for infrequent schedules
|
return # no-op, common for infrequent schedules
|
||||||
self.next_schedule = new_next_schedule
|
|
||||||
|
# If in a transaction, use select_for_update to lock the next schedule row, which
|
||||||
|
# prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
|
||||||
|
if transaction.get_autocommit():
|
||||||
|
self.next_schedule = related_schedules.first()
|
||||||
|
else:
|
||||||
|
self.next_schedule = related_schedules.select_for_update().first()
|
||||||
|
|
||||||
self.next_job_run = new_next_schedule.next_run
|
self.next_job_run = new_next_schedule.next_run
|
||||||
self.save(update_fields=['next_schedule', 'next_job_run'])
|
self.save(update_fields=['next_schedule', 'next_job_run'])
|
||||||
|
|
||||||
@@ -814,7 +837,7 @@ class UnifiedJob(
|
|||||||
update_fields.append(key)
|
update_fields.append(key)
|
||||||
|
|
||||||
if parent_instance:
|
if parent_instance:
|
||||||
if self.status in ('pending', 'waiting', 'running'):
|
if self.status in ('pending', 'running'):
|
||||||
if parent_instance.current_job != self:
|
if parent_instance.current_job != self:
|
||||||
parent_instance_set('current_job', self)
|
parent_instance_set('current_job', self)
|
||||||
# Update parent with all the 'good' states of it's child
|
# Update parent with all the 'good' states of it's child
|
||||||
@@ -851,7 +874,7 @@ class UnifiedJob(
|
|||||||
# If this job already exists in the database, retrieve a copy of
|
# If this job already exists in the database, retrieve a copy of
|
||||||
# the job in its prior state.
|
# the job in its prior state.
|
||||||
# If update_fields are given without status, then that indicates no change
|
# If update_fields are given without status, then that indicates no change
|
||||||
if self.pk and ((not update_fields) or ('status' in update_fields)):
|
if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
|
||||||
self_before = self.__class__.objects.get(pk=self.pk)
|
self_before = self.__class__.objects.get(pk=self.pk)
|
||||||
if self_before.status != self.status:
|
if self_before.status != self.status:
|
||||||
status_before = self_before.status
|
status_before = self_before.status
|
||||||
@@ -893,7 +916,8 @@ class UnifiedJob(
|
|||||||
update_fields.append('elapsed')
|
update_fields.append('elapsed')
|
||||||
|
|
||||||
# Ensure that the job template information is current.
|
# Ensure that the job template information is current.
|
||||||
if self.unified_job_template != self._get_parent_instance():
|
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||||
|
if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
|
||||||
self.unified_job_template = self._get_parent_instance()
|
self.unified_job_template = self._get_parent_instance()
|
||||||
if 'unified_job_template' not in update_fields:
|
if 'unified_job_template' not in update_fields:
|
||||||
update_fields.append('unified_job_template')
|
update_fields.append('unified_job_template')
|
||||||
@@ -906,8 +930,9 @@ class UnifiedJob(
|
|||||||
# Okay; we're done. Perform the actual save.
|
# Okay; we're done. Perform the actual save.
|
||||||
result = super(UnifiedJob, self).save(*args, **kwargs)
|
result = super(UnifiedJob, self).save(*args, **kwargs)
|
||||||
|
|
||||||
# If status changed, update the parent instance.
|
# If status changed, update the parent instance
|
||||||
if self.status != status_before:
|
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||||
|
if self.status != status_before and self.status != 'waiting':
|
||||||
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
|
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
|
||||||
# This dodges lock contention at the expense of the foreign key not being
|
# This dodges lock contention at the expense of the foreign key not being
|
||||||
# completely correct.
|
# completely correct.
|
||||||
|
|||||||
@@ -467,6 +467,10 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
app_label = 'main'
|
app_label = 'main'
|
||||||
|
permissions = [
|
||||||
|
('execute_workflowjobtemplate', 'Can run this workflow job template'),
|
||||||
|
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
|
||||||
|
]
|
||||||
|
|
||||||
notification_templates_approvals = models.ManyToManyField(
|
notification_templates_approvals = models.ManyToManyField(
|
||||||
"NotificationTemplate",
|
"NotificationTemplate",
|
||||||
|
|||||||
70
awx/main/notifications/awssns_backend.py
Normal file
70
awx/main/notifications/awssns_backend.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
# Copyright (c) 2016 Ansible, Inc.
|
||||||
|
# All Rights Reserved.
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import boto3
|
||||||
|
from botocore.exceptions import ClientError
|
||||||
|
|
||||||
|
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||||
|
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||||
|
|
||||||
|
logger = logging.getLogger('awx.main.notifications.awssns_backend')
|
||||||
|
WEBSOCKET_TIMEOUT = 30
|
||||||
|
|
||||||
|
|
||||||
|
class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||||
|
init_parameters = {
|
||||||
|
"aws_region": {"label": "AWS Region", "type": "string", "default": ""},
|
||||||
|
"aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
|
||||||
|
"aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
|
||||||
|
"aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
|
||||||
|
"sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
|
||||||
|
}
|
||||||
|
recipient_parameter = "sns_topic_arn"
|
||||||
|
sender_parameter = None
|
||||||
|
|
||||||
|
DEFAULT_BODY = "{{ job_metadata }}"
|
||||||
|
default_messages = CustomNotificationBase.job_metadata_messages
|
||||||
|
|
||||||
|
def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
|
||||||
|
session = boto3.session.Session()
|
||||||
|
client_config = {"service_name": 'sns'}
|
||||||
|
if aws_region:
|
||||||
|
client_config["region_name"] = aws_region
|
||||||
|
if aws_secret_access_key:
|
||||||
|
client_config["aws_secret_access_key"] = aws_secret_access_key
|
||||||
|
if aws_access_key_id:
|
||||||
|
client_config["aws_access_key_id"] = aws_access_key_id
|
||||||
|
if aws_session_token:
|
||||||
|
client_config["aws_session_token"] = aws_session_token
|
||||||
|
self.client = session.client(**client_config)
|
||||||
|
super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
|
||||||
|
|
||||||
|
def _sns_publish(self, topic_arn, message):
|
||||||
|
self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
|
||||||
|
|
||||||
|
def format_body(self, body):
|
||||||
|
if isinstance(body, str):
|
||||||
|
try:
|
||||||
|
body = json.loads(body)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if isinstance(body, dict):
|
||||||
|
body = json.dumps(body)
|
||||||
|
# convert dict body to json string
|
||||||
|
return body
|
||||||
|
|
||||||
|
def send_messages(self, messages):
|
||||||
|
sent_messages = 0
|
||||||
|
for message in messages:
|
||||||
|
sns_topic_arn = str(message.recipients()[0])
|
||||||
|
try:
|
||||||
|
self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
|
||||||
|
sent_messages += 1
|
||||||
|
except ClientError as error:
|
||||||
|
if not self.fail_silently:
|
||||||
|
raise error
|
||||||
|
|
||||||
|
return sent_messages
|
||||||
@@ -32,3 +32,15 @@ class CustomNotificationBase(object):
|
|||||||
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
|
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
job_metadata_messages = {
|
||||||
|
"started": {"body": "{{ job_metadata }}"},
|
||||||
|
"success": {"body": "{{ job_metadata }}"},
|
||||||
|
"error": {"body": "{{ job_metadata }}"},
|
||||||
|
"workflow_approval": {
|
||||||
|
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
||||||
|
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
||||||
|
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
||||||
|
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|||||||
@@ -27,17 +27,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
|||||||
sender_parameter = None
|
sender_parameter = None
|
||||||
|
|
||||||
DEFAULT_BODY = "{{ job_metadata }}"
|
DEFAULT_BODY = "{{ job_metadata }}"
|
||||||
default_messages = {
|
default_messages = CustomNotificationBase.job_metadata_messages
|
||||||
"started": {"body": DEFAULT_BODY},
|
|
||||||
"success": {"body": DEFAULT_BODY},
|
|
||||||
"error": {"body": DEFAULT_BODY},
|
|
||||||
"workflow_approval": {
|
|
||||||
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
|
||||||
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
|
||||||
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
|
||||||
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
|
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
|
||||||
self.http_method = http_method
|
self.http_method = http_method
|
||||||
|
|||||||
@@ -63,6 +63,10 @@ websocket_urlpatterns = [
|
|||||||
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
|
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
|
||||||
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
|
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
|
||||||
|
websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
|
||||||
|
|
||||||
websocket_relay_urlpatterns = [
|
websocket_relay_urlpatterns = [
|
||||||
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -138,7 +138,8 @@ class TaskBase:
|
|||||||
|
|
||||||
# Lock
|
# Lock
|
||||||
with task_manager_bulk_reschedule():
|
with task_manager_bulk_reschedule():
|
||||||
with advisory_lock(f"{self.prefix}_lock", wait=False) as acquired:
|
lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000 # convert to milliseconds
|
||||||
|
with advisory_lock(f"{self.prefix}_lock", lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
if acquired is False:
|
if acquired is False:
|
||||||
logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
|
logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
|
||||||
|
|||||||
@@ -126,6 +126,8 @@ def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwarg
|
|||||||
|
|
||||||
def sync_superuser_status_to_rbac(instance, **kwargs):
|
def sync_superuser_status_to_rbac(instance, **kwargs):
|
||||||
'When the is_superuser flag is changed on a user, reflect that in the membership of the System Admnistrator role'
|
'When the is_superuser flag is changed on a user, reflect that in the membership of the System Admnistrator role'
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return
|
||||||
update_fields = kwargs.get('update_fields', None)
|
update_fields = kwargs.get('update_fields', None)
|
||||||
if update_fields and 'is_superuser' not in update_fields:
|
if update_fields and 'is_superuser' not in update_fields:
|
||||||
return
|
return
|
||||||
@@ -137,6 +139,8 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
|
|||||||
|
|
||||||
def sync_rbac_to_superuser_status(instance, sender, **kwargs):
|
def sync_rbac_to_superuser_status(instance, sender, **kwargs):
|
||||||
'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
|
'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
|
||||||
|
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||||
|
return
|
||||||
if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
|
if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
|
||||||
new_status_value = bool(kwargs['action'] == 'post_add')
|
new_status_value = bool(kwargs['action'] == 'post_add')
|
||||||
if hasattr(instance, 'singleton_name'): # duck typing, role.members.add() vs user.roles.add()
|
if hasattr(instance, 'singleton_name'): # duck typing, role.members.add() vs user.roles.add()
|
||||||
|
|||||||
@@ -405,10 +405,11 @@ class AWXReceptorJob:
|
|||||||
finally:
|
finally:
|
||||||
# Make sure to always release the work unit if we established it
|
# Make sure to always release the work unit if we established it
|
||||||
if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
|
if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
|
||||||
try:
|
if settings.RECPETOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error':
|
||||||
receptor_ctl.simple_command(f"work release {self.unit_id}")
|
try:
|
||||||
except Exception:
|
receptor_ctl.simple_command(f"work release {self.unit_id}")
|
||||||
logger.exception(f"Error releasing work unit {self.unit_id}.")
|
except Exception:
|
||||||
|
logger.exception(f"Error releasing work unit {self.unit_id}.")
|
||||||
|
|
||||||
def _run_internal(self, receptor_ctl):
|
def _run_internal(self, receptor_ctl):
|
||||||
# Create a socketpair. Where the left side will be used for writing our payload
|
# Create a socketpair. Where the left side will be used for writing our payload
|
||||||
|
|||||||
@@ -36,6 +36,9 @@ import ansible_runner.cleanup
|
|||||||
# dateutil
|
# dateutil
|
||||||
from dateutil.parser import parse as parse_date
|
from dateutil.parser import parse as parse_date
|
||||||
|
|
||||||
|
# django-ansible-base
|
||||||
|
from ansible_base.resource_registry.tasks.sync import SyncExecutor
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx import __version__ as awx_application_version
|
from awx import __version__ as awx_application_version
|
||||||
from awx.main.access import access_registry
|
from awx.main.access import access_registry
|
||||||
@@ -51,7 +54,7 @@ from awx.main.models import (
|
|||||||
Job,
|
Job,
|
||||||
convert_jsonfields,
|
convert_jsonfields,
|
||||||
)
|
)
|
||||||
from awx.main.constants import ACTIVE_STATES
|
from awx.main.constants import ACTIVE_STATES, ERROR_STATES
|
||||||
from awx.main.dispatch.publish import task
|
from awx.main.dispatch.publish import task
|
||||||
from awx.main.dispatch import get_task_queuename, reaper
|
from awx.main.dispatch import get_task_queuename, reaper
|
||||||
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
|
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
|
||||||
@@ -682,6 +685,8 @@ def awx_receptor_workunit_reaper():
|
|||||||
|
|
||||||
unit_ids = [id for id in receptor_work_list]
|
unit_ids = [id for id in receptor_work_list]
|
||||||
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
|
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
|
||||||
|
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
|
||||||
|
jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
|
||||||
for job in jobs_with_unreleased_receptor_units:
|
for job in jobs_with_unreleased_receptor_units:
|
||||||
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
|
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
|
||||||
receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
|
receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
|
||||||
@@ -701,7 +706,10 @@ def awx_k8s_reaper():
|
|||||||
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
|
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
|
||||||
pods = PodManager.list_active_jobs(group)
|
pods = PodManager.list_active_jobs(group)
|
||||||
time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
|
time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
|
||||||
for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
|
reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
|
||||||
|
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
|
||||||
|
reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
|
||||||
|
for job in reap_job_candidates:
|
||||||
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
|
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
|
||||||
try:
|
try:
|
||||||
pm = PodManager(job)
|
pm = PodManager(job)
|
||||||
@@ -712,7 +720,8 @@ def awx_k8s_reaper():
|
|||||||
|
|
||||||
@task(queue=get_task_queuename)
|
@task(queue=get_task_queuename)
|
||||||
def awx_periodic_scheduler():
|
def awx_periodic_scheduler():
|
||||||
with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
|
lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
|
||||||
|
with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
|
||||||
if acquired is False:
|
if acquired is False:
|
||||||
logger.debug("Not running periodic scheduler, another task holds lock")
|
logger.debug("Not running periodic scheduler, another task holds lock")
|
||||||
return
|
return
|
||||||
@@ -964,3 +973,27 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
|
|||||||
permission_check_func(creater, copy_mapping.values())
|
permission_check_func(creater, copy_mapping.values())
|
||||||
if isinstance(new_obj, Inventory):
|
if isinstance(new_obj, Inventory):
|
||||||
update_inventory_computed_fields.delay(new_obj.id)
|
update_inventory_computed_fields.delay(new_obj.id)
|
||||||
|
|
||||||
|
|
||||||
|
@task(queue=get_task_queuename)
|
||||||
|
def periodic_resource_sync():
|
||||||
|
if not getattr(settings, 'RESOURCE_SERVER', None):
|
||||||
|
logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
|
||||||
|
return
|
||||||
|
|
||||||
|
with advisory_lock('periodic_resource_sync', wait=False) as acquired:
|
||||||
|
if acquired is False:
|
||||||
|
logger.debug("Not running periodic_resource_sync, another task holds lock")
|
||||||
|
return
|
||||||
|
logger.debug("Running periodic resource sync")
|
||||||
|
|
||||||
|
executor = SyncExecutor()
|
||||||
|
executor.run()
|
||||||
|
for key, item_list in executor.results.items():
|
||||||
|
if not item_list or key == 'noop':
|
||||||
|
continue
|
||||||
|
# Log creations and conflicts
|
||||||
|
if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
|
||||||
|
logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
|
||||||
|
else:
|
||||||
|
logger.info(f'Periodic resource sync {key}:\n{item_list}')
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"K8S_AUTH_HOST": "https://foo.invalid",
|
||||||
|
"K8S_AUTH_API_KEY": "fooo",
|
||||||
|
"K8S_AUTH_VERIFY_SSL": "False"
|
||||||
|
}
|
||||||
@@ -1,3 +1,3 @@
|
|||||||
{
|
{
|
||||||
"TF_BACKEND_CONFIG_FILE": "{{ file_reference }}"
|
"GOOGLE_BACKEND_CREDENTIALS": "{{ file_reference }}"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -99,7 +99,7 @@ class TestSwaggerGeneration:
|
|||||||
# The number of API endpoints changes over time, but let's just check
|
# The number of API endpoints changes over time, but let's just check
|
||||||
# for a reasonable number here; if this test starts failing, raise/lower the bounds
|
# for a reasonable number here; if this test starts failing, raise/lower the bounds
|
||||||
paths = JSON['paths']
|
paths = JSON['paths']
|
||||||
assert 250 < len(paths) < 375
|
assert 250 < len(paths) < 400
|
||||||
assert set(list(paths['/api/'].keys())) == set(['get', 'parameters'])
|
assert set(list(paths['/api/'].keys())) == set(['get', 'parameters'])
|
||||||
assert set(list(paths['/api/v2/'].keys())) == set(['get', 'parameters'])
|
assert set(list(paths['/api/v2/'].keys())) == set(['get', 'parameters'])
|
||||||
assert set(list(sorted(paths['/api/v2/credentials/'].keys()))) == set(['get', 'post', 'parameters'])
|
assert set(list(sorted(paths['/api/v2/credentials/'].keys()))) == set(['get', 'post', 'parameters'])
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ from prometheus_client.parser import text_string_to_metric_families
|
|||||||
from awx.main import models
|
from awx.main import models
|
||||||
from awx.main.analytics.metrics import metrics
|
from awx.main.analytics.metrics import metrics
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
from awx.main.models.rbac import Role
|
|
||||||
|
|
||||||
EXPECTED_VALUES = {
|
EXPECTED_VALUES = {
|
||||||
'awx_system_info': 1.0,
|
'awx_system_info': 1.0,
|
||||||
@@ -66,7 +65,6 @@ def test_metrics_permissions(get, admin, org_admin, alice, bob, organization):
|
|||||||
organization.auditor_role.members.add(bob)
|
organization.auditor_role.members.add(bob)
|
||||||
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
|
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
|
||||||
|
|
||||||
Role.singleton('system_auditor').members.add(bob)
|
|
||||||
bob.is_system_auditor = True
|
bob.is_system_auditor = True
|
||||||
assert get(get_metrics_view_db_only(), user=bob).status_code == 200
|
assert get(get_metrics_view_db_only(), user=bob).status_code == 200
|
||||||
|
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
|
|||||||
role_pk = inventory.admin_role.pk
|
role_pk = inventory.admin_role.pk
|
||||||
data = {"id": role_pk}
|
data = {"id": role_pk}
|
||||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||||
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
||||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
|
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
|
||||||
|
|
||||||
|
|
||||||
@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
|
|||||||
role_pk = inventory.admin_role.pk
|
role_pk = inventory.admin_role.pk
|
||||||
data = {"id": role_pk}
|
data = {"id": role_pk}
|
||||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||||
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
||||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||||
|
|
||||||
|
|
||||||
@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
|
|||||||
role_pk = inventory.admin_role.pk
|
role_pk = inventory.admin_role.pk
|
||||||
data = {"id": team.pk}
|
data = {"id": team.pk}
|
||||||
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
|
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
|
||||||
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
|
mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
|
||||||
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
||||||
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ def test_idempotent_credential_type_setup():
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh):
|
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
|
||||||
params = {
|
params = {
|
||||||
'credential_type': 1,
|
'credential_type': 1,
|
||||||
'inputs': {'username': 'someusername'},
|
'inputs': {'username': 'someusername'},
|
||||||
@@ -81,7 +81,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh):
|
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
|
||||||
params = {
|
params = {
|
||||||
'credential_type': 1,
|
'credential_type': 1,
|
||||||
'inputs': {'username': 'someusername'},
|
'inputs': {'username': 'someusername'},
|
||||||
@@ -385,10 +385,9 @@ def test_list_created_org_credentials(post, get, organization, org_admin, org_me
|
|||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by):
|
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by):
|
||||||
for i, password in enumerate(('abc', 'def', 'xyz')):
|
for i, password in enumerate(('abc', 'def', 'xyz')):
|
||||||
response = post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin)
|
post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin, expect=400)
|
||||||
|
|
||||||
response = get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, status=400)
|
get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, expect=400)
|
||||||
assert response.status_code == 400
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@@ -399,8 +398,7 @@ def test_inputs_cannot_contain_extra_fields(get, post, organization, admin, cred
|
|||||||
'credential_type': credentialtype_ssh.pk,
|
'credential_type': credentialtype_ssh.pk,
|
||||||
'inputs': {'invalid_field': 'foo'},
|
'inputs': {'invalid_field': 'foo'},
|
||||||
}
|
}
|
||||||
response = post(reverse('api:credential_list'), params, admin)
|
response = post(reverse('api:credential_list'), params, admin, expect=400)
|
||||||
assert response.status_code == 400
|
|
||||||
assert "'invalid_field' was unexpected" in response.data['inputs'][0]
|
assert "'invalid_field' was unexpected" in response.data['inputs'][0]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,22 +1,30 @@
|
|||||||
import pytest
|
import pytest
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
|
|
||||||
|
from django.test.utils import override_settings
|
||||||
|
|
||||||
|
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
|
||||||
|
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
|
||||||
|
|
||||||
|
|
||||||
|
class HeaderTrackingMiddleware(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.environ = {}
|
||||||
|
|
||||||
|
def process_request(self, request):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def process_response(self, request, response):
|
||||||
|
self.environ = request.environ
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_proxy_ip_allowed(get, patch, admin):
|
def test_proxy_ip_allowed(get, patch, admin):
|
||||||
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
||||||
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
||||||
|
|
||||||
class HeaderTrackingMiddleware(object):
|
|
||||||
environ = {}
|
|
||||||
|
|
||||||
def process_request(self, request):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def process_response(self, request, response):
|
|
||||||
self.environ = request.environ
|
|
||||||
|
|
||||||
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
|
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
|
||||||
# should just pass through
|
# should just pass through
|
||||||
middleware = HeaderTrackingMiddleware()
|
middleware = HeaderTrackingMiddleware()
|
||||||
@@ -45,6 +53,51 @@ def test_proxy_ip_allowed(get, patch, admin):
|
|||||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
class TestTrustedProxyAllowListIntegration:
|
||||||
|
@pytest.fixture
|
||||||
|
def url(self, patch, admin):
|
||||||
|
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
||||||
|
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
||||||
|
patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
|
||||||
|
return url
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def middleware(self):
|
||||||
|
return HeaderTrackingMiddleware()
|
||||||
|
|
||||||
|
def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware): # noqa: F811
|
||||||
|
# Headers should NOT get deleted
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||||
|
}
|
||||||
|
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||||
|
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
|
||||||
|
get(url, user=admin, middleware=middleware, **headers)
|
||||||
|
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||||
|
|
||||||
|
def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
|
||||||
|
# Headers should NOT get deleted
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||||
|
}
|
||||||
|
with override_settings(PROXY_IP_ALLOWED_LIST=[]):
|
||||||
|
get(url, user=admin, middleware=middleware, **headers)
|
||||||
|
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||||
|
|
||||||
|
def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
|
||||||
|
# A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
|
||||||
|
'REMOTE_ADDR': 'my.proxy.example.org',
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||||
|
}
|
||||||
|
get(url, user=admin, middleware=middleware, **headers)
|
||||||
|
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
class TestDeleteViews:
|
class TestDeleteViews:
|
||||||
def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
|
def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
|
||||||
|
|||||||
66
awx/main/tests/functional/api/test_immutablesharedfields.py
Normal file
66
awx/main/tests/functional/api/test_immutablesharedfields.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from awx.api.versioning import reverse
|
||||||
|
from awx.main.models import Organization
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
class TestImmutableSharedFields:
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def configure_settings(self, settings):
|
||||||
|
settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False
|
||||||
|
|
||||||
|
def test_create_raises_permission_denied(self, admin_user, post):
|
||||||
|
orgA = Organization.objects.create(name='orgA')
|
||||||
|
resp = post(
|
||||||
|
url=reverse('api:team_list'),
|
||||||
|
data={'name': 'teamA', 'organization': orgA.id},
|
||||||
|
user=admin_user,
|
||||||
|
expect=403,
|
||||||
|
)
|
||||||
|
assert "Creation of this resource is not allowed" in resp.data['detail']
|
||||||
|
|
||||||
|
def test_perform_delete_raises_permission_denied(self, admin_user, delete):
|
||||||
|
orgA = Organization.objects.create(name='orgA')
|
||||||
|
team = orgA.teams.create(name='teamA')
|
||||||
|
resp = delete(
|
||||||
|
url=reverse('api:team_detail', kwargs={'pk': team.id}),
|
||||||
|
user=admin_user,
|
||||||
|
expect=403,
|
||||||
|
)
|
||||||
|
assert "Deletion of this resource is not allowed" in resp.data['detail']
|
||||||
|
|
||||||
|
def test_perform_update(self, admin_user, patch):
|
||||||
|
orgA = Organization.objects.create(name='orgA')
|
||||||
|
team = orgA.teams.create(name='teamA')
|
||||||
|
# allow patching non-shared fields
|
||||||
|
patch(
|
||||||
|
url=reverse('api:team_detail', kwargs={'pk': team.id}),
|
||||||
|
data={"description": "can change this field"},
|
||||||
|
user=admin_user,
|
||||||
|
expect=200,
|
||||||
|
)
|
||||||
|
orgB = Organization.objects.create(name='orgB')
|
||||||
|
# prevent patching shared fields
|
||||||
|
resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
|
||||||
|
assert "Cannot change shared field" in resp.data['organization']
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'role',
|
||||||
|
['admin_role', 'member_role'],
|
||||||
|
)
|
||||||
|
@pytest.mark.parametrize('resource', ['organization', 'team'])
|
||||||
|
def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
|
||||||
|
orgA = Organization.objects.create(name='orgA')
|
||||||
|
if resource == 'organization':
|
||||||
|
role = getattr(orgA, role)
|
||||||
|
elif resource == 'team':
|
||||||
|
teamA = orgA.teams.create(name='teamA')
|
||||||
|
role = getattr(teamA, role)
|
||||||
|
resp = post(
|
||||||
|
url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
|
||||||
|
data={'id': role.id},
|
||||||
|
user=admin_user,
|
||||||
|
expect=403,
|
||||||
|
)
|
||||||
|
assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
|
||||||
@@ -32,13 +32,6 @@ def node_type_instance():
|
|||||||
return fn
|
return fn
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def instance_group(job_factory):
|
|
||||||
ig = InstanceGroup(name="east")
|
|
||||||
ig.save()
|
|
||||||
return ig
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def containerized_instance_group(instance_group, kube_credential):
|
def containerized_instance_group(instance_group, kube_credential):
|
||||||
ig = InstanceGroup(name="container")
|
ig = InstanceGroup(name="container")
|
||||||
|
|||||||
@@ -131,11 +131,11 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
|
|||||||
|
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||||
|
|
||||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||||
assert JobTemplate.create_unified_job.called
|
assert JobTemplate.create_unified_job.called
|
||||||
assert JobTemplate.create_unified_job.call_args == ()
|
assert JobTemplate.create_unified_job.call_args == ()
|
||||||
|
|
||||||
# Check that job is serialized correctly
|
# Check that job is serialized correctly
|
||||||
job_id = response.data['job']
|
job_id = response.data['job']
|
||||||
@@ -167,12 +167,12 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
|
|||||||
|
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||||
|
|
||||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||||
assert JobTemplate.create_unified_job.called
|
assert JobTemplate.create_unified_job.called
|
||||||
called_with = data_to_internal(runtime_data)
|
called_with = data_to_internal(runtime_data)
|
||||||
JobTemplate.create_unified_job.assert_called_with(**called_with)
|
JobTemplate.create_unified_job.assert_called_with(**called_with)
|
||||||
|
|
||||||
job_id = response.data['job']
|
job_id = response.data['job']
|
||||||
assert job_id == 968
|
assert job_id == 968
|
||||||
@@ -187,11 +187,11 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):
|
|||||||
|
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968)
|
mock_job = mocker.MagicMock(spec=Job, id=968)
|
||||||
|
|
||||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||||
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
|
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
|
||||||
assert JobTemplate.create_unified_job.called
|
assert JobTemplate.create_unified_job.called
|
||||||
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
|
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
|
||||||
|
|
||||||
mock_job.signal_start.assert_called_once()
|
mock_job.signal_start.assert_called_once()
|
||||||
|
|
||||||
@@ -203,14 +203,14 @@ def test_slice_timeout_forks_need_int(job_template_prompts, post, admin_user, mo
|
|||||||
|
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968)
|
mock_job = mocker.MagicMock(spec=Job, id=968)
|
||||||
|
|
||||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||||
response = post(
|
response = post(
|
||||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
|
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
|
||||||
)
|
)
|
||||||
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
|
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
|
||||||
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
|
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
|
||||||
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
|
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@@ -244,12 +244,12 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
|
|||||||
|
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||||
|
|
||||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
|
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
|
||||||
assert JobTemplate.create_unified_job.called
|
assert JobTemplate.create_unified_job.called
|
||||||
expected_call = data_to_internal(runtime_data)
|
expected_call = data_to_internal(runtime_data)
|
||||||
assert JobTemplate.create_unified_job.call_args == (expected_call,)
|
assert JobTemplate.create_unified_job.call_args == (expected_call,)
|
||||||
|
|
||||||
job_id = response.data['job']
|
job_id = response.data['job']
|
||||||
assert job_id == 968
|
assert job_id == 968
|
||||||
@@ -641,18 +641,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
|
|||||||
job_template.survey_spec = survey_spec_factory('survey_var')
|
job_template.survey_spec = survey_spec_factory('survey_var')
|
||||||
job_template.save()
|
job_template.save()
|
||||||
|
|
||||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||||
response = post(
|
response = post(
|
||||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
||||||
admin_user,
|
admin_user,
|
||||||
expect=201,
|
expect=201,
|
||||||
)
|
)
|
||||||
assert JobTemplate.create_unified_job.called
|
assert JobTemplate.create_unified_job.called
|
||||||
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
|
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
|
||||||
|
|
||||||
job_id = response.data['job']
|
job_id = response.data['job']
|
||||||
assert job_id == 968
|
assert job_id == 968
|
||||||
@@ -670,22 +670,22 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
|
|||||||
job_template.survey_spec = survey_spec_factory('survey_var')
|
job_template.survey_spec = survey_spec_factory('survey_var')
|
||||||
job_template.save()
|
job_template.save()
|
||||||
|
|
||||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||||
with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||||
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
|
mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
|
||||||
post(
|
post(
|
||||||
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
||||||
admin_user,
|
admin_user,
|
||||||
expect=201,
|
expect=201,
|
||||||
format='json',
|
format='json',
|
||||||
)
|
)
|
||||||
assert UnifiedJobTemplate.create_unified_job.called
|
assert UnifiedJobTemplate.create_unified_job.called
|
||||||
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
||||||
call_args.pop('_eager_fields', None) # internal purposes
|
call_args.pop('_eager_fields', None) # internal purposes
|
||||||
assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
|
assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
|
||||||
|
|
||||||
mock_job.signal_start.assert_called_once()
|
mock_job.signal_start.assert_called_once()
|
||||||
|
|
||||||
@@ -697,22 +697,22 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
|
|||||||
job_template.host_config_key = "foo"
|
job_template.host_config_key = "foo"
|
||||||
job_template.save()
|
job_template.save()
|
||||||
|
|
||||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||||
with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
|
mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
|
||||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||||
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
|
mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
|
||||||
post(
|
post(
|
||||||
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
||||||
admin_user,
|
admin_user,
|
||||||
expect=201,
|
expect=201,
|
||||||
format='json',
|
format='json',
|
||||||
)
|
)
|
||||||
assert UnifiedJobTemplate.create_unified_job.called
|
assert UnifiedJobTemplate.create_unified_job.called
|
||||||
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
||||||
call_args.pop('_eager_fields', None) # internal purposes
|
call_args.pop('_eager_fields', None) # internal purposes
|
||||||
assert call_args == {'limit': 'single-host'}
|
assert call_args == {'limit': 'single-host'}
|
||||||
|
|
||||||
mock_job.signal_start.assert_called_once()
|
mock_job.signal_start.assert_called_once()
|
||||||
|
|
||||||
@@ -725,9 +725,9 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
|
|||||||
job_template.save()
|
job_template.save()
|
||||||
host_with_alias = Host(name='localhost', inventory=job_template.inventory)
|
host_with_alias = Host(name='localhost', inventory=job_template.inventory)
|
||||||
host_with_alias.save()
|
host_with_alias.save()
|
||||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||||
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
||||||
assert tuple(r.data['matching_hosts']) == ('localhost',)
|
assert tuple(r.data['matching_hosts']) == ('localhost',)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@@ -738,6 +738,6 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
|
|||||||
job_template.save()
|
job_template.save()
|
||||||
host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
|
host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
|
||||||
host_with_alias.save()
|
host_with_alias.save()
|
||||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||||
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
||||||
assert not r.data['matching_hosts']
|
assert not r.data['matching_hosts']
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import pytest
|
import pytest
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.api.serializers import JobTemplateSerializer
|
from awx.api.serializers import JobTemplateSerializer
|
||||||
@@ -8,10 +9,15 @@ from awx.main.migrations import _save_password_keys as save_password_keys
|
|||||||
|
|
||||||
# Django
|
# Django
|
||||||
from django.apps import apps
|
from django.apps import apps
|
||||||
|
from django.test.utils import override_settings
|
||||||
|
|
||||||
# DRF
|
# DRF
|
||||||
from rest_framework.exceptions import ValidationError
|
from rest_framework.exceptions import ValidationError
|
||||||
|
|
||||||
|
# DAB
|
||||||
|
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
|
||||||
|
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@@ -369,3 +375,113 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
|
|||||||
)
|
)
|
||||||
assert r.status_code == 400
|
assert r.status_code == 400
|
||||||
assert "Cannot start automatically, an inventory is required." in str(r.data)
|
assert "Cannot start automatically, an inventory is required." in str(r.data)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
class TestJobTemplateCallbackProxyIntegration:
|
||||||
|
"""
|
||||||
|
Test the interaction of provision job template callback feature and:
|
||||||
|
settings.PROXY_IP_ALLOWED_LIST
|
||||||
|
x-trusted-proxy http header
|
||||||
|
"""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def job_template(self, inventory, project):
|
||||||
|
jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
|
||||||
|
return jt
|
||||||
|
|
||||||
|
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||||
|
def test_host_not_found(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
|
||||||
|
job_template.inventory.hosts.create(name='foobar')
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
|
||||||
|
'REMOTE_HOST': 'baz',
|
||||||
|
'REMOTE_ADDR': 'baz',
|
||||||
|
}
|
||||||
|
r = post(
|
||||||
|
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
|
||||||
|
)
|
||||||
|
assert r.data['msg'] == 'No matching host could be found!'
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'headers, expected',
|
||||||
|
(
|
||||||
|
pytest.param(
|
||||||
|
{
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
|
||||||
|
'REMOTE_HOST': 'my.proxy.example.org',
|
||||||
|
},
|
||||||
|
201,
|
||||||
|
),
|
||||||
|
pytest.param(
|
||||||
|
{
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
|
||||||
|
'REMOTE_HOST': 'not-my-proxy.org',
|
||||||
|
},
|
||||||
|
400,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||||
|
def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected): # noqa: F811
|
||||||
|
job_template.inventory.hosts.create(name='my.proxy.example.org')
|
||||||
|
|
||||||
|
post(
|
||||||
|
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||||
|
data={'host_config_key': 'abcd'},
|
||||||
|
user=admin_user,
|
||||||
|
expect=expected,
|
||||||
|
**headers
|
||||||
|
)
|
||||||
|
|
||||||
|
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
|
||||||
|
def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
|
||||||
|
job_template.inventory.hosts.create(name='foobar')
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
|
||||||
|
'REMOTE_ADDR': 'bar',
|
||||||
|
'REMOTE_HOST': 'baz',
|
||||||
|
}
|
||||||
|
post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)
|
||||||
|
|
||||||
|
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||||
|
def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
|
||||||
|
job_template.inventory.hosts.create(name='foobar')
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
|
||||||
|
}
|
||||||
|
|
||||||
|
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||||
|
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
|
||||||
|
post(
|
||||||
|
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||||
|
data={'host_config_key': 'abcd'},
|
||||||
|
user=admin_user,
|
||||||
|
expect=201,
|
||||||
|
**headers
|
||||||
|
)
|
||||||
|
|
||||||
|
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||||
|
def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
|
||||||
|
job_template.inventory.hosts.create(name='foobar')
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||||
|
'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
|
||||||
|
'REMOTE_ADDR': 'bar',
|
||||||
|
'REMOTE_HOST': 'baz',
|
||||||
|
}
|
||||||
|
|
||||||
|
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||||
|
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
|
||||||
|
post(
|
||||||
|
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||||
|
data={'host_config_key': 'abcd'},
|
||||||
|
user=admin_user,
|
||||||
|
expect=400,
|
||||||
|
**headers
|
||||||
|
)
|
||||||
|
|||||||
@@ -165,8 +165,8 @@ class TestAccessListCapabilities:
|
|||||||
def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
|
def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
|
||||||
inventory.admin_role.members.add(rando)
|
inventory.admin_role.members.add(rando)
|
||||||
|
|
||||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
|
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
|
||||||
|
|
||||||
mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
|
mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
|
||||||
self._assert_one_in_list(response.data)
|
self._assert_one_in_list(response.data)
|
||||||
@@ -174,8 +174,8 @@ class TestAccessListCapabilities:
|
|||||||
assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
|
assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
|
||||||
|
|
||||||
def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
|
def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
|
||||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
|
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
|
||||||
|
|
||||||
mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
|
mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
|
||||||
self._assert_one_in_list(response.data, sublist='indirect_access')
|
self._assert_one_in_list(response.data, sublist='indirect_access')
|
||||||
@@ -185,8 +185,8 @@ class TestAccessListCapabilities:
|
|||||||
def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
|
def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
|
||||||
team.member_role.children.add(inventory.admin_role)
|
team.member_role.children.add(inventory.admin_role)
|
||||||
|
|
||||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
|
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
|
||||||
|
|
||||||
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
|
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
|
||||||
self._assert_one_in_list(response.data)
|
self._assert_one_in_list(response.data)
|
||||||
@@ -198,8 +198,8 @@ class TestAccessListCapabilities:
|
|||||||
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
|
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
|
||||||
team.member_role.children.add(inventory.admin_role)
|
team.member_role.children.add(inventory.admin_role)
|
||||||
|
|
||||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||||
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
|
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
|
||||||
|
|
||||||
# Did we assess whether team_member can remove team's permission to the inventory?
|
# Did we assess whether team_member can remove team's permission to the inventory?
|
||||||
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
|
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
|
||||||
@@ -212,8 +212,8 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
|
|||||||
organization.member_role.members.add(alice)
|
organization.member_role.members.add(alice)
|
||||||
organization.member_role.members.add(bob)
|
organization.member_role.members.add(bob)
|
||||||
|
|
||||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||||
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
|
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
|
||||||
|
|
||||||
# Did we assess whether bob can remove alice's permission to the inventory?
|
# Did we assess whether bob can remove alice's permission to the inventory?
|
||||||
mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
|
mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
|
||||||
|
|||||||
@@ -3,17 +3,6 @@ import pytest
|
|||||||
from awx.api.versioning import reverse
|
from awx.api.versioning import reverse
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
|
||||||
def test_admin_visible_to_orphaned_users(get, alice):
|
|
||||||
names = set()
|
|
||||||
|
|
||||||
response = get(reverse('api:role_list'), user=alice)
|
|
||||||
for item in response.data['results']:
|
|
||||||
names.add(item['name'])
|
|
||||||
assert 'System Auditor' in names
|
|
||||||
assert 'System Administrator' in names
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@pytest.mark.parametrize('role,code', [('member_role', 400), ('admin_role', 400), ('inventory_admin_role', 204)])
|
@pytest.mark.parametrize('role,code', [('member_role', 400), ('admin_role', 400), ('inventory_admin_role', 204)])
|
||||||
@pytest.mark.parametrize('reversed', [True, False])
|
@pytest.mark.parametrize('reversed', [True, False])
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@@ -43,9 +43,9 @@ def run_command(name, *args, **options):
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_update_password_command(mocker, username, password, expected, changed):
|
def test_update_password_command(mocker, username, password, expected, changed):
|
||||||
with mocker.patch.object(UpdatePassword, 'update_password', return_value=changed):
|
mocker.patch.object(UpdatePassword, 'update_password', return_value=changed)
|
||||||
result, stdout, stderr = run_command('update_password', username=username, password=password)
|
result, stdout, stderr = run_command('update_password', username=username, password=password)
|
||||||
if result is None:
|
if result is None:
|
||||||
assert stdout == expected
|
assert stdout == expected
|
||||||
else:
|
else:
|
||||||
assert str(result) == expected
|
assert str(result) == expected
|
||||||
|
|||||||
@@ -16,9 +16,11 @@ from django.db.backends.sqlite3.base import SQLiteCursorWrapper
|
|||||||
|
|
||||||
from django.db.models.signals import post_migrate
|
from django.db.models.signals import post_migrate
|
||||||
|
|
||||||
|
from awx.main.migrations._dab_rbac import setup_managed_role_definitions
|
||||||
|
|
||||||
# AWX
|
# AWX
|
||||||
from awx.main.models.projects import Project
|
from awx.main.models.projects import Project
|
||||||
from awx.main.models.ha import Instance
|
from awx.main.models.ha import Instance, InstanceGroup
|
||||||
|
|
||||||
from rest_framework.test import (
|
from rest_framework.test import (
|
||||||
APIRequestFactory,
|
APIRequestFactory,
|
||||||
@@ -32,7 +34,6 @@ from awx.main.models.organization import (
|
|||||||
Organization,
|
Organization,
|
||||||
Team,
|
Team,
|
||||||
)
|
)
|
||||||
from awx.main.models.rbac import Role
|
|
||||||
from awx.main.models.notifications import NotificationTemplate, Notification
|
from awx.main.models.notifications import NotificationTemplate, Notification
|
||||||
from awx.main.models.events import (
|
from awx.main.models.events import (
|
||||||
JobEvent,
|
JobEvent,
|
||||||
@@ -91,6 +92,17 @@ def deploy_jobtemplate(project, inventory, credential):
|
|||||||
return jt
|
return jt
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def execution_environment():
|
||||||
|
return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", managed=True)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def setup_managed_roles():
|
||||||
|
"Run the migration script to pre-create managed role definitions"
|
||||||
|
setup_managed_role_definitions(apps, None)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def team(organization):
|
def team(organization):
|
||||||
return organization.teams.create(name='test-team')
|
return organization.teams.create(name='test-team')
|
||||||
@@ -434,7 +446,7 @@ def admin(user):
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def system_auditor(user):
|
def system_auditor(user):
|
||||||
u = user('an-auditor', False)
|
u = user('an-auditor', False)
|
||||||
Role.singleton('system_auditor').members.add(u)
|
u.is_system_auditor = True
|
||||||
return u
|
return u
|
||||||
|
|
||||||
|
|
||||||
@@ -723,6 +735,11 @@ def jt_linked(organization, project, inventory, machine_credential, credential,
|
|||||||
return jt
|
return jt
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def instance_group():
|
||||||
|
return InstanceGroup.objects.create(name="east")
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def workflow_job_template(organization):
|
def workflow_job_template(organization):
|
||||||
wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)
|
wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)
|
||||||
|
|||||||
125
awx/main/tests/functional/dab_rbac/test_access_list.py
Normal file
125
awx/main/tests/functional/dab_rbac/test_access_list.py
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from awx.main.models import User
|
||||||
|
from awx.api.versioning import reverse
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_access_list_superuser(get, admin_user, inventory):
|
||||||
|
url = reverse('api:inventory_access_list', kwargs={'pk': inventory.id})
|
||||||
|
|
||||||
|
response = get(url, user=admin_user, expect=200)
|
||||||
|
by_username = {}
|
||||||
|
for entry in response.data['results']:
|
||||||
|
by_username[entry['username']] = entry
|
||||||
|
assert 'admin' in by_username
|
||||||
|
|
||||||
|
assert len(by_username['admin']['summary_fields']['indirect_access']) == 1
|
||||||
|
assert len(by_username['admin']['summary_fields']['direct_access']) == 0
|
||||||
|
access_entry = by_username['admin']['summary_fields']['indirect_access'][0]
|
||||||
|
assert sorted(access_entry['descendant_roles']) == sorted(['adhoc_role', 'use_role', 'update_role', 'read_role', 'admin_role'])
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_access_list_system_auditor(get, admin_user, inventory):
|
||||||
|
sys_auditor = User.objects.create(username='sys-aud')
|
||||||
|
sys_auditor.is_system_auditor = True
|
||||||
|
assert sys_auditor.is_system_auditor
|
||||||
|
url = reverse('api:inventory_access_list', kwargs={'pk': inventory.id})
|
||||||
|
|
||||||
|
response = get(url, user=admin_user, expect=200)
|
||||||
|
by_username = {}
|
||||||
|
for entry in response.data['results']:
|
||||||
|
by_username[entry['username']] = entry
|
||||||
|
assert 'sys-aud' in by_username
|
||||||
|
|
||||||
|
assert len(by_username['sys-aud']['summary_fields']['indirect_access']) == 1
|
||||||
|
assert len(by_username['sys-aud']['summary_fields']['direct_access']) == 0
|
||||||
|
access_entry = by_username['sys-aud']['summary_fields']['indirect_access'][0]
|
||||||
|
assert access_entry['descendant_roles'] == ['read_role']
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_access_list_direct_access(get, admin_user, inventory):
|
||||||
|
u1 = User.objects.create(username='u1')
|
||||||
|
|
||||||
|
inventory.admin_role.members.add(u1)
|
||||||
|
|
||||||
|
url = reverse('api:inventory_access_list', kwargs={'pk': inventory.id})
|
||||||
|
response = get(url, user=admin_user, expect=200)
|
||||||
|
by_username = {}
|
||||||
|
for entry in response.data['results']:
|
||||||
|
by_username[entry['username']] = entry
|
||||||
|
assert 'u1' in by_username
|
||||||
|
|
||||||
|
assert len(by_username['u1']['summary_fields']['direct_access']) == 1
|
||||||
|
assert len(by_username['u1']['summary_fields']['indirect_access']) == 0
|
||||||
|
access_entry = by_username['u1']['summary_fields']['direct_access'][0]
|
||||||
|
assert sorted(access_entry['descendant_roles']) == sorted(['adhoc_role', 'use_role', 'update_role', 'read_role', 'admin_role'])
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_access_list_organization_access(get, admin_user, inventory):
|
||||||
|
u2 = User.objects.create(username='u2')
|
||||||
|
|
||||||
|
inventory.organization.inventory_admin_role.members.add(u2)
|
||||||
|
|
||||||
|
# User has indirect access to the inventory
|
||||||
|
url = reverse('api:inventory_access_list', kwargs={'pk': inventory.id})
|
||||||
|
response = get(url, user=admin_user, expect=200)
|
||||||
|
by_username = {}
|
||||||
|
for entry in response.data['results']:
|
||||||
|
by_username[entry['username']] = entry
|
||||||
|
assert 'u2' in by_username
|
||||||
|
|
||||||
|
assert len(by_username['u2']['summary_fields']['indirect_access']) == 1
|
||||||
|
assert len(by_username['u2']['summary_fields']['direct_access']) == 0
|
||||||
|
access_entry = by_username['u2']['summary_fields']['indirect_access'][0]
|
||||||
|
assert sorted(access_entry['descendant_roles']) == sorted(['adhoc_role', 'use_role', 'update_role', 'read_role', 'admin_role'])
|
||||||
|
|
||||||
|
# Test that user shows up in the organization access list with direct access of expected roles
|
||||||
|
url = reverse('api:organization_access_list', kwargs={'pk': inventory.organization_id})
|
||||||
|
response = get(url, user=admin_user, expect=200)
|
||||||
|
by_username = {}
|
||||||
|
for entry in response.data['results']:
|
||||||
|
by_username[entry['username']] = entry
|
||||||
|
assert 'u2' in by_username
|
||||||
|
|
||||||
|
assert len(by_username['u2']['summary_fields']['direct_access']) == 1
|
||||||
|
assert len(by_username['u2']['summary_fields']['indirect_access']) == 0
|
||||||
|
access_entry = by_username['u2']['summary_fields']['direct_access'][0]
|
||||||
|
assert sorted(access_entry['descendant_roles']) == sorted(['inventory_admin_role', 'read_role'])
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_team_indirect_access(get, team, admin_user, inventory):
|
||||||
|
u1 = User.objects.create(username='u1')
|
||||||
|
team.member_role.members.add(u1)
|
||||||
|
|
||||||
|
inventory.organization.inventory_admin_role.parents.add(team.member_role)
|
||||||
|
|
||||||
|
url = reverse('api:inventory_access_list', kwargs={'pk': inventory.id})
|
||||||
|
response = get(url, user=admin_user, expect=200)
|
||||||
|
by_username = {}
|
||||||
|
for entry in response.data['results']:
|
||||||
|
by_username[entry['username']] = entry
|
||||||
|
assert 'u1' in by_username
|
||||||
|
|
||||||
|
assert len(by_username['u1']['summary_fields']['direct_access']) == 1
|
||||||
|
assert len(by_username['u1']['summary_fields']['indirect_access']) == 0
|
||||||
|
access_entry = by_username['u1']['summary_fields']['direct_access'][0]
|
||||||
|
assert sorted(access_entry['descendant_roles']) == sorted(['adhoc_role', 'use_role', 'update_role', 'read_role', 'admin_role'])
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_workflow_access_list(workflow_job_template, alice, bob, setup_managed_roles, get, admin_user):
|
||||||
|
"""Basic verification that WFJT access_list is functional"""
|
||||||
|
workflow_job_template.admin_role.members.add(alice)
|
||||||
|
workflow_job_template.organization.workflow_admin_role.members.add(bob)
|
||||||
|
|
||||||
|
url = reverse('api:workflow_job_template_access_list', kwargs={'pk': workflow_job_template.pk})
|
||||||
|
for u in (alice, bob, admin_user):
|
||||||
|
response = get(url, user=u, expect=200)
|
||||||
|
user_ids = [item['id'] for item in response.data['results']]
|
||||||
|
assert alice.pk in user_ids
|
||||||
|
assert bob.pk in user_ids
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from awx.main.access import InstanceGroupAccess, NotificationTemplateAccess
|
||||||
|
|
||||||
|
from ansible_base.rbac.models import RoleDefinition
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_instance_group_object_role_delete(rando, instance_group, setup_managed_roles):
|
||||||
|
"""Basic functionality of IG object-level admin role function AAP-25506"""
|
||||||
|
rd = RoleDefinition.objects.get(name='InstanceGroup Admin')
|
||||||
|
rd.give_permission(rando, instance_group)
|
||||||
|
access = InstanceGroupAccess(rando)
|
||||||
|
assert access.can_delete(instance_group)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_notification_template_object_role_change(rando, notification_template, setup_managed_roles):
|
||||||
|
"""Basic functionality of NT object-level admin role function AAP-25493"""
|
||||||
|
rd = RoleDefinition.objects.get(name='NotificationTemplate Admin')
|
||||||
|
rd.give_permission(rando, notification_template)
|
||||||
|
access = NotificationTemplateAccess(rando)
|
||||||
|
assert access.can_change(notification_template, {'name': 'new name'})
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_organization_auditor_role(rando, setup_managed_roles, organization, inventory, project, jt_linked):
|
||||||
|
obj_list = (inventory, project, jt_linked)
|
||||||
|
for obj in obj_list:
|
||||||
|
assert obj.organization == organization, obj # sanity
|
||||||
|
|
||||||
|
assert [rando.has_obj_perm(obj, 'view') for obj in obj_list] == [False for i in range(3)], obj_list
|
||||||
|
|
||||||
|
rd = RoleDefinition.objects.get(name='Organization Audit')
|
||||||
|
rd.give_permission(rando, organization)
|
||||||
|
|
||||||
|
codename_set = set(rd.permissions.values_list('codename', flat=True))
|
||||||
|
assert not ({'view_inventory', 'view_jobtemplate', 'audit_organization'} - codename_set) # sanity
|
||||||
|
|
||||||
|
assert [obj in type(obj).access_qs(rando) for obj in obj_list] == [True for i in range(3)], obj_list
|
||||||
|
assert [rando.has_obj_perm(obj, 'view') for obj in obj_list] == [True for i in range(3)], obj_list
|
||||||
156
awx/main/tests/functional/dab_rbac/test_dab_rbac_api.py
Normal file
156
awx/main/tests/functional/dab_rbac/test_dab_rbac_api.py
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from django.contrib.contenttypes.models import ContentType
|
||||||
|
from django.urls import reverse as django_reverse
|
||||||
|
from django.test.utils import override_settings
|
||||||
|
|
||||||
|
from awx.api.versioning import reverse
|
||||||
|
from awx.main.models import JobTemplate, Inventory, Organization
|
||||||
|
from awx.main.access import JobTemplateAccess, WorkflowJobTemplateAccess
|
||||||
|
|
||||||
|
from ansible_base.rbac.models import RoleDefinition
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_managed_roles_created(setup_managed_roles):
|
||||||
|
"Managed RoleDefinitions are created in post_migration signal, we expect to see them here"
|
||||||
|
for cls in (JobTemplate, Inventory):
|
||||||
|
ct = ContentType.objects.get_for_model(cls)
|
||||||
|
rds = list(RoleDefinition.objects.filter(content_type=ct))
|
||||||
|
assert len(rds) > 1
|
||||||
|
assert f'{cls.__name__} Admin' in [rd.name for rd in rds]
|
||||||
|
for rd in rds:
|
||||||
|
assert rd.managed is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_custom_read_role(admin_user, post, setup_managed_roles):
|
||||||
|
rd_url = django_reverse('roledefinition-list')
|
||||||
|
resp = post(
|
||||||
|
url=rd_url, data={"name": "read role made for test", "content_type": "awx.inventory", "permissions": ['view_inventory']}, user=admin_user, expect=201
|
||||||
|
)
|
||||||
|
rd_id = resp.data['id']
|
||||||
|
rd = RoleDefinition.objects.get(id=rd_id)
|
||||||
|
assert rd.content_type == ContentType.objects.get_for_model(Inventory)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
def test_custom_system_roles_prohibited(admin_user, post):
    """Creating a role definition with a null content type (system-wide scope) is rejected."""
    payload = {"name": "read role made for test", "content_type": None, "permissions": ['view_inventory']}
    response = post(url=django_reverse('roledefinition-list'), data=payload, user=admin_user, expect=400)
    assert 'System-wide roles are not enabled' in str(response.data)
|
@pytest.mark.django_db
def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post, setup_managed_roles):
    """Alice cannot see rando, so she cannot grant rando a role assignment."""
    admin_rd = RoleDefinition.objects.get(name='Inventory Admin')
    admin_rd.give_permission(alice, inventory)
    assignment_data = {"user": rando.id, "role_definition": admin_rd.id, "object_id": inventory.id}
    response = post(url=django_reverse('roleuserassignment-list'), data=assignment_data, user=alice, expect=400)
    # The API hides the invisible user behind a does-not-exist error
    assert 'does not exist' in str(response.data)
    assert not rando.has_obj_perm(inventory, 'change')
|
@pytest.mark.django_db
def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_managed_roles, organization):
    """Alice can assign a managed role to rando once shared org membership makes rando visible to her."""
    inv_admin_rd = RoleDefinition.objects.get(name='Inventory Admin')
    inv_admin_rd.give_permission(alice, inventory)
    # Membership in the same organization lets alice and rando see each other
    member_rd = RoleDefinition.objects.get(name='Organization Member')
    member_rd.give_permission(alice, organization)
    member_rd.give_permission(rando, organization)
    # Alice has full inventory permissions and can see rando, so the grant succeeds
    assignment_data = {"user": rando.id, "role_definition": inv_admin_rd.id, "object_id": inventory.id}
    post(url=django_reverse('roleuserassignment-list'), data=assignment_data, user=alice, expect=201)
    assert rando.has_obj_perm(inventory, 'change') is True
|
@pytest.mark.django_db
def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):
    """A custom role carrying delete permission lets its holder delete the inventory via the API."""
    # TODO: just a delete_inventory, without change_inventory
    delete_rd, _ = RoleDefinition.objects.get_or_create(
        name='inventory-delete',
        permissions=['delete_inventory', 'view_inventory', 'change_inventory'],
        content_type=ContentType.objects.get_for_model(Inventory),
    )
    delete_rd.give_permission(rando, inventory)
    inventory_id = inventory.pk
    detail_url = reverse('api:inventory_detail', kwargs={'pk': inventory_id})
    # TODO: eventually this will be valid test, for now ignore
    # patch(url=detail_url, data={"description": "new"}, user=rando, expect=403)
    delete(url=detail_url, user=rando, expect=202)
    # Inventory deletion is asynchronous; the record is marked pending rather than removed
    assert Inventory.objects.get(id=inventory_id).pending_deletion
|
@pytest.mark.django_db
def test_assign_custom_add_role(admin_user, rando, organization, post, setup_managed_roles):
    """An org-scoped role with add_inventory lets the holder create inventories and receive creator permissions."""
    add_rd, _ = RoleDefinition.objects.get_or_create(
        name='inventory-add', permissions=['add_inventory', 'view_organization'], content_type=ContentType.objects.get_for_model(Organization)
    )
    add_rd.give_permission(rando, organization)
    response = post(url=reverse('api:inventory_list'), data={'name': 'abc', 'organization': organization.id}, user=rando, expect=201)
    created_inventory = Inventory.objects.get(id=response.data['id'])
    # Creating an object grants the creator change permission on it
    assert rando.has_obj_perm(created_inventory, 'change')
|
@pytest.mark.django_db
def test_jt_creation_permissions(setup_managed_roles, inventory, project, rando):
    """Assigning the managed Inventory/Project Admin roles through the new RBAC
    system is sufficient to grant permission to create a job template."""
    jt_data = {'inventory': inventory.pk, 'project': project.pk, 'name': 'foo-jt'}
    access = JobTemplateAccess(rando)
    # establish prior state: with no roles, creation must be denied
    assert not access.can_add(jt_data)

    RoleDefinition.objects.get(name='Inventory Admin').give_permission(rando, inventory)
    RoleDefinition.objects.get(name='Project Admin').give_permission(rando, project)

    assert access.can_add(jt_data)
|
@pytest.mark.django_db
def test_workflow_creation_permissions(setup_managed_roles, organization, workflow_job_template, rando):
    """Similar to JT: assigning the org-level workflow admin role gives creator permissions."""
    wf_admin_rd = RoleDefinition.objects.get(name='Organization WorkflowJobTemplate Admin')
    assert workflow_job_template.organization == organization  # sanity
    wf_data = {'name': 'foo-flow', 'organization': organization.pk}
    access = WorkflowJobTemplateAccess(rando)
    # establish prior state: denied before the role is granted
    assert not access.can_add(wf_data)
    wf_admin_rd.give_permission(rando, organization)

    assert access.can_add(wf_data)
|
@pytest.mark.django_db
def test_assign_credential_to_user_of_another_org(setup_managed_roles, credential, admin_user, rando, org_admin, organization, post):
    '''Test that a credential can only be assigned to a user in the same organization'''
    cred_admin_rd = RoleDefinition.objects.get(name="Credential Admin")
    credential.organization = organization
    credential.save(update_fields=['organization'])
    assignment_url = django_reverse('roleuserassignment-list')

    # cannot assign credential to rando, as rando is not in the same org as the credential
    assert credential.organization not in Organization.access_qs(rando, 'member')
    response = post(url=assignment_url, data={"user": rando.id, "role_definition": cred_admin_rd.id, "object_id": credential.id}, user=admin_user, expect=400)
    assert "You cannot grant credential access to a User not in the credentials' organization" in str(response.data)

    # can assign credential to superuser
    rando.is_superuser = True
    rando.save()
    post(url=assignment_url, data={"user": rando.id, "role_definition": cred_admin_rd.id, "object_id": credential.id}, user=admin_user, expect=201)

    # can assign credential to org_admin
    assert credential.organization in Organization.access_qs(org_admin, 'member')
    post(url=assignment_url, data={"user": org_admin.id, "role_definition": cred_admin_rd.id, "object_id": credential.id}, user=admin_user, expect=201)
|
@pytest.mark.django_db
@override_settings(ALLOW_LOCAL_RESOURCE_MANAGEMENT=False)
def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles):
    """With local resource management disabled, membership roles cannot be assigned through this API."""
    member_rd = RoleDefinition.objects.get(name='Organization Member')
    assignment_data = {'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}
    response = post(django_reverse('roleuserassignment-list'), data=assignment_data, user=admin_user, expect=400)
    assert 'Not managed locally' in str(response.data)
||||||
120
awx/main/tests/functional/dab_rbac/test_external_auditor.py
Normal file
120
awx/main/tests/functional/dab_rbac/test_external_auditor.py
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from django.apps import apps
|
||||||
|
|
||||||
|
from ansible_base.rbac.managed import SystemAuditor
|
||||||
|
from ansible_base.rbac import permission_registry
|
||||||
|
|
||||||
|
from awx.main.access import check_user_access, get_user_queryset
|
||||||
|
from awx.main.models import User, AdHocCommandEvent
|
||||||
|
from awx.api.versioning import reverse
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def ext_auditor_rd():
    """Role definition built from the managed SystemAuditor template under a distinct name/shortname."""
    auditor_info = SystemAuditor(overrides={'name': 'Alien Auditor', 'shortname': 'ext_auditor'})
    role_definition, _ = auditor_info.get_or_create(apps)
    return role_definition
||||||
|
@pytest.fixture
def ext_auditor(ext_auditor_rd):
    """A user holding the external-auditor role globally."""
    user = User.objects.create(username='external-auditor-user')
    ext_auditor_rd.give_global_permission(user)
    return user
||||||
|
@pytest.fixture
def obj_factory(request):
    """Return a callable that materializes a pytest fixture by name, org-scoping execution environments."""

    def _materialize(fixture_name):
        obj = request.getfixturevalue(fixture_name)

        # special case: execution environments must be organization-scoped for these tests
        if obj._meta.model_name == 'executionenvironment':
            obj.organization = request.getfixturevalue('organization')
            obj.save(update_fields=['organization'])

        return obj

    return _materialize
|
@pytest.mark.django_db
def test_access_qs_external_auditor(ext_auditor_rd, rando, job_template):
    """A global auditor grant surfaces objects through all of the access queryset helpers."""
    ext_auditor_rd.give_global_permission(rando)
    job_template_cls = apps.get_model('main', 'JobTemplate')
    unified_jt_cls = apps.get_model('main', 'UnifiedJobTemplate')
    assert job_template in job_template_cls.access_qs(rando)
    assert job_template.id in job_template_cls.access_ids_qs(rando)
    assert job_template.id in unified_jt_cls.accessible_pk_qs(rando, 'read_role')
|
@pytest.mark.django_db
@pytest.mark.parametrize('model', sorted(permission_registry.all_registered_models, key=lambda cls: cls._meta.model_name))
class TestExternalAuditorRoleAllModels:
    """The external auditor role grants read-only visibility of every RBAC-registered model."""

    @staticmethod
    def _fixture_name(model):
        # Fixture names follow the model's verbose name with spaces replaced by underscores
        return model._meta.verbose_name.replace(' ', '_')

    def test_access_can_read_method(self, obj_factory, model, ext_auditor, rando):
        obj = obj_factory(self._fixture_name(model))

        assert check_user_access(rando, model, 'read', obj) is False
        assert check_user_access(ext_auditor, model, 'read', obj) is True

    def test_access_get_queryset(self, obj_factory, model, ext_auditor, rando):
        obj = obj_factory(self._fixture_name(model))

        assert obj not in get_user_queryset(rando, model)
        assert obj in get_user_queryset(ext_auditor, model)

    def test_global_list(self, obj_factory, model, ext_auditor, rando, get):
        fixture_name = self._fixture_name(model)
        obj_factory(fixture_name)

        list_url = reverse(f'api:{fixture_name}_list')
        baseline_count = get(list_url, user=rando, expect=200).data['count']

        # The auditor sees exactly one more object than the unprivileged user
        assert get(list_url, user=ext_auditor, expect=200).data['count'] == baseline_count + 1

        if fixture_name in ('job_template', 'workflow_job_template'):
            unified_url = reverse('api:unified_job_template_list')
            baseline_count = get(unified_url, user=rando, expect=200).data['count']

            assert get(unified_url, user=ext_auditor, expect=200).data['count'] == baseline_count + 1

    def test_detail_view(self, obj_factory, model, ext_auditor, rando, get):
        fixture_name = self._fixture_name(model)
        obj = obj_factory(fixture_name)

        detail_url = reverse(f'api:{fixture_name}_detail', kwargs={'pk': obj.pk})
        get(detail_url, user=rando, expect=403)  # NOTE: should be 401
        get(detail_url, user=ext_auditor, expect=200)
||||||
|
@pytest.mark.django_db
class TestExternalAuditorNonRoleModels:
    """Models outside the RBAC role system should still honor system-level view roles."""

    def test_ad_hoc_command_view(self, ad_hoc_command_factory, rando, ext_auditor, get):
        """The AdHocCommandAccess class references is_system_auditor

        this is to prove it works with other system-level view roles"""
        command = ad_hoc_command_factory()

        # List view: invisible to rando, visible to the auditor
        list_url = reverse('api:ad_hoc_command_list')
        assert get(list_url, user=rando, expect=200).data['count'] == 0
        auditor_resp = get(list_url, user=ext_auditor, expect=200)
        assert auditor_resp.data['count'] == 1
        assert auditor_resp.data['results'][0]['id'] == command.id

        # Nested event list follows the same visibility rules
        event = AdHocCommandEvent.objects.create(ad_hoc_command=command)
        events_url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': command.id})
        get(events_url, user=rando, expect=403)
        assert get(events_url, user=ext_auditor, expect=200).data['count'] == 1

        # Event detail view as well
        event_url = reverse('api:ad_hoc_command_event_detail', kwargs={'pk': event.id})
        get(event_url, user=rando, expect=403)
        assert get(event_url, user=ext_auditor, expect=200).data['id'] == event.id
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user