Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 03:24:50 -03:30)

Compare commits (164 commits): x-request-...rollup
Rollup commit table (the Author and Date columns are empty in this mirror capture; the SHA1 column lists):

c5bea2b557, 6d0c47fdd0, 54b4acbdfc, a41766090e, 34fa897dda, 32df114e41, 018f235a64, 7e77235d5e, 139d8f0ae2, 7691365aea, 59f61517d4, fa670e2d7f, a87a044d64, 381ade1148, 864a30e3d4, 5f42db67e6, ddf4f288d4, e75bc8bc1e, bb533287b8, 9979fc659e, 9e5babc093, c71e2524ed, 48b4c62186, 853730acb9, f1448fced1, 7697b6a69b, 22a491c32c, cbd9dce940, a4fdcc1cca, df95439008, acd834df8b, 587f0ecf98, 5a2091f7bf, fa7423819a, fde8af9f11, 209e7e27b1, 6c7d29a982, 282ba36839, b727d2c3b3, 7fc3d5c7c7, 4e055f46c4, f595985b7c, ea232315bf, ee251812b5, 00ba1ea569, d91af132c1, 94e5795dfc, c4688d6298, 6763badea3, 2c4ad6ef0f, 37f44d7214, 98bbc836a6, b59aff50dc, a70b0c1ddc, db72c9d5b8, 4e0d19914f, 6f2307f50e, dbc2215bb6, 7c08b29827, 407194d320, 853af295d9, 4738c8333a, 13dcea0afd, bc2d339981, bef9ef10bb, 8645fe5c57, b93aa20362, 4bbfc8a946, 2c8eef413b, d5bad1a533, f6c0effcb2, 31a086b11a, d94f766fcb, a7113549eb, bfd811f408, 030704a9e1, c312d9bce3, aadcc217eb, 345c1c11e9, 2c3a7fafc5, dbcd32a1d9, d45e258a78, d16b69a102, 8b4efbc973, 4cb061e7db, 31db6a1447, ad9d5904d8, b837d549ff, 9e22865d2e, ee3e3e1516, 4a8f6e45f8, 6a317cca1b, d67af79451, fe77fda7b2, f613b76baa, 054cbe69d7, 87e9dcb6d7, c8829b057e, a0b376a6ca, d675207f99, 20504042c9, 0e87e97820, 1f154742df, 85fc81aab1, 5cfeeb3e87, a8c07b06d8, 53c5feaf6b, 6f57aaa8f5, bea74a401d, 54e85813c8, b69ed08fe5, de25408a23, b17f0a188b, fb860d76ce, 451f20ce0f, c1dc0c7b86, d65ea2a3d5, 8827ae7554, 4915262af1, d43c91e1a5, b470ca32af, 793777bec7, 6dc4a4508d, cf09a4220d, 659c3b64de, 37ad690d09, 7845ec7e01, a15bcf1d55, 7b3fb2c2a8, 6df47c8449, cae42653bf, da46a29f40, 0eb465531c, d0fe0ed796, ceafa14c9d, 08e1454098, 776b661fb3, af6ccdbde5, 559ab3564b, 208ef0ce25, c3d9aa54d8, 66efe7198a, adf930ee42, 892410477a, 0d4f653794, 8de8f6dce2, fc9064e27f, 7de350dc3e, d4bdaad4d8, a9b2ffa3e9, 1b8d409043, da2bccf5a8, a2f083bd8e, 4d641b6cf5, 439c3f0c23, 946bbe3560, 20f054d600, 918d5b3565, 158314af50, 4754819a09, 78fc23138a, 014534bfa5, 2502e7c7d8, fb237e3834
.github/actions/awx_devel_image/action.yml (2 changes)

```diff
@@ -24,7 +24,7 @@ runs:
     - name: Pre-pull latest devel image to warm cache
       shell: bash
-      run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
+      run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}

     - name: Build image for current source checkout
       shell: bash
```
.github/actions/run_awx_devel/action.yml (10 changes)

```diff
@@ -57,16 +57,6 @@ runs:
           awx-manage update_password --username=admin --password=password
         EOSH

-    - name: Build UI
-      # This must be a string comparison in composite actions:
-      # https://github.com/actions/runner/issues/2238
-      if: ${{ inputs.build-ui == 'true' }}
-      shell: bash
-      run: |
-        docker exec -i tools_awx_1 sh <<-EOSH
-          make ui-devel
-        EOSH
-
     - name: Get instance data
       id: data
       shell: bash
```
.github/triage_replies.md (2 changes)

```diff
@@ -1,7 +1,7 @@
 ## General
 - For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
 - Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
-- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
+- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
```
.github/workflows/ci.yml (26 changes)

```diff
@@ -38,7 +38,9 @@ jobs:
           - name: ui-test-general
             command: make ui-test-general
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Build awx_devel image for running checks
         uses: ./.github/actions/awx_devel_image
@@ -52,7 +54,9 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -70,13 +74,15 @@ jobs:
       DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           path: awx

       - name: Checkout awx-operator
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ansible/awx-operator
           path: awx-operator

@@ -130,7 +136,9 @@ jobs:
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
       - name: Upgrade ansible-core
@@ -154,7 +162,9 @@ jobs:
           - name: r-z0-9
             regex: ^[r-z0-9]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -200,7 +210,9 @@ jobs:
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Upgrade ansible-core
         run: python3 -m pip install --upgrade ansible-core
```
.github/workflows/dab-release.yml (new file, 57 lines)

```diff
@@ -0,0 +1,57 @@
+---
+name: django-ansible-base requirements update
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 6 * * *' # once a day @ 6 AM
+permissions:
+  pull-requests: write
+  contents: write
+jobs:
+  dab-pin-newest:
+    if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
+    runs-on: ubuntu-latest
+    steps:
+      - id: dab-release
+        name: Get current django-ansible-base release version
+        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
+        with:
+          owner: ansible
+          repo: django-ansible-base
+          excludes: prerelease, draft
+
+      - name: Check out repository code
+        uses: actions/checkout@v4
+
+      - id: dab-pinned
+        name: Get current django-ansible-base pinned version
+        run:
+          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
+
+      - name: Update django-ansible-base pinned version to upstream release
+        run:
+          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
+        with:
+          base: devel
+          branch: bump-django-ansible-base
+          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
+          body: |
+            ##### SUMMARY
+            Automated .github/workflows/dab-release.yml
+
+            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
+            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
+
+            ##### ISSUE TYPE
+            - Bug, Docs Fix or other nominal change
+
+            ##### COMPONENT NAME
+            - API
+
+          commit-message: |
+            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
+          add-paths:
+            requirements/requirements_git.txt
```
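The workflow's first step resolves the newest stable django-ansible-base release before comparing it to the pinned version. Here is a hedged Python sketch of that lookup against GitHub's public releases API; the workflow itself delegates this to the pinned third-party action above, so the function below is purely illustrative.

```python
# Hedged sketch of "latest non-prerelease, non-draft release" resolution
# against GitHub's public REST API (releases are returned newest first).
import requests

def latest_stable_release(owner: str, repo: str) -> str:
    url = f"https://api.github.com/repos/{owner}/{repo}/releases"
    releases = requests.get(url, timeout=30).json()
    for release in releases:
        if not release["prerelease"] and not release["draft"]:
            return release["tag_name"]
    raise LookupError(f"no stable release found for {owner}/{repo}")

print(latest_stable_release("ansible", "django-ansible-base"))
```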
.github/workflows/devel_images.yml (13 changes)

```diff
@@ -2,6 +2,7 @@
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
 on:
   workflow_dispatch:
   push:
@@ -34,7 +35,9 @@ jobs:
           exit 0
         if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')

-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
@@ -59,16 +62,14 @@ jobs:
         run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

-      - name: Setup node and npm
+      - name: Setup node and npm for the new UI build
         uses: actions/setup-node@v2
         with:
-          node-version: '16.13.1'
+          node-version: '18'
         if: matrix.build-targets.image-name == 'awx'

-      - name: Prebuild UI for awx image (to speed up build process)
+      - name: Prebuild new UI for awx image (to speed up build process)
         run: |
-          sudo apt-get install gettext
-          make ui-release
+          make ui-next
         if: matrix.build-targets.image-name == 'awx'
```
.github/workflows/docs.yml (4 changes)

```diff
@@ -8,7 +8,9 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: install tox
         run: pip install tox
```
.github/workflows/e2e_test.yml (deleted, 75 lines)

```diff
@@ -1,75 +0,0 @@
----
-name: E2E Tests
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-
-on:
-  pull_request_target:
-    types: [labeled]
-jobs:
-  e2e-test:
-    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
-    runs-on: ubuntu-latest
-    timeout-minutes: 40
-    permissions:
-      packages: write
-      contents: read
-    strategy:
-      matrix:
-        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: true
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Pull awx_cypress_base image
-        run: |
-          docker pull quay.io/awx/awx_cypress_base:latest
-
-      - name: Checkout test project
-        uses: actions/checkout@v3
-        with:
-          repository: ${{ github.repository_owner }}/tower-qa
-          ssh-key: ${{ secrets.QA_REPO_KEY }}
-          path: tower-qa
-          ref: devel
-
-      - name: Build cypress
-        run: |
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          docker build -t awx-pf-tests .
-
-      - name: Run E2E tests
-        env:
-          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
-        run: |
-          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
-          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
-          export COMMIT_INFO_SHA=$GITHUB_SHA
-          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          AWX_IP=${{ steps.awx.outputs.ip }}
-          printenv > .env
-          echo "Executing tests:"
-          docker run \
-            --network '_sources_default' \
-            --ipc=host \
-            --env-file=.env \
-            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
-            -e CYPRESS_AWX_E2E_USERNAME=admin \
-            -e CYPRESS_AWX_E2E_PASSWORD='password' \
-            -e COMMAND="npm run cypress-concurrently-gha" \
-            -v /dev/shm:/dev/shm \
-            -v $PWD:/e2e \
-            -w /e2e \
-            awx-pf-tests run --project .
-
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: e2e-${{ matrix.job }}.log
```
.github/workflows/label_issue.yml (5 changes)

```diff
@@ -30,7 +30,10 @@ jobs:
     timeout-minutes: 20
     name: Label Issue - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

+      - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
```
.github/workflows/label_pr.yml (5 changes)

```diff
@@ -29,7 +29,10 @@ jobs:
     timeout-minutes: 20
     name: Label PR - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

+      - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
```
.github/workflows/promote.yml (19 changes)

```diff
@@ -29,10 +29,12 @@ jobs:
       - name: Set GitHub Env vars if release event
         if: ${{ github.event_name == 'release' }}
         run: |
-          echo "TAG_NAME=${{ env.TAG_NAME }}" >> $GITHUB_ENV
+          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV

       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -60,15 +62,18 @@ jobs:
           COLLECTION_VERSION: ${{ env.TAG_NAME }}
           COLLECTION_TEMPLATE_VERSION: true
         run: |
+          sudo apt-get install jq
           make build_collection
-          curl_with_redirects=$(curl --head -sLw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
-          curl_without_redirects=$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
-          if [[ "$curl_with_redirects" == "302" ]] || [[ "$curl_without_redirects" == "302" ]]; then
+          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
+          if [[ "$count" == "1" ]]; then
            echo "Galaxy release already done";
-          else
+          elif [[ "$count" == "0" ]]; then
            ansible-galaxy collection publish \
              --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz;
+              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
+          else
+            echo "Unexpected count from galaxy search: $count";
+            exit 1;
           fi

       - name: Set official pypi info
```
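The reworked publish step is an idempotency gate: query Galaxy's collection-versions search endpoint, publish only when the version is absent, and fail loudly on any other count. A rough Python equivalent of the same decision, with the endpoint taken from the diff and the function itself purely illustrative:

```python
# Sketch of the promote.yml idempotency check: ask Galaxy how many published
# versions match, then branch on the count (0 = publish, 1 = already released,
# anything else = abort).
import sys
import requests

def galaxy_release_state(namespace: str, version: str) -> str:
    url = "https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/"
    resp = requests.get(url, params={"namespace": namespace, "name": "awx", "version": version}, timeout=30)
    resp.raise_for_status()
    count = resp.json()["meta"]["count"]
    if count == 1:
        return "already-released"
    if count == 0:
        return "publish"
    sys.exit(f"Unexpected count from galaxy search: {count}")
```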
.github/workflows/stage.yml (26 changes)

```diff
@@ -45,19 +45,22 @@ jobs:
           exit 0

       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           path: awx

       - name: Checkout awx-operator
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ${{ github.repository_owner }}/awx-operator
           path: awx-operator

       - name: Checkout awx-logos
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ansible/awx-logos
           path: awx-logos

@@ -86,17 +89,14 @@ jobs:
         run: |
           cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/

-      - name: Setup node and npm
+      - name: Setup node and npm for new UI build
         uses: actions/setup-node@v2
         with:
-          node-version: '16.13.1'
+          node-version: '18'

-      - name: Prebuild UI for awx image (to speed up build process)
+      - name: Prebuild new UI for awx image (to speed up build process)
         working-directory: awx
-        run: |
-          sudo apt-get install gettext
-          make ui-release
-          make ui-next
+        run: make ui-next

       - name: Set build env variables
         run: |
@@ -136,9 +136,9 @@ jobs:
       - name: Pulling images for test deployment with awx-operator
         # awx operator molecule test expect to kind load image and buildx exports image to registry and not local
         run: |
-          docker pull ${AWX_OPERATOR_TEST_IMAGE}
-          docker pull ${AWX_EE_TEST_IMAGE}
-          docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
+          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
+          docker pull -q ${AWX_EE_TEST_IMAGE}
+          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}

       - name: Run test deployment with awx-operator
         working-directory: awx-operator
```
.github/workflows/update_dependabot_prs.yml (4 changes)

```diff
@@ -13,7 +13,9 @@ jobs:

     steps:
       - name: Checkout branch
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Update PR Body
         env:
```
.github/workflows/upload_schema.yml (6 changes)

```diff
@@ -18,7 +18,9 @@ jobs:
       packages: write
       contents: read
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -34,7 +36,7 @@ jobs:

       - name: Pre-pull image to warm build cache
         run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :

       - name: Build image
         run: |
```
```diff
@@ -11,6 +11,8 @@ ignore: |
   # django template files
   awx/api/templates/instance_install_bundle/**
   .readthedocs.yaml
+  tools/loki
+  tools/otel

 extends: default
```
```diff
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choose to,

 #### Frontend Development

-See [the ui development documentation](awx/ui/CONTRIBUTING.md).
+See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).

 #### Fork and clone the AWX repo

@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for

 **NOTES**

-> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
+> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.

 > If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for

 At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.

-If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
+If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.

 ## Submitting Pull Requests
@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
 When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.

 ## Reporting Issues

 We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).

 ## Getting Help
```
Makefile (69 changes)

```diff
@@ -47,8 +47,14 @@ VAULT ?= false
 VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
+# If set to true docker-compose will also start an OpenTelemetry Collector instance
+OTEL ?= false
+# If set to true docker-compose will also start a Loki instance
+LOKI ?= false
 # If set to true docker-compose will install editable dependencies
 EDITABLE_DEPENDENCIES ?= false
+# If set to true, use tls for postgres connection
+PG_TLS ?= false

 VENV_BASE ?= /var/lib/awx/venv
@@ -57,6 +63,11 @@ DEV_DOCKER_OWNER ?= ansible
 DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
+IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
+
+# Common command to use for running ansible-playbook
+ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)

 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
@@ -65,7 +76,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37

 NAME ?= awx
@@ -80,6 +91,18 @@ I18N_FLAG_FILE = .i18n_built
 ## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
 PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x

+# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
+# DOCKER_CACHE=--no-cache make docker-compose-build
+ifeq ($(DOCKER_CACHE),)
+DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
+DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
+DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
+else
+DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
+endif
+
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
        develop refresh adduser migrate dbchange \
        receiver test test_unit test_coverage coverage_html \
@@ -362,7 +385,7 @@ symlink_collection:
        ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)

 awx_collection_build: $(shell find awx_collection -type f)
-       ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
+       $(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
               -e collection_package=$(COLLECTION_PACKAGE) \
               -e collection_namespace=$(COLLECTION_NAMESPACE) \
               -e collection_version=$(COLLECTION_VERSION) \
@@ -479,13 +502,7 @@ ui-test-general:
        $(NPM_BIN) run --prefix awx/ui pretest
        $(NPM_BIN) run --prefix awx/ui/ test-general --runInBand

-# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
-HEADLESS ?= no
-ifeq ($(HEADLESS), yes)
-dist/$(SDIST_TAR_FILE):
-else
-dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
-endif
+dist/$(SDIST_TAR_FILE):
        $(PYTHON) -m build -s
        ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -516,10 +533,10 @@ endif

 docker-compose-sources: .git/hooks/pre-commit
        @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-               ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+               $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
        fi;

-       ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+       $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
               -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
               -e awx_image_tag=$(COMPOSE_TAG) \
               -e receptor_image=$(RECEPTOR_IMAGE) \
@@ -535,12 +552,15 @@ docker-compose-sources: .git/hooks/pre-commit
               -e enable_vault=$(VAULT) \
               -e vault_tls=$(VAULT_TLS) \
               -e enable_tacacs=$(TACACS) \
+              -e enable_otel=$(OTEL) \
+              -e enable_loki=$(LOKI) \
               -e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+              -e pg_tls=$(PG_TLS) \
               $(EXTRA_SOURCES_ANSIBLE_OPTS)

 docker-compose: awx/projects docker-compose-sources
        ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-       ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+       $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
               -e enable_vault=$(VAULT) \
               -e vault_tls=$(VAULT_TLS) \
               -e enable_ldap=$(LDAP); \
@@ -583,7 +603,7 @@ docker-compose-container-group-clean:
 .PHONY: Dockerfile.dev
 ## Generate Dockerfile.dev for awx_devel image
 Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-       ansible-playbook tools/ansible/dockerfile.yml \
+       $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
               -e dockerfile_name=Dockerfile.dev \
               -e build_dev=True \
               -e receptor_image=$(RECEPTOR_IMAGE)
@@ -594,8 +614,7 @@ docker-compose-build: Dockerfile.dev
               -f Dockerfile.dev \
               -t $(DEVEL_IMAGE_NAME) \
               --build-arg BUILDKIT_INLINE_CACHE=1 \
-              --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
+              $(DOCKER_DEVEL_CACHE_FLAG) .

 .PHONY: docker-compose-buildx
 ## Build awx_devel image for docker compose development environment for multiple architectures
@@ -605,7 +624,7 @@ docker-compose-buildx: Dockerfile.dev
        - docker buildx build \
               --push \
               --build-arg BUILDKIT_INLINE_CACHE=1 \
-              --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) \
+              $(DOCKER_DEVEL_CACHE_FLAG) \
               --platform=$(PLATFORMS) \
               --tag $(DEVEL_IMAGE_NAME) \
               -f Dockerfile.dev .
@@ -658,7 +677,7 @@ version-for-buildyml:
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-       ansible-playbook tools/ansible/dockerfile.yml \
+       $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
               -e receptor_image=$(RECEPTOR_IMAGE) \
               -e headless=$(HEADLESS)
@@ -668,7 +687,8 @@ awx-kube-build: Dockerfile
               --build-arg VERSION=$(VERSION) \
               --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
               --build-arg HEADLESS=$(HEADLESS) \
-              -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
+              $(DOCKER_KUBE_CACHE_FLAG) \
+              -t $(IMAGE_KUBE) .

 ## Build multi-arch awx image for deployment on Kubernetes environment.
 awx-kube-buildx: Dockerfile
@@ -680,7 +700,8 @@ awx-kube-buildx: Dockerfile
               --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
               --build-arg HEADLESS=$(HEADLESS) \
               --platform=$(PLATFORMS) \
-              --tag $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) \
+              $(DOCKER_KUBE_CACHE_FLAG) \
+              --tag $(IMAGE_KUBE) \
               -f Dockerfile .
        - docker buildx rm awx-kube-buildx

@@ -688,7 +709,7 @@ awx-kube-buildx: Dockerfile
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-       ansible-playbook tools/ansible/dockerfile.yml \
+       $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
               -e dockerfile_name=Dockerfile.kube-dev \
               -e kube_dev=True \
               -e template_dest=_build_kube_dev \
@@ -698,8 +719,8 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 awx-kube-dev-build: Dockerfile.kube-dev
        DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
               --build-arg BUILDKIT_INLINE_CACHE=1 \
-              --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-              -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
+              $(DOCKER_KUBE_DEV_CACHE_FLAG) \
+              -t $(IMAGE_KUBE_DEV) .

 ## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
 awx-kube-dev-buildx: Dockerfile.kube-dev
@@ -708,14 +729,14 @@ awx-kube-dev-buildx: Dockerfile.kube-dev
        - docker buildx build \
               --push \
               --build-arg BUILDKIT_INLINE_CACHE=1 \
-              --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+              $(DOCKER_KUBE_DEV_CACHE_FLAG) \
               --platform=$(PLATFORMS) \
-              --tag $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+              --tag $(IMAGE_KUBE_DEV) \
               -f Dockerfile.kube-dev .
        - docker buildx rm awx-kube-dev-buildx

 kind-dev-load: awx-kube-dev-build
-       $(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+       $(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)

 # Translation TASKS
 # --------------------------------------
```
```diff
@@ -33,8 +33,10 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # django-ansible-base
 from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
 from ansible_base.lib.utils.models import get_all_field_names
+from ansible_base.lib.utils.requests import get_remote_host
 from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
 from ansible_base.rbac.permission_registry import permission_registry
+from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header

 # AWX
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
@@ -42,6 +44,7 @@ from awx.main.models.rbac import give_creator_permissions
 from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.licensing import server_product_name
+from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
@@ -93,8 +96,9 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
+        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
         if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
             ret.set_cookie(
                 'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
             )
@@ -103,12 +107,15 @@ class LoggedLoginView(auth_views.LoginView):
             return ret
         else:
             if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
             ret.status_code = 401
             return ret


 class LoggedLogoutView(auth_views.LogoutView):
+
+    success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()

     def dispatch(self, request, *args, **kwargs):
         original_user = getattr(request, 'user', None)
         ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
@@ -148,22 +155,23 @@ class APIView(views.APIView):
         Store the Django REST Framework Request object as an attribute on the
         normal Django request, store time the request started.
         """
+        remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
+
         self.time_started = time.time()
         if getattr(settings, 'SQL_DEBUG', False):
             self.queries_before = len(connection.queries)

+        if 'HTTP_X_TRUSTED_PROXY' in request.environ:
+            if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
+                remote_headers = settings.REMOTE_HOST_HEADERS
+            else:
+                logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")

-        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
-        # they respect the allowed proxy list
-        if all(
-            [
-                settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
-            ]
-        ):
-            for custom_header in settings.REMOTE_HOST_HEADERS:
-                if custom_header.startswith('HTTP_'):
-                    request.environ.pop(custom_header, None)
+        if settings.PROXY_IP_ALLOWED_LIST:
+            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
+                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)

         drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
         request.drf_request = drf_request
@@ -208,17 +216,21 @@ class APIView(views.APIView):
             return response

         if response.status_code >= 400:
+            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
             msg_data = {
                 'status_code': response.status_code,
                 'user_name': request.user,
                 'url_path': request.path,
-                'remote_addr': request.META.get('REMOTE_ADDR', None),
+                'remote_addr': ip,
             }

             if type(response.data) is dict:
                 msg_data['error'] = response.data.get('error', response.status_text)
             elif type(response.data) is list:
-                msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+                if len(response.data) > 0 and isinstance(response.data[0], str):
+                    msg_data['error'] = str(response.data[0])
+                else:
+                    msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
             else:
                 msg_data['error'] = response.status_text
```
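Several of these hunks replace raw `request.META['REMOTE_ADDR']` lookups with `get_remote_host(request)` from django-ansible-base, so login logging and error records honor the configured remote-host headers. As a hedged sketch (an assumption about the general technique, not django-ansible-base's actual implementation), a proxy-aware helper behaves roughly like this:

```python
# Illustrative only: walk a configured header list and return the first usable
# value instead of trusting REMOTE_ADDR alone.
def get_remote_host_sketch(request, headers=("HTTP_X_FORWARDED_FOR", "REMOTE_ADDR")):
    for header in headers:
        value = request.META.get(header, "")
        if value:
            # X-Forwarded-For may carry a comma-separated proxy chain; take the first hop.
            return value.split(",")[0].strip()
    return None
```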
```diff
@@ -103,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
                 default = field.get_default()
                 if type(default) is UUID:
                     default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
-                if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
+                if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
                     default = '{}://{}'.format(self.request.scheme, self.request.get_host())
                 field_info['default'] = default
             except serializers.SkipField:
```
```diff
@@ -5381,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
     )

     def get_body(self, obj):
-        if obj.notification_type in ('webhook', 'pagerduty'):
+        if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
             if isinstance(obj.body, dict):
                 if 'body' in obj.body:
                     return obj.body['body']
@@ -5403,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
     def to_representation(self, obj):
         ret = super(NotificationSerializer, self).to_representation(obj)

-        if obj.notification_type == 'webhook':
+        if obj.notification_type in ('webhook', 'awssns'):
             ret.pop('subject')
-        if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
+        if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
             ret.pop('body')
         return ret
```
```diff
@@ -2,6 +2,12 @@
 - hosts: all
   become: yes
   tasks:
     - name: Create the receptor group
       group:
+{% verbatim %}
         name: "{{ receptor_group }}"
+{% endverbatim %}
         state: present
     - name: Create the receptor user
       user:
+{% verbatim %}
```
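The added `{% verbatim %}` guards matter because this playbook ships as a Django template inside the instance install bundle: without them, Django would try to expand the Jinja-style `{{ receptor_group }}` at bundle-render time instead of leaving it for Ansible. A minimal sketch of the effect, assuming a throwaway Django configuration:

```python
# Rendering a string through Django's template engine: the verbatim block
# keeps the inner {{ receptor_group }} literal for Ansible to resolve later.
import django
from django.conf import settings
from django.template import Context, Template

settings.configure(TEMPLATES=[{"BACKEND": "django.template.backends.django.DjangoTemplates"}])
django.setup()

tmpl = Template('name: "{% verbatim %}{{ receptor_group }}{% endverbatim %}"')
print(tmpl.render(Context()))  # -> name: "{{ receptor_group }}"
```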
```diff
@@ -61,7 +61,9 @@ import pytz
 from wsgiref.util import FileWrapper

 # django-ansible-base
+from ansible_base.lib.utils.requests import get_remote_hosts
 from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
@@ -128,6 +130,7 @@ from awx.api.views.mixin import (
 from awx.api.pagination import UnifiedJobEventPagination
+from awx.main.utils import set_environ

 logger = logging.getLogger('awx.api.views')

@@ -710,16 +713,81 @@ class AuthView(APIView):
         return Response(data)


+def immutablesharedfields(cls):
+    '''
+    Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
+
+    Works by overriding these view methods:
+    - create
+    - delete
+    - perform_update
+    create and delete are overridden to raise a PermissionDenied exception.
+    perform_update is overridden to check if any shared fields are being modified,
+    and raise a PermissionDenied exception if so.
+    '''
+    # create instead of perform_create because some of our views
+    # override create instead of perform_create
+    if hasattr(cls, 'create'):
+        cls.original_create = cls.create
+
+        @functools.wraps(cls.create)
+        def create_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_create(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
+
+        cls.create = create_wrapper
+
+    if hasattr(cls, 'delete'):
+        cls.original_delete = cls.delete
+
+        @functools.wraps(cls.delete)
+        def delete_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_delete(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
+
+        cls.delete = delete_wrapper
+
+    if hasattr(cls, 'perform_update'):
+        cls.original_perform_update = cls.perform_update
+
+        @functools.wraps(cls.perform_update)
+        def update_wrapper(*args, **kwargs):
+            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                view, serializer = args
+                instance = view.get_object()
+                if instance:
+                    if isinstance(instance, models.Organization):
+                        shared_fields = OrganizationType._declared_fields.keys()
+                    elif isinstance(instance, models.User):
+                        shared_fields = UserType._declared_fields.keys()
+                    elif isinstance(instance, models.Team):
+                        shared_fields = TeamType._declared_fields.keys()
+                    attrs = serializer.validated_data
+                    for field in shared_fields:
+                        if field in attrs and getattr(instance, field) != attrs[field]:
+                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
+            return cls.original_perform_update(*args, **kwargs)
+
+        cls.perform_update = update_wrapper
+
+    return cls
+
+
+@immutablesharedfields
 class TeamList(ListCreateAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamDetail(RetrieveUpdateDestroyAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamUsersList(BaseUsersList):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -1101,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
     copy_return_serializer_class = serializers.ProjectSerializer


+@immutablesharedfields
 class UserList(ListCreateAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -1271,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=self.kwargs['pk'])
         role = get_object_or_400(models.Role, pk=sub_id)

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        # Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1343,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
         return qs.filter(Q(actor=parent) | Q(user__in=[parent]))


+@immutablesharedfields
 class UserDetail(RetrieveUpdateDestroyAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -2313,6 +2392,14 @@ class JobTemplateList(ListCreateAPIView):
     serializer_class = serializers.JobTemplateSerializer
     always_allow_superuser = False

+    def check_permissions(self, request):
+        if request.method == 'POST':
+            can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+            if not can_access:
+                self.permission_denied(request, message=messages)
+
+        super(JobTemplateList, self).check_permissions(request)


 class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = models.JobTemplate
@@ -2692,12 +2779,7 @@ class JobTemplateCallback(GenericAPIView):
         host for the current request.
         """
         # Find the list of remote host names/IPs to check.
-        remote_hosts = set()
-        for header in settings.REMOTE_HOST_HEADERS:
-            for value in self.request.META.get(header, '').split(','):
-                value = value.strip()
-                if value:
-                    remote_hosts.add(value)
+        remote_hosts = set(get_remote_hosts(self.request))
         # Add the reverse lookup of IP addresses.
         for rh in list(remote_hosts):
             try:
@@ -3037,6 +3119,14 @@ class WorkflowJobTemplateList(ListCreateAPIView):
     serializer_class = serializers.WorkflowJobTemplateSerializer
     always_allow_superuser = False

+    def check_permissions(self, request):
+        if request.method == 'POST':
+            can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+            if not can_access:
+                self.permission_denied(request, message=messages)
+
+        super(WorkflowJobTemplateList, self).check_permissions(request)


 class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = models.WorkflowJobTemplate
@@ -4295,7 +4385,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=sub_id)
         role = self.get_parent_object()

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
```
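The `immutablesharedfields` decorator works by swapping each view method for a wrapper that consults a setting before delegating to the original. A standalone sketch of that wrapper pattern, with a plain callable standing in for `settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT` and `PermissionError` standing in for DRF's `PermissionDenied` (both stand-ins are illustrative, not the AWX implementation):

```python
import functools

def forbid_create_unless(cls, allow):
    """Wrap cls.create so it only runs when allow() is truthy."""
    original_create = cls.create

    @functools.wraps(cls.create)
    def create_wrapper(*args, **kwargs):
        if allow():
            return original_create(*args, **kwargs)
        raise PermissionError("Creation of this resource is not allowed.")

    cls.create = create_wrapper
    return cls

class FakeView:
    def create(self):
        return "created"

FakeView = forbid_create_unless(FakeView, allow=lambda: False)
# FakeView().create() now raises PermissionError instead of creating.
```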
```diff
@@ -53,15 +53,18 @@ from awx.api.serializers import (
     CredentialSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
+from awx.api.views import immutablesharedfields

 logger = logging.getLogger('awx.api.views.organization')


+@immutablesharedfields
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer


+@immutablesharedfields
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
     relationship = 'inventories'


+@immutablesharedfields
 class OrganizationUsersList(BaseUsersList):
     model = User
     serializer_class = UserSerializer
@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
     ordering = ('username',)


+@immutablesharedfields
 class OrganizationAdminsList(BaseUsersList):
     model = User
     serializer_class = UserSerializer
@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
     parent_key = 'organization'


+@immutablesharedfields
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
     model = Team
     serializer_class = TeamSerializer
```
```diff
@@ -61,6 +61,10 @@ class StringListBooleanField(ListField):

     def to_representation(self, value):
         try:
+            if isinstance(value, str):
+                # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
+                # DRF changed truthy and falsy lists to be capitalized
+                value = value.lower()
             if isinstance(value, (list, tuple)):
                 return super(StringListBooleanField, self).to_representation(value)
             elif value in BooleanField.TRUE_VALUES:
@@ -78,6 +82,8 @@ class StringListBooleanField(ListField):

     def to_internal_value(self, data):
         try:
+            if isinstance(data, str):
+                data = data.lower()
             if isinstance(data, (list, tuple)):
                 return super(StringListBooleanField, self).to_internal_value(data)
             elif data in BooleanField.TRUE_VALUES:
```
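The lowercasing guards exist because the DRF commit linked in the diff capitalized entries in `BooleanField.TRUE_VALUES`/`FALSE_VALUES`; normalizing string input first keeps the membership tests working either way. A tiny sketch of the idea (the value set below is an illustrative subset, not DRF's exact one):

```python
# Normalize string input before checking against a truthy set, so "True",
# "YES", and "true" all behave identically regardless of the set's casing.
TRUE_VALUES = {"t", "true", "on", "y", "yes", "1", 1, True}

def to_bool(value):
    if isinstance(value, str):
        value = value.lower()
    return value in TRUE_VALUES

assert to_bool("True") and to_bool("YES") and not to_bool("no")
```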
@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
|
||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||
|
||||
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
|
||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
|
||||
assert settings.AWX_SOME_SETTING == 'DEFAULT'
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
|
||||
|
||||
|
||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
|
||||
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
|
||||
|
     settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
-        assert settings.AWX_SOME_SETTING == 'DEFAULT'
-        assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
+    assert settings.AWX_SOME_SETTING == 'DEFAULT'
+    assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False


 def test_empty_setting(settings, mocker):
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
     settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

     mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        with pytest.raises(AttributeError):
-            settings.AWX_SOME_SETTING
-        assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+    with pytest.raises(AttributeError):
+        settings.AWX_SOME_SETTING
+    assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET


 def test_setting_from_db(settings, mocker):
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):

     setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
     mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        assert settings.AWX_SOME_SETTING == 'FROM_DB'
-        assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+    assert settings.AWX_SOME_SETTING == 'FROM_DB'
+    assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'


 @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):

     existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
     setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
-        settings.AWX_SOME_SETTING = 'NEW-VALUE'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
+    settings.AWX_SOME_SETTING = 'NEW-VALUE'

     assert existing_setting.value == 'NEW-VALUE'
     existing_setting.save.assert_called_with(update_fields=['value'])
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
     settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

     existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
-        del settings.AWX_SOME_SETTING
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
+    del settings.AWX_SOME_SETTING

     assert existing_setting.delete.call_count == 1

@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
     # use its primary key as part of the encryption key
     setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
     mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        cache.set('AWX_ENCRYPTED', 'SECRET!')
-        assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
-        assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+    cache.set('AWX_ENCRYPTED', 'SECRET!')
+    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
+    assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'


 def test_readonly_sensitive_cache_data_is_encrypted(settings):

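Context for the test changes above: pytest-mock's `mocker.patch` starts the patch immediately and undoes it automatically at test teardown, so wrapping it in a `with` block is unnecessary, and recent pytest-mock versions reject context-manager usage outright. A minimal sketch of the intended pattern, using `os.getcwd` as an illustrative stand-in patch target:

```python
import os

def test_patched_value(mocker):
    # Active from this call until the end of the test; no `with` needed.
    mocked = mocker.patch('os.getcwd', return_value='/tmp/fake')
    assert os.getcwd() == '/tmp/fake'
    assert mocked.call_count == 1
```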
@@ -598,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
     - a superuser
     - admin role on the Instance group
     I can add/delete Instance Groups:
-    - a superuser(system administrator)
+    - a superuser(system administrator), because these are not org-scoped
     I can use Instance Groups when I have:
     - use_role on the instance group
     """
@@ -627,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
     def can_delete(self, obj):
         if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
             return False
-        return self.user.is_superuser
+        return self.user.has_obj_perm(obj, 'delete')


 class UserAccess(BaseAccess):
@@ -1387,12 +1387,11 @@ class TeamAccess(BaseAccess):
 class ExecutionEnvironmentAccess(BaseAccess):
     """
     I can see an execution environment when:
     - I'm a superuser
-    - I'm a member of the same organization
-    - it is a global ExecutionEnvironment
+    - I can see its organization
+    - It is a global ExecutionEnvironment
     I can create/change an execution environment when:
     - I'm a superuser
-    - I'm an admin for the organization(s)
+    - I have an organization or object role that gives access
     """

     model = ExecutionEnvironment
@@ -1401,7 +1400,9 @@ class ExecutionEnvironmentAccess(BaseAccess):

     def filtered_queryset(self):
         return ExecutionEnvironment.objects.filter(
-            Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
+            Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
+            | Q(organization__isnull=True)
+            | Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
         ).distinct()

     @check_superuser
@@ -1416,15 +1417,17 @@ class ExecutionEnvironmentAccess(BaseAccess):
             raise PermissionDenied
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            if not self.user.has_obj_perm(obj, 'change'):
-                raise PermissionDenied
+                return False
+        else:
+            if self.user not in obj.organization.execution_environment_admin_role:
+                raise PermissionDenied
-        if data and 'organization' in data:
-            new_org = get_object_from_data('organization', Organization, data, obj=obj)
-            if not new_org or self.user not in new_org.execution_environment_admin_role:
+        if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
             return False
+        # Special case that check_related does not catch, org users can not remove the organization from the EE
+        if data and ('organization' in data or 'organization_id' in data):
+            if (not data.get('organization')) and (not data.get('organization_id')):
+                return False
-        return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
+        return True

     def can_delete(self, obj):
         if obj.managed:
@@ -1596,6 +1599,8 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
         inventory = get_value(Inventory, 'inventory')
         if inventory:
             if self.user not in inventory.use_role:
+                if self.save_messages:
+                    self.messages['inventory'] = [_('You do not have use permission on Inventory')]
                 return False

         if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
@@ -1604,11 +1609,16 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
         project = get_value(Project, 'project')
         # If the user has admin access to the project (as an org admin), should
         # be able to proceed without additional checks.
-        if project:
-            return self.user in project.use_role
-        else:
+        if not project:
             return False

+        if self.user not in project.use_role:
+            if self.save_messages:
+                self.messages['project'] = [_('You do not have use permission on Project')]
+            return False
+
+        return True

     @check_superuser
     def can_copy_related(self, obj):
         """
@@ -2092,11 +2102,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if not data:  # So the browseable API will work
             return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

-        return bool(
-            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
-            and self.check_related('inventory', Inventory, data, role_field='use_role')
-            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
-        )
+        if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
+            if data.get('organization', None) is None:
+                if self.save_messages:
+                    self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
+            return False
+
+        if not self.check_related('inventory', Inventory, data, role_field='use_role'):
+            if self.save_messages:
+                self.messages['inventory'] = [_('You do not have use_role to the inventory')]
+            return False
+
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
+            if self.save_messages:
+                self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
+            return False
+
+        return True

     def can_copy(self, obj):
         if self.save_messages:
@@ -2628,7 +2650,7 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):

 class NotificationTemplateAccess(BaseAccess):
     """
-    I can see/use a notification_template if I have permission to
+    Run standard logic from DAB RBAC
     """

     model = NotificationTemplate
@@ -2649,10 +2671,7 @@ class NotificationTemplateAccess(BaseAccess):

     @check_superuser
     def can_change(self, obj, data):
-        if obj.organization is None:
-            # only superusers are allowed to edit orphan notification templates
-            return False
-        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
+        return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')

     def can_admin(self, obj, data):
         return self.can_change(obj, data)
@@ -2662,9 +2681,7 @@

     @check_superuser
     def can_start(self, obj, validate_license=True):
-        if obj.organization is None:
-            return False
-        return self.user in obj.organization.notification_admin_role
+        return self.can_change(obj, None)


 class NotificationAccess(BaseAccess):

@@ -66,10 +66,8 @@ class FixedSlidingWindow:


 class RelayWebsocketStatsManager:
-    def __init__(self, event_loop, local_hostname):
+    def __init__(self, local_hostname):
         self._local_hostname = local_hostname
-
-        self._event_loop = event_loop
         self._stats = dict()
         self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME

@@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
         self.start()

     def start(self):
-        self.async_task = self._event_loop.create_task(self.run_loop())
+        self.async_task = asyncio.get_running_loop().create_task(
+            self.run_loop(),
+            name='RelayWebsocketStatsManager.run_loop',
+        )
+        return self.async_task

     @classmethod

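A side note on the `start()` rewrite above: fetching the loop at call time with `asyncio.get_running_loop()` removes the need to hold a loop reference in `__init__` (hence the dropped `event_loop` argument), and naming the task makes it identifiable in `repr()` and asyncio debug output. A small self-contained sketch of the same idiom, with made-up names:

```python
import asyncio

async def run_loop():
    await asyncio.sleep(0.1)  # placeholder periodic work

def start():
    # Must be called from within a running event loop; the name= argument
    # (Python 3.8+) shows up in repr(task) and asyncio debug logs.
    return asyncio.get_running_loop().create_task(run_loop(), name='stats.run_loop')

async def main():
    await start()

asyncio.run(main())
```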
@@ -929,6 +929,16 @@ register(
     category_slug='debug',
 )

+register(
+    'RECEPTOR_KEEP_WORK_ON_ERROR',
+    field_class=fields.BooleanField,
+    label=_('Keep receptor work on error'),
+    default=False,
+    help_text=_('Prevent receptor work from being released when an error is detected'),
+    category=_('Debug'),
+    category_slug='debug',
+)


 def logging_validate(serializer, attrs):
     if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):

@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
 }
 CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
 ACTIVE_STATES = CAN_CANCEL
+ERROR_STATES = ('error',)
 MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
 CENSOR_VALUE = '************'
 ENV_BLOCKLIST = frozenset(

@@ -102,7 +102,8 @@ def create_listener_connection():

     # Apply overrides specifically for the listener connection
     for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
-        conf[k] = v
+        if k != 'OPTIONS':
+            conf[k] = v
     for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
         conf['OPTIONS'][k] = v


@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
         kwargs.setdefault('related_name', '+')
         kwargs.setdefault('null', 'True')
         kwargs.setdefault('editable', False)
-        kwargs.setdefault('on_delete', models.CASCADE)
+        kwargs.setdefault('on_delete', models.SET_NULL)
         super(ImplicitRoleField, self).__init__(*args, **kwargs)

     def deconstruct(self):

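For readers skimming the migration churn further below: the `on_delete` default flip above is what drives it. A minimal Django sketch (hypothetical models, not AWX code) of the behavioral difference for a nullable FK:

```python
from django.db import models

class Role(models.Model):
    pass

class Resource(models.Model):
    # CASCADE: deleting the referenced Role row also deletes this Resource row.
    # SET_NULL: the Resource survives and the column becomes NULL instead --
    # which requires null=True, a default ImplicitRoleField already sets.
    admin_role = models.ForeignKey(Role, null=True, on_delete=models.SET_NULL, related_name='+')
```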
awx/main/management/commands/check_instance_ready.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+from django.core.management.base import BaseCommand, CommandError
+from awx.main.models.ha import Instance
+
+
+class Command(BaseCommand):
+    help = 'Check if the task manager instance is ready; throws an error if not. Can be used as a readiness probe for k8s.'
+
+    def handle(self, *args, **options):
+        if Instance.objects.me().node_state != Instance.States.READY:
+            raise CommandError('Instance is not ready')  # so that return code is not 0
+
+        return

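Usage note (not part of the diff): like other AWX management commands, this one runs through `awx-manage`, and its exit code alone signals readiness, which is what a Kubernetes exec probe consumes. A sketch of checking it from Python:

```python
import subprocess

# Exits non-zero (CommandError) when the instance is not READY.
result = subprocess.run(['awx-manage', 'check_instance_ready'], capture_output=True, text=True)
print('ready' if result.returncode == 0 else f'not ready: {result.stderr.strip()}')
```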
@@ -2,6 +2,7 @@
 # All Rights Reserved

 from django.core.management.base import BaseCommand
+from django.db import transaction
 from crum import impersonate
 from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
 from awx.main.signals import disable_computed_fields
@@ -13,6 +14,12 @@ class Command(BaseCommand):
     help = 'Creates preload tower data if there is none.'

     def handle(self, *args, **kwargs):
+        # Wrap the operation in an atomic block, so we do not by accident
+        # create the organization but not create the project, etc.
+        with transaction.atomic():
+            self._handle()
+
+    def _handle(self):
         changed = False

         # Create a default organization as the first superuser found.
@@ -43,10 +50,11 @@ class Command(BaseCommand):

         ssh_type = CredentialType.objects.filter(namespace='ssh').first()
         c, _ = Credential.objects.get_or_create(
-            credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
+            credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser
         )

-        c.admin_role.members.add(superuser)
+        if superuser:
+            c.admin_role.members.add(superuser)

         public_galaxy_credential, _ = Credential.objects.get_or_create(
             name='Ansible Galaxy',

awx/main/management/commands/job_performance_rollup.py (new file, 151 lines)
@@ -0,0 +1,151 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved
+
+# Django
+from django.core.management.base import BaseCommand
+from django.db import connection
+
+import json
+import re
+
+
+class Command(BaseCommand):
+    """
+    Emits some simple statistics suitable for external monitoring
+    """
+
+    help = 'Run queries that provide an overview of the performance of the system over a given period of time'
+
+    def add_arguments(self, parser):
+        parser.add_argument('--since', action='store', dest='days', type=str, default="1", help='Max days to look back to for data')
+        parser.add_argument('--limit', action='store', dest='limit', type=str, default="10", help='Max number of records for database queries (LIMIT)')
+
+    def execute_query(self, query):
+        with connection.cursor() as cursor:
+            cursor.execute(query)
+            rows = cursor.fetchall()
+        return rows
+
+    def jsonify(self, title, keys, values, query):
+        result = []
+        query = re.sub('\n', ' ', query)
+        query = re.sub(r'\s{2,}', ' ', query)
+        for value in values:
+            result.append(dict(zip(keys, value)))
+        return {title: result, 'count': len(values), 'query': query}
+
+    def jobs_pending_duration(self, days, limit):
+        """Return list of jobs sorted by time in pending within configured number of days (within limit)"""
+        query = f"""
+            SELECT name, id AS job_id, unified_job_template_id, created, started - created AS pending_duration
+            FROM main_unifiedjob
+            WHERE finished IS NOT null
+            AND started IS NOT null
+            AND cancel_flag IS NOT true
+            AND created > NOW() - INTERVAL '{days} days'
+            AND started - created > INTERVAL '0 seconds'
+            ORDER BY pending_duration DESC
+            LIMIT {limit};"""
+        values = self.execute_query(query)
+        return self.jsonify(
+            title='completed_or_started_jobs_by_pending_duration',
+            keys=('job_name', 'job_id', 'unified_job_template_id', 'job_created', 'pending_duration'),
+            values=values,
+            query=query,
+        )
+
+    def times_of_day_pending_more_than_X_min(self, days, limit, minutes_pending):
+        """Return hourly counts of jobs that sat in pending longer than the given number of minutes (within limit)"""
+        query = f"""
+            SELECT
+              date_trunc('hour', created) as day_and_hour,
+              COUNT(created) as count_jobs_pending_greater_than_{minutes_pending}_min
+            FROM main_unifiedjob
+            WHERE started IS NOT NULL
+            AND started - created > INTERVAL '{minutes_pending} minutes'
+            AND created > NOW() - INTERVAL '{days} days'
+            GROUP BY date_trunc('hour', created)
+            ORDER BY count_jobs_pending_greater_than_{minutes_pending}_min DESC
+            LIMIT {limit};"""
+        values = self.execute_query(query)
+        return self.jsonify(
+            title=f'times_of_day_pending_more_than_{minutes_pending}',
+            keys=('day_and_hour', f'count_jobs_pending_more_than_{minutes_pending}_min'),
+            values=values,
+            query=query,
+        )
+
+    def pending_jobs_details(self, days, limit):
+        """Return list of jobs that are in pending and list details such as reasons they may be blocked, within configured number of days and limit."""
+        query = f"""
+            SELECT DISTINCT ON(A.id) A.name, A.id, A.unified_job_template_id, A.created, NOW() - A.created as pending_duration, F.allow_simultaneous, B.current_job_id as current_ujt_job, I.to_unifiedjob_id as dependency_job_id, A.dependencies_processed
+            FROM main_unifiedjob A
+            LEFT JOIN (
+              SELECT C.id, C.current_job_id FROM main_unifiedjobtemplate as C
+            ) B
+            ON A.unified_job_template_id = B.id
+            LEFT JOIN main_job F ON A.id = F.unifiedjob_ptr_id
+            LEFT JOIN (
+              SELECT * FROM main_unifiedjob_dependent_jobs as G
+              RIGHT JOIN main_unifiedjob H ON G.to_unifiedjob_id = H.id
+            ) I
+            ON A.id = I.from_unifiedjob_id
+            WHERE A.status = 'pending'
+            AND A.created > NOW() - INTERVAL '{days} days'
+            ORDER BY id DESC
+            LIMIT {limit};"""
+        values = self.execute_query(query)
+        return self.jsonify(
+            title='pending_jobs_details',
+            keys=(
+                'job_name',
+                'job_id',
+                'unified_job_template_id',
+                'job_created',
+                'pending_duration',
+                'allow_simultaneous',
+                'current_ujt_job',
+                'dependency_job_id',
+                'dependencies_processed',
+            ),
+            values=values,
+            query=query,
+        )
+
+    def jobs_by_FUNC_event_processing_time(self, func, days, limit):
+        """Return list of jobs sorted by MAX job event processing time within configured number of days (within limit)"""
+        if func not in ('MAX', 'MIN', 'AVG', 'SUM'):
+            raise RuntimeError('Only able to assess job events grouped by job with MAX, MIN, AVG, SUM functions')
+
+        query = f"""SELECT job_id, {func}(A.modified - A.created) as job_event_processing_delay_{func}, B.name, B.created, B.finished, B.controller_node, B.execution_node
+            FROM main_jobevent A
+            RIGHT JOIN (
+              SELECT id, created, name, finished, controller_node, execution_node FROM
+              main_unifiedjob
+              WHERE created > NOW() - INTERVAL '{days} days'
+              AND created IS NOT null
+              AND finished IS NOT null
+              AND id IS NOT null
+              AND name IS NOT null
+            ) B
+            ON A.job_id=B.id
+            WHERE A.job_id is not null
+            GROUP BY job_id, B.name, B.created, B.finished, B.controller_node, B.execution_node
+            ORDER BY job_event_processing_delay_{func} DESC
+            LIMIT {limit};"""
+        values = self.execute_query(query)
+        return self.jsonify(
+            title=f'jobs_by_{func}_event_processing',
+            keys=('job_id', f'{func}_job_event_processing_delay', 'job_name', 'job_created_time', 'job_finished_time', 'controller_node', 'execution_node'),
+            values=values,
+            query=query,
+        )
+
+    def handle(self, *args, **options):
+        items = []
+        for func in ('MAX', 'MIN', 'AVG'):
+            items.append(self.jobs_by_FUNC_event_processing_time(func, options['days'], options['limit']))
+        items.append(self.jobs_pending_duration(options['days'], options['limit']))
+        items.append(self.pending_jobs_details(options['days'], options['limit']))
+        items.append(self.times_of_day_pending_more_than_X_min(options['days'], options['limit'], minutes_pending=10))
+        self.stdout.write(json.dumps(items, indent=4, sort_keys=True, default=str))

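Usage sketch (not from the changeset): the command writes a JSON array to stdout, one object per query, each carrying a title key, a `count`, and the flattened `query` text; `--since` and `--limit` are the only knobs. For example:

```python
import json
import subprocess

out = subprocess.run(
    ['awx-manage', 'job_performance_rollup', '--since', '3', '--limit', '20'],
    capture_output=True, text=True, check=True,
).stdout
for section in json.loads(out):
    # Each dict's remaining key is the section title (e.g. 'pending_jobs_details').
    title = next(k for k in section if k not in ('count', 'query'))
    print(title, section['count'])
```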
@@ -101,8 +101,9 @@ class Command(BaseCommand):
             migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
             connection.close()  # Because of async nature, main loop will use new connection, so close this
         except Exception as exc:
-            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
-            time.sleep(10)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and entering the FATAL state
+            # sleeping before logging because logging relies on settings which require a database connection...
+            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
             return

         # In containerized deployments, migrations happen in the task container,
@@ -121,13 +122,14 @@ class Command(BaseCommand):
             return

         try:
-            my_hostname = Instance.objects.my_hostname()
+            my_hostname = Instance.objects.my_hostname()  # This relies on settings.CLUSTER_HOST_ID which requires database connection
             logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
         except RuntimeError as e:
             # the CLUSTER_HOST_ID in the task, and web instance must match and
             # ensure network connectivity between the task and web instance
-            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
-            time.sleep(5)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and entering the FATAL state
+            # sleeping before logging because logging relies on settings which require a database connection...
+            logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
             return

         if options.get('status'):
@@ -166,12 +168,14 @@ class Command(BaseCommand):

         WebsocketsMetricsServer().start()

-        while True:
-            try:
-                asyncio.run(WebSocketRelayManager().run())
-            except KeyboardInterrupt:
-                logger.info('Shutting down Websocket Relayer')
-                break
-            except Exception as e:
-                logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
-                time.sleep(10)
+        try:
+            logger.info('Starting Websocket Relayer...')
+            websocket_relay_manager = WebSocketRelayManager()
+            asyncio.run(websocket_relay_manager.run())
+        except KeyboardInterrupt:
+            logger.info('Terminating Websocket Relayer')
+        except BaseException as e:  # BaseException is used to catch all exceptions including asyncio.CancelledError
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and entering the FATAL state
+            # sleeping before logging because logging relies on settings which require a database connection...
+            logger.warning(f"Encountered error while running Websocket Relayer {e}, slept for 10s...")
+        return

@@ -6,7 +6,7 @@ import logging
 import threading
 import time
 import urllib.parse
-from pathlib import Path
+from pathlib import Path, PurePosixPath

 from django.conf import settings
 from django.contrib.auth import logout
@@ -58,7 +58,7 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
         response['X-API-Profile-File'] = self.prof.stop()
         perf_logger.debug(
             f'request: {request}, response_time: {response["X-API-Total-Time"]}',
-            extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"], x_request_id=request.get('x-request-id', 'not-set'))),
+            extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"])),
         )
         return response

@@ -138,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):

     @classmethod
     def _convert_named_url(cls, url_path):
-        url_units = url_path.split('/')
-        # If the identifier is an empty string, it is always invalid.
-        if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
-            return url_path
-        resource = url_units[3]
+        default_prefix = PurePosixPath('/api/v2/')
+        optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
+
+        url_path_original = url_path
+        url_path = PurePosixPath(url_path)
+
+        if set(optional_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = optional_prefix
+        elif set(default_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = default_prefix
+        else:
+            return url_path_original
+
+        # Remove prefix
+        url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
+        try:
+            resource_path = PurePosixPath(url_path.parts[0])
+            name = url_path.parts[1]
+            url_suffix = PurePosixPath(*url_path.parts[2:])  # remove name and resource
+        except IndexError:
+            return url_path_original
+
+        resource = resource_path.parts[0]
         if resource in settings.NAMED_URL_MAPPINGS:
-            url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
-        return '/'.join(url_units)
+            pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
+        else:
+            return url_path_original
+
+        parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
+        return PurePosixPath(*parts).as_posix() + '/'

     def process_request(self, request):
         old_path = request.path_info

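To make the `PurePosixPath` rewrite above concrete, here is an illustrative round trip (the pk value 42 stands in for whatever `_named_url_to_pk` resolves):

```python
from pathlib import PurePosixPath

url = PurePosixPath('/api/v2/hosts/localhost/variable_data/')
prefix = PurePosixPath('/api/v2/')

rest = PurePosixPath(*url.parts[len(prefix.parts):])  # hosts/localhost/variable_data
resource, name = rest.parts[0], rest.parts[1]         # 'hosts', 'localhost'
suffix = PurePosixPath(*rest.parts[2:])               # variable_data

pk = PurePosixPath('42')  # stand-in for _named_url_to_pk(...)
rebuilt = PurePosixPath(*(prefix.parts + (resource,) + pk.parts + suffix.parts))
print(rebuilt.as_posix() + '/')  # /api/v2/hosts/42/variable_data/
```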
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='job_template_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='credential_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='inventory_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='project_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='workflow_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='notification_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
             model_name='inventory',
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
                 related_name='+',
                 to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'admin_role',
                     'execute_role',

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='member_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',

@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='approval_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.approval_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
                 related_name='+',
                 to='main.Role',

@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',

@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execution_environment_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator'],
                 related_name='+',
                 to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
                 related_name='+',
                 to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
             model_name='instancegroup',
             name='use_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
             ),
             preserve_default='True',
         ),

@@ -0,0 +1,51 @@
+# Generated by Django 4.2.6 on 2024-05-08 07:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0192_custom_roles'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='notification',
+            name='notification_type',
+            field=models.CharField(
+                choices=[
+                    ('awssns', 'AWS SNS'),
+                    ('email', 'Email'),
+                    ('grafana', 'Grafana'),
+                    ('irc', 'IRC'),
+                    ('mattermost', 'Mattermost'),
+                    ('pagerduty', 'Pagerduty'),
+                    ('rocketchat', 'Rocket.Chat'),
+                    ('slack', 'Slack'),
+                    ('twilio', 'Twilio'),
+                    ('webhook', 'Webhook'),
+                ],
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='notificationtemplate',
+            name='notification_type',
+            field=models.CharField(
+                choices=[
+                    ('awssns', 'AWS SNS'),
+                    ('email', 'Email'),
+                    ('grafana', 'Grafana'),
+                    ('irc', 'IRC'),
+                    ('mattermost', 'Mattermost'),
+                    ('pagerduty', 'Pagerduty'),
+                    ('rocketchat', 'Rocket.Chat'),
+                    ('slack', 'Slack'),
+                    ('twilio', 'Twilio'),
+                    ('webhook', 'Webhook'),
+                ],
+                max_length=32,
+            ),
+        ),
+    ]

@@ -0,0 +1,61 @@
+# Generated by Django 4.2.10 on 2024-06-12 19:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0193_alter_notification_notification_type_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+    ]

awx/main/migrations/0195_EE_permissions.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+# Generated by Django 4.2.6 on 2024-06-20 15:55
+
+from django.db import migrations
+
+
+def delete_execution_environment_read_role(apps, schema_editor):
+    permission_classes = [apps.get_model('auth', 'Permission'), apps.get_model('dab_rbac', 'DABPermission')]
+    for permission_cls in permission_classes:
+        ee_read_perm = permission_cls.objects.filter(codename='view_executionenvironment').first()
+        if ee_read_perm:
+            ee_read_perm.delete()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0194_alter_inventorysource_source_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='executionenvironment',
+            options={'default_permissions': ('add', 'change', 'delete'), 'ordering': ('-created',)},
+        ),
+        migrations.RunPython(delete_execution_environment_read_role, migrations.RunPython.noop),
+    ]

@@ -134,8 +134,7 @@ def get_permissions_for_role(role_field, children_map, apps):

     # more special cases for those same above special org-level roles
     if role_field.name == 'auditor_role':
-        for codename in ('view_notificationtemplate', 'view_executionenvironment'):
-            perm_list.append(Permission.objects.get(codename=codename))
+        perm_list.append(Permission.objects.get(codename='view_notificationtemplate'))

     return perm_list

@@ -275,7 +274,12 @@ def setup_managed_role_definitions(apps, schema_editor):
     """
     Idempotent method to create or sync the managed role definitions
     """
-    to_create = settings.ANSIBLE_BASE_ROLE_PRECREATE
+    to_create = {
+        'object_admin': '{cls.__name__} Admin',
+        'org_admin': 'Organization Admin',
+        'org_children': 'Organization {cls.__name__} Admin',
+        'special': '{cls.__name__} {action}',
+    }

     ContentType = apps.get_model('contenttypes', 'ContentType')
     Permission = apps.get_model('dab_rbac', 'DABPermission')
@@ -285,14 +289,15 @@ def setup_managed_role_definitions(apps, schema_editor):
     managed_role_definitions = []

     org_perms = set()
-    for cls in permission_registry._registry:
+    for cls in permission_registry.all_registered_models:
         ct = ContentType.objects.get_for_model(cls)
+        cls_name = cls._meta.model_name
         object_perms = set(Permission.objects.filter(content_type=ct))
         # Special case for InstanceGroup which has an organization field, but is not an organization child object
-        if cls._meta.model_name != 'instancegroup':
+        if cls_name != 'instancegroup':
             org_perms.update(object_perms)

-        if 'object_admin' in to_create and cls != Organization:
+        if 'object_admin' in to_create and cls_name != 'organization':
             indiv_perms = object_perms.copy()
             add_perms = [perm for perm in indiv_perms if perm.codename.startswith('add_')]
             if add_perms:
@@ -305,7 +310,7 @@ def setup_managed_role_definitions(apps, schema_editor):
                 )
             )

-        if 'org_children' in to_create and cls != Organization:
+        if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
             org_child_perms = object_perms.copy()
             org_child_perms.add(Permission.objects.get(codename='view_organization'))

@@ -322,17 +327,25 @@ def setup_managed_role_definitions(apps, schema_editor):
         if 'special' in to_create:
             special_perms = []
             for perm in object_perms:
-                if perm.codename.split('_')[0] not in ('add', 'change', 'update', 'delete', 'view'):
+                # Organization auditor is handled separately
+                if perm.codename.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit'):
                     special_perms.append(perm)
             for perm in special_perms:
                 action = perm.codename.split('_')[0]
                 view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
+                perm_list = [perm, view_perm]
+                # Handle special case where the adhoc role also listed the use permission
+                if action == 'adhoc':
+                    for other_perm in object_perms:
+                        if other_perm.codename == 'use_inventory':
+                            perm_list.append(other_perm)
+                            break
                 managed_role_definitions.append(
                     get_or_create_managed(
                         to_create['special'].format(cls=cls, action=action.title()),
                         f'Has {action} permissions to a single {cls._meta.verbose_name}',
                         ct,
-                        [perm, view_perm],
+                        perm_list,
                         RoleDefinition,
                     )
                 )
@@ -348,6 +361,41 @@ def setup_managed_role_definitions(apps, schema_editor):
             )
         )

+    # Special "organization action" roles
+    audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
+    audit_permissions.append(Permission.objects.get(codename='audit_organization'))
+    managed_role_definitions.append(
+        get_or_create_managed(
+            'Organization Audit',
+            'Has permission to view all objects inside of a single organization',
+            org_ct,
+            audit_permissions,
+            RoleDefinition,
+        )
+    )
+
+    org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
+    managed_role_definitions.append(
+        get_or_create_managed(
+            'Organization Execute',
+            'Has permission to execute all runnable objects in the organization',
+            org_ct,
+            [perm for perm in org_perms if perm.codename in org_execute_permissions],
+            RoleDefinition,
+        )
+    )
+
+    org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
+    managed_role_definitions.append(
+        get_or_create_managed(
+            'Organization Approval',
+            'Has permission to approve any workflow steps within a single organization',
+            org_ct,
+            [perm for perm in org_perms if perm.codename in org_approval_permissions],
+            RoleDefinition,
+        )
+    )
+
     unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
     for role_definition in unexpected_role_definitions:
         logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')

@@ -176,17 +176,17 @@ pre_delete.connect(cleanup_created_modified_by, sender=User)

 @property
 def user_get_organizations(user):
-    return Organization.objects.filter(member_role__members=user)
+    return Organization.access_qs(user, 'member')


 @property
 def user_get_admin_of_organizations(user):
-    return Organization.objects.filter(admin_role__members=user)
+    return Organization.access_qs(user, 'change')


 @property
 def user_get_auditor_of_organizations(user):
-    return Organization.objects.filter(auditor_role__members=user)
+    return Organization.access_qs(user, 'audit')


 @property

@@ -21,6 +21,10 @@ from django.conf import settings
 from django.utils.encoding import force_str
 from django.utils.functional import cached_property
 from django.utils.timezone import now
+from django.contrib.auth.models import User
+
+# DRF
+from rest_framework.serializers import ValidationError as DRFValidationError

 # AWX
 from awx.api.versioning import reverse
@@ -41,6 +45,7 @@ from awx.main.models.rbac import (
     ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
     ROLE_SINGLETON_SYSTEM_AUDITOR,
 )
+from awx.main.models import Team, Organization
 from awx.main.utils import encrypt_field
 from . import injectors as builtin_injectors

@@ -315,6 +320,16 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
         else:
             raise ValueError('{} is not a dynamic input field'.format(field_name))

+    def validate_role_assignment(self, actor, role_definition):
+        if self.organization:
+            if isinstance(actor, User):
+                if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
+                    return
+            if isinstance(actor, Team):
+                if actor.organization == self.organization:
+                    return
+        raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
+

 class CredentialType(CommonModelNameNotUnique):
     """

@@ -4,11 +4,12 @@ import datetime
 from datetime import timezone
 import logging
 from collections import defaultdict
+import itertools
 import time

 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, DatabaseError
+from django.db import models, DatabaseError, transaction
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator
@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
     def _update_host_metrics(updated_hosts_list):
         from awx.main.models import HostMetric  # circular import

-        # bulk-create
         current_time = now()
-        HostMetric.objects.bulk_create(
-            [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
-        )
-        # bulk-update
-        batch_start, batch_size = 0, 1000
-        while batch_start <= len(updated_hosts_list):
-            batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
-            HostMetric.objects.filter(hostname__in=batched_host_list).update(
-                last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
-            )
-            batch_start += batch_size
+
+        # FUTURE:
+        # - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
+        # - Ability to do ORM upserts *may* have been introduced in Django 5.0.
+        #   See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
+        #   Hopefully this will be fully ready for batch use by 5.2 LTS.
+
+        args = [iter(updated_hosts_list)] * 500
+        for hosts in itertools.zip_longest(*args):
+            with transaction.atomic():
+                HostMetric.objects.bulk_create(
+                    [HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
+                )
+                HostMetric.objects.filter(hostname__in=hosts).update(
+                    last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
+                )

     @property
     def job_verbosity(self):

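The `zip_longest` construction above is the classic grouper idiom -- n references to one iterator consumed in lockstep -- which `itertools.batched()` (Python 3.12) supersedes, as the FUTURE comment notes. A standalone sketch; note the `None` fill values in the final short batch, which the AWX code filters with `if hostname is not None`:

```python
import itertools

def batched(iterable, n):
    args = [iter(iterable)] * n  # n references to ONE iterator
    for batch in itertools.zip_longest(*args):
        # The final short batch is padded with None, so filter it out
        # (this assumes None is never a legitimate item).
        yield [item for item in batch if item is not None]

print(list(batched(['a', 'b', 'c', 'd', 'e'], 2)))  # [['a', 'b'], ['c', 'd'], ['e']]
```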
@@ -1,6 +1,8 @@
 from django.db import models
 from django.utils.translation import gettext_lazy as _

+from rest_framework.exceptions import ValidationError
+
 from awx.api.versioning import reverse
 from awx.main.models.base import CommonModel
 from awx.main.validators import validate_container_image_name
@@ -12,6 +14,8 @@ __all__ = ['ExecutionEnvironment']
 class ExecutionEnvironment(CommonModel):
     class Meta:
         ordering = ('-created',)
+        # Remove view permission, as a temporary solution, defer to organization read permission
+        default_permissions = ('add', 'change', 'delete')

     PULL_CHOICES = [
         ('always', _("Always pull container before running.")),
@@ -53,3 +57,12 @@ class ExecutionEnvironment(CommonModel):

     def get_absolute_url(self, request=None):
         return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
+
+    def validate_role_assignment(self, actor, role_definition):
+        if self.managed:
+            raise ValidationError({'object_id': _('Can not assign object roles to managed Execution Environments')})
+        if self.organization_id is None:
+            raise ValidationError({'object_id': _('Can not assign object roles to global Execution Environments')})
+
+        if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
+            raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})

@@ -933,6 +933,7 @@ class InventorySourceOptions(BaseModel):
|
||||
('controller', _('Red Hat Ansible Automation Platform')),
|
||||
('insights', _('Red Hat Insights')),
|
||||
('terraform', _('Terraform State')),
|
||||
('openshift_virtualization', _('OpenShift Virtualization')),
|
||||
]
|
||||
|
||||
# From the options of the Django management base command
|
||||
@@ -1042,7 +1043,7 @@ class InventorySourceOptions(BaseModel):
|
||||
def cloud_credential_validation(source, cred):
|
||||
if not source:
|
||||
return None
|
||||
if cred and source not in ('custom', 'scm'):
|
||||
if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
|
||||
# If a credential was provided, it's important that it matches
|
||||
# the actual inventory source being used (Amazon requires Amazon
|
||||
# credentials; Rackspace requires Rackspace credentials; etc...)
|
||||
@@ -1051,12 +1052,14 @@ class InventorySourceOptions(BaseModel):
|
||||
# Allow an EC2 source to omit the credential. If Tower is running on
|
||||
# an EC2 instance with an IAM Role assigned, boto will use credentials
|
||||
# from the instance metadata instead of those explicitly provided.
|
||||
elif source in CLOUD_PROVIDERS and source != 'ec2':
|
||||
elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
|
||||
return _('Credential is required for a cloud source.')
|
||||
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
|
||||
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
|
||||
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
|
||||
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
|
||||
elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
|
||||
return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
|
||||
return None
|
||||
|
||||
def get_cloud_credential(self):
|
||||
@@ -1660,7 +1663,7 @@ class terraform(PluginFileInjector):
        credential = inventory_update.get_cloud_credential()

        private_data = {'credentials': {}}
        gce_cred = credential.get_input('gce_credentials')
        gce_cred = credential.get_input('gce_credentials', default=None)
        if gce_cred:
            private_data['credentials'][credential] = gce_cred
        return private_data
@@ -1669,7 +1672,7 @@ class terraform(PluginFileInjector):
        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
        credential = inventory_update.get_cloud_credential()
        cred_data = private_data_files['credentials']
        if cred_data[credential]:
        if credential in cred_data:
            env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
        return env

@@ -1693,6 +1696,16 @@ class insights(PluginFileInjector):
    use_fqcn = True


class openshift_virtualization(PluginFileInjector):
    plugin_name = 'kubevirt'
    base_injector = 'template'
    namespace = 'kubevirt'
    collection = 'core'
    downstream_namespace = 'redhat'
    downstream_collection = 'openshift_virtualization'
    use_fqcn = True
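Given use_fqcn = True, the plugin is referenced by its fully qualified collection name; a sketch of the naming these attributes imply (the helper function is illustrative, not part of AWX):

def plugin_fqcn(injector, downstream=False):
    # 'kubevirt.core.kubevirt' upstream, 'redhat.openshift_virtualization.kubevirt' downstream
    if downstream:
        return f"{injector.downstream_namespace}.{injector.downstream_collection}.{injector.plugin_name}"
    return f"{injector.namespace}.{injector.collection}.{injector.plugin_name}"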


class constructed(PluginFileInjector):
    plugin_name = 'constructed'
    namespace = 'ansible'

@@ -31,6 +31,7 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.notifications.awssns_backend import AWSSNSBackend


logger = logging.getLogger('awx.main.models.notifications')
@@ -40,6 +41,7 @@ __all__ = ['NotificationTemplate', 'Notification']

class NotificationTemplate(CommonModelNameNotUnique):
    NOTIFICATION_TYPES = [
        ('awssns', _('AWS SNS'), AWSSNSBackend),
        ('email', _('Email'), CustomEmailBackend),
        ('slack', _('Slack'), SlackBackend),
        ('twilio', _('Twilio'), TwilioBackend),
@@ -394,11 +396,11 @@ class JobNotificationMixin(object):
            'verbosity': 0,
        },
        'job_friendly_name': 'Job',
        'url': 'https://towerhost/#/jobs/playbook/1010',
        'url': 'https://platformhost/#/jobs/playbook/1010',
        'approval_status': 'approved',
        'approval_node_name': 'Approve Me',
        'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
        'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
        'workflow_url': 'https://platformhost/#/jobs/workflow/1010',
        'job_metadata': """{'url': 'https://platformhost/$/jobs/playbook/13',
        'traceback': '',
        'status': 'running',
        'started': '2019-08-07T21:46:38.362630+00:00',

@@ -10,6 +10,9 @@ import re
# django-rest-framework
from rest_framework.serializers import ValidationError

# crum to impersonate users
from crum import impersonate

# Django
from django.db import models, transaction, connection
from django.db.models.signals import m2m_changed
@@ -553,17 +556,22 @@ def get_role_definition(role):
        return
    f = obj._meta.get_field(role.role_field)
    action_name = f.name.rsplit("_", 1)[0]
    rd_name = f'{type(obj).__name__} {action_name.title()} Compat'
    model_print = type(obj).__name__
    rd_name = f'{model_print} {action_name.title()} Compat'
    perm_list = get_role_codenames(role)
    defaults = {'content_type_id': role.content_type_id}
    try:
        rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
    except ValidationError:
        # This is a tricky case - practically speaking, users should not be allowed to create team roles
        # or roles that include the team member permission.
        # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
        defaults['managed'] = True
        rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
    defaults = {
        'content_type_id': role.content_type_id,
        'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
    }
    with impersonate(None):
        try:
            rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
        except ValidationError:
            # This is a tricky case - practically speaking, users should not be allowed to create team roles
            # or roles that include the team member permission.
            # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
            defaults['managed'] = True
            rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
    return rd

@@ -583,14 +591,20 @@ def get_role_from_object_role(object_role):
        role_name = role_name.lower()
        model_cls = apps.get_model('main', target_model_name)
        target_model_name = get_type_for_model(model_cls)

        # exception cases completely specific to one model naming convention
        if target_model_name == 'notification_template':
            target_model_name = 'notification'  # total exception
            target_model_name = 'notification'
        elif target_model_name == 'workflow_job_template':
            target_model_name = 'workflow'

        role_name = f'{target_model_name}_admin_role'
    elif rd.name.endswith(' Admin'):
        # cases like "project-admin"
        role_name = 'admin_role'
    elif rd.name == 'Organization Audit':
        role_name = 'auditor_role'
    else:
        print(rd.name)
        model_name, role_name = rd.name.split()
        role_name = role_name.lower()
        role_name += '_role'
@@ -675,9 +689,15 @@ def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):

    for role_id in pk_set:
        if reverse:
            child_role = Role.objects.get(id=role_id)
            try:
                child_role = Role.objects.get(id=role_id)
            except Role.DoesNotExist:
                continue
        else:
            parent_role = Role.objects.get(id=role_id)
            try:
                parent_role = Role.objects.get(id=role_id)
            except Role.DoesNotExist:
                continue

        # To a fault, we want to avoid running this if triggered from implicit_parents management
        # we only want to do anything if we know for sure this is a non-implicit team role

@@ -17,7 +17,7 @@ from collections import OrderedDict

# Django
from django.conf import settings
from django.db import models, connection
from django.db import models, connection, transaction
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import gettext_lazy as _
from django.utils.timezone import now
@@ -31,6 +31,7 @@ from rest_framework.exceptions import ParseError
from polymorphic.models import PolymorphicModel

from ansible_base.lib.utils.models import prevent_search, get_type_for_model
from ansible_base.rbac import permission_registry

# AWX
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
@@ -197,9 +198,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn

    @classmethod
    def _submodels_with_roles(cls):
        ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
        ct_dict = ContentType.objects.get_for_models(*ujt_classes)
        return [ct.id for ct in ct_dict.values()]
        return [c for c in cls.__subclasses__() if permission_registry.is_registered(c)]

    @classmethod
    def accessible_pk_qs(cls, accessor, role_field):
@@ -215,8 +214,16 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn

        action = to_permissions[role_field]

        # Special condition for super auditor
        role_subclasses = cls._submodels_with_roles()
        role_cts = ContentType.objects.get_for_models(*role_subclasses).values()
        all_codenames = {f'{action}_{cls._meta.model_name}' for cls in role_subclasses}
        if not (all_codenames - accessor.singleton_permissions()):
            qs = cls.objects.filter(polymorphic_ctype__in=role_cts)
            return qs.values_list('id', flat=True)

        return (
            RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__startswith=action, content_type_id__in=cls._submodels_with_roles())
            RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__in=all_codenames, content_type_id__in=[ct.id for ct in role_cts])
            .values_list('object_id')
            .distinct()
        )
@@ -273,7 +280,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
        if new_next_schedule:
            if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
                return  # no-op, common for infrequent schedules
            self.next_schedule = new_next_schedule

            # If in a transaction, use select_for_update to lock the next schedule row, which
            # prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
            if transaction.get_autocommit():
                self.next_schedule = related_schedules.first()
            else:
                self.next_schedule = related_schedules.select_for_update().first()

            self.next_job_run = new_next_schedule.next_run
            self.save(update_fields=['next_schedule', 'next_job_run'])
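The locking decision above generalizes to any queryset; a minimal sketch of the same pattern, assuming only Django (the helper name is illustrative):

from django.db import transaction

def first_with_optional_lock(qs):
    # Outside a transaction each statement autocommits, so SELECT ... FOR UPDATE
    # would hold no lock anyway; inside one, lock the chosen row so a concurrent
    # delete cannot race the update that follows.
    if transaction.get_autocommit():
        return qs.first()
    return qs.select_for_update().first()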
@@ -823,7 +837,7 @@ class UnifiedJob(
                    update_fields.append(key)

        if parent_instance:
            if self.status in ('pending', 'waiting', 'running'):
            if self.status in ('pending', 'running'):
                if parent_instance.current_job != self:
                    parent_instance_set('current_job', self)
                # Update parent with all the 'good' states of its child
@@ -860,7 +874,7 @@ class UnifiedJob(
        # If this job already exists in the database, retrieve a copy of
        # the job in its prior state.
        # If update_fields are given without status, then that indicates no change
        if self.pk and ((not update_fields) or ('status' in update_fields)):
        if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
            self_before = self.__class__.objects.get(pk=self.pk)
            if self_before.status != self.status:
                status_before = self_before.status
@@ -902,7 +916,8 @@ class UnifiedJob(
            update_fields.append('elapsed')

        # Ensure that the job template information is current.
        if self.unified_job_template != self._get_parent_instance():
        # unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
        if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
            self.unified_job_template = self._get_parent_instance()
            if 'unified_job_template' not in update_fields:
                update_fields.append('unified_job_template')
@@ -915,8 +930,9 @@ class UnifiedJob(
        # Okay; we're done. Perform the actual save.
        result = super(UnifiedJob, self).save(*args, **kwargs)

        # If status changed, update the parent instance.
        if self.status != status_before:
        # If status changed, update the parent instance
        # unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
        if self.status != status_before and self.status != 'waiting':
            # Update parent outside of the transaction for Job w/ allow_simultaneous=True
            # This dodges lock contention at the expense of the foreign key not being
            # completely correct.

awx/main/notifications/awssns_backend.py (new file, 70 lines)
@@ -0,0 +1,70 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
import logging

import boto3
from botocore.exceptions import ClientError

from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase

logger = logging.getLogger('awx.main.notifications.awssns_backend')
WEBSOCKET_TIMEOUT = 30


class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
    init_parameters = {
        "aws_region": {"label": "AWS Region", "type": "string", "default": ""},
        "aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
        "aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
        "aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
        "sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
    }
    recipient_parameter = "sns_topic_arn"
    sender_parameter = None

    DEFAULT_BODY = "{{ job_metadata }}"
    default_messages = CustomNotificationBase.job_metadata_messages

    def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
        session = boto3.session.Session()
        client_config = {"service_name": 'sns'}
        if aws_region:
            client_config["region_name"] = aws_region
        if aws_secret_access_key:
            client_config["aws_secret_access_key"] = aws_secret_access_key
        if aws_access_key_id:
            client_config["aws_access_key_id"] = aws_access_key_id
        if aws_session_token:
            client_config["aws_session_token"] = aws_session_token
        self.client = session.client(**client_config)
        super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)

    def _sns_publish(self, topic_arn, message):
        self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})

    def format_body(self, body):
        if isinstance(body, str):
            try:
                body = json.loads(body)
            except json.JSONDecodeError:
                pass

        if isinstance(body, dict):
            body = json.dumps(body)  # convert dict body to json string
        return body

    def send_messages(self, messages):
        sent_messages = 0
        for message in messages:
            sns_topic_arn = str(message.recipients()[0])
            try:
                self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
                sent_messages += 1
            except ClientError as error:
                if not self.fail_silently:
                    raise error

        return sent_messages
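A minimal sketch (not part of the diff) of how this backend can be exercised directly. The credential values and topic ARN are hypothetical, and the mock stands in for the message objects the notification system normally supplies; only the constructor signature and the send_messages() contract come from the file above, and this is not meant to run against AWS as-is:

from unittest import mock

from awx.main.notifications.awssns_backend import AWSSNSBackend

# Hypothetical credentials, for illustration only.
backend = AWSSNSBackend(
    aws_region='us-east-1',
    aws_access_key_id='AKIAEXAMPLE',
    aws_secret_access_key='secret-example',
    aws_session_token='',
    fail_silently=True,
)

# send_messages() only needs objects exposing recipients() and a body attribute.
message = mock.Mock()
message.recipients.return_value = ['arn:aws:sns:us-east-1:123456789012:awx-notifications']
message.body = '{"status": "running"}'

sent = backend.send_messages([message])  # returns the count of published messages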
@@ -32,3 +32,15 @@ class CustomNotificationBase(object):
            "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
        },
    }

    job_metadata_messages = {
        "started": {"body": "{{ job_metadata }}"},
        "success": {"body": "{{ job_metadata }}"},
        "error": {"body": "{{ job_metadata }}"},
        "workflow_approval": {
            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
            "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
            "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
            "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
        },
    }

@@ -27,17 +27,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
    sender_parameter = None

    DEFAULT_BODY = "{{ job_metadata }}"
    default_messages = {
        "started": {"body": DEFAULT_BODY},
        "success": {"body": DEFAULT_BODY},
        "error": {"body": DEFAULT_BODY},
        "workflow_approval": {
            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
            "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
            "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
            "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
        },
    }
    default_messages = CustomNotificationBase.job_metadata_messages

    def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
        self.http_method = http_method

@@ -63,6 +63,10 @@ websocket_urlpatterns = [
    re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
    re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
]

if settings.OPTIONAL_API_URLPATTERN_PREFIX:
    websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))

websocket_relay_urlpatterns = [
    re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
]
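For illustration, with settings.OPTIONAL_API_URLPATTERN_PREFIX = 'controller' (a hypothetical value), the appended pattern above is equivalent to:

re_path(r'api/controller/v2/websocket/$', consumers.EventConsumer.as_asgi())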
@@ -138,7 +138,8 @@ class TaskBase:

        # Lock
        with task_manager_bulk_reschedule():
            with advisory_lock(f"{self.prefix}_lock", wait=False) as acquired:
            lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000  # convert to milliseconds
            with advisory_lock(f"{self.prefix}_lock", lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
                with transaction.atomic():
                    if acquired is False:
                        logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
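The same non-blocking advisory-lock pattern recurs in the tasks below; a condensed sketch of it, assuming the advisory_lock context manager used in this diff (the import path and helper name here are assumptions for illustration):

from awx.main.utils.pglock import advisory_lock  # import path assumed

def run_exclusive(prefix, timeout_seconds):
    # wait=False returns immediately instead of blocking when the lock is held;
    # the session timeout lets PostgreSQL reap the lock if its holder dies.
    with advisory_lock(f"{prefix}_lock", lock_session_timeout_milliseconds=timeout_seconds * 1000, wait=False) as acquired:
        if acquired is False:
            return False  # another instance holds the lock; skip this run
        # ... do the exclusive work here ...
        return True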
@@ -405,10 +405,11 @@ class AWXReceptorJob:
        finally:
            # Make sure to always release the work unit if we established it
            if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
                try:
                    receptor_ctl.simple_command(f"work release {self.unit_id}")
                except Exception:
                    logger.exception(f"Error releasing work unit {self.unit_id}.")
                if settings.RECEPTOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error':
                    try:
                        receptor_ctl.simple_command(f"work release {self.unit_id}")
                    except Exception:
                        logger.exception(f"Error releasing work unit {self.unit_id}.")

    def _run_internal(self, receptor_ctl):
        # Create a socketpair. Where the left side will be used for writing our payload

@@ -36,6 +36,9 @@ import ansible_runner.cleanup
# dateutil
from dateutil.parser import parse as parse_date

# django-ansible-base
from ansible_base.resource_registry.tasks.sync import SyncExecutor

# AWX
from awx import __version__ as awx_application_version
from awx.main.access import access_registry
@@ -51,7 +54,7 @@ from awx.main.models import (
    Job,
    convert_jsonfields,
)
from awx.main.constants import ACTIVE_STATES
from awx.main.constants import ACTIVE_STATES, ERROR_STATES
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_task_queuename, reaper
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
@@ -682,6 +685,8 @@ def awx_receptor_workunit_reaper():

    unit_ids = [id for id in receptor_work_list]
    jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
    if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
        jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
    for job in jobs_with_unreleased_receptor_units:
        logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
        receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
@@ -701,7 +706,10 @@ def awx_k8s_reaper():
        logger.debug("Checking for orphaned k8s pods for {}.".format(group))
        pods = PodManager.list_active_jobs(group)
        time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
        for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
        reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
        if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
            reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
        for job in reap_job_candidates:
            logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
            try:
                pm = PodManager(job)
@@ -712,7 +720,8 @@

@task(queue=get_task_queuename)
def awx_periodic_scheduler():
    with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
    lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
    with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
        if acquired is False:
            logger.debug("Not running periodic scheduler, another task holds lock")
            return
@@ -964,3 +973,27 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
    permission_check_func(creater, copy_mapping.values())
    if isinstance(new_obj, Inventory):
        update_inventory_computed_fields.delay(new_obj.id)


@task(queue=get_task_queuename)
def periodic_resource_sync():
    if not getattr(settings, 'RESOURCE_SERVER', None):
        logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
        return

    with advisory_lock('periodic_resource_sync', wait=False) as acquired:
        if acquired is False:
            logger.debug("Not running periodic_resource_sync, another task holds lock")
            return
        logger.debug("Running periodic resource sync")

        executor = SyncExecutor()
        executor.run()
        for key, item_list in executor.results.items():
            if not item_list or key == 'noop':
                continue
            # Log creations and conflicts
            if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
                logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
            else:
                logger.info(f'Periodic resource sync {key}:\n{item_list}')

@@ -0,0 +1,5 @@
{
    "K8S_AUTH_HOST": "https://foo.invalid",
    "K8S_AUTH_API_KEY": "fooo",
    "K8S_AUTH_VERIFY_SSL": "False"
}
@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
    role_pk = inventory.admin_role.pk
    data = {"id": role_pk}
    mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
    with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
        post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
    mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
    post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
    mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)

@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
    role_pk = inventory.admin_role.pk
    data = {"id": role_pk}
    mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
    with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
        post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
    mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
    post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
    mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)

@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
    role_pk = inventory.admin_role.pk
    data = {"id": team.pk}
    mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
    with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
        post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
    mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
    post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
    mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)

@@ -30,7 +30,7 @@ def test_idempotent_credential_type_setup():


@pytest.mark.django_db
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh):
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
    params = {
        'credential_type': 1,
        'inputs': {'username': 'someusername'},
@@ -81,7 +81,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali


@pytest.mark.django_db
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh):
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
    params = {
        'credential_type': 1,
        'inputs': {'username': 'someusername'},

@@ -1,22 +1,30 @@
import pytest
from unittest import mock

from awx.api.versioning import reverse

from django.test.utils import override_settings

from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import


class HeaderTrackingMiddleware(object):
    def __init__(self):
        self.environ = {}

    def process_request(self, request):
        pass

    def process_response(self, request, response):
        self.environ = request.environ


@pytest.mark.django_db
def test_proxy_ip_allowed(get, patch, admin):
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
    patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})

    class HeaderTrackingMiddleware(object):
        environ = {}

        def process_request(self, request):
            pass

        def process_response(self, request, response):
            self.environ = request.environ

    # By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
    # should just pass through
    middleware = HeaderTrackingMiddleware()
@@ -45,6 +53,51 @@ def test_proxy_ip_allowed(get, patch, admin):
    assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'


@pytest.mark.django_db
class TestTrustedProxyAllowListIntegration:
    @pytest.fixture
    def url(self, patch, admin):
        url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
        patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
        patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
        return url

    @pytest.fixture
    def middleware(self):
        return HeaderTrackingMiddleware()

    def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware):  # noqa: F811
        # Headers should NOT get deleted
        headers = {
            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
        }
        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
                get(url, user=admin, middleware=middleware, **headers)
                assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'

    def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
        # Headers should NOT get deleted
        headers = {
            'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
        }
        with override_settings(PROXY_IP_ALLOWED_LIST=[]):
            get(url, user=admin, middleware=middleware, **headers)
            assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'

    def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
        # A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
        headers = {
            'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
            'REMOTE_ADDR': 'my.proxy.example.org',
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
        }
        get(url, user=admin, middleware=middleware, **headers)
        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'

@pytest.mark.django_db
class TestDeleteViews:
    def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
awx/main/tests/functional/api/test_immutablesharedfields.py (new file, 66 lines)
@@ -0,0 +1,66 @@
import pytest

from awx.api.versioning import reverse
from awx.main.models import Organization


@pytest.mark.django_db
class TestImmutableSharedFields:
    @pytest.fixture(autouse=True)
    def configure_settings(self, settings):
        settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False

    def test_create_raises_permission_denied(self, admin_user, post):
        orgA = Organization.objects.create(name='orgA')
        resp = post(
            url=reverse('api:team_list'),
            data={'name': 'teamA', 'organization': orgA.id},
            user=admin_user,
            expect=403,
        )
        assert "Creation of this resource is not allowed" in resp.data['detail']

    def test_perform_delete_raises_permission_denied(self, admin_user, delete):
        orgA = Organization.objects.create(name='orgA')
        team = orgA.teams.create(name='teamA')
        resp = delete(
            url=reverse('api:team_detail', kwargs={'pk': team.id}),
            user=admin_user,
            expect=403,
        )
        assert "Deletion of this resource is not allowed" in resp.data['detail']

    def test_perform_update(self, admin_user, patch):
        orgA = Organization.objects.create(name='orgA')
        team = orgA.teams.create(name='teamA')
        # allow patching non-shared fields
        patch(
            url=reverse('api:team_detail', kwargs={'pk': team.id}),
            data={"description": "can change this field"},
            user=admin_user,
            expect=200,
        )
        orgB = Organization.objects.create(name='orgB')
        # prevent patching shared fields
        resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
        assert "Cannot change shared field" in resp.data['organization']

    @pytest.mark.parametrize(
        'role',
        ['admin_role', 'member_role'],
    )
    @pytest.mark.parametrize('resource', ['organization', 'team'])
    def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
        orgA = Organization.objects.create(name='orgA')
        if resource == 'organization':
            role = getattr(orgA, role)
        elif resource == 'team':
            teamA = orgA.teams.create(name='teamA')
            role = getattr(teamA, role)
        resp = post(
            url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
            data={'id': role.id},
            user=admin_user,
            expect=403,
        )
        assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
@@ -32,13 +32,6 @@ def node_type_instance():
    return fn


@pytest.fixture
def instance_group(job_factory):
    ig = InstanceGroup(name="east")
    ig.save()
    return ig


@pytest.fixture
def containerized_instance_group(instance_group, kube_credential):
    ig = InstanceGroup(name="container")

@@ -131,11 +131,11 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad

    mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)

    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
            assert JobTemplate.create_unified_job.called
            assert JobTemplate.create_unified_job.call_args == ()
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
    response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
    assert JobTemplate.create_unified_job.called
    assert JobTemplate.create_unified_job.call_args == ()

    # Check that job is serialized correctly
    job_id = response.data['job']
@@ -167,12 +167,12 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi

    mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)

    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
            assert JobTemplate.create_unified_job.called
            called_with = data_to_internal(runtime_data)
            JobTemplate.create_unified_job.assert_called_with(**called_with)
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
    response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
    assert JobTemplate.create_unified_job.called
    called_with = data_to_internal(runtime_data)
    JobTemplate.create_unified_job.assert_called_with(**called_with)

    job_id = response.data['job']
    assert job_id == 968
@@ -187,11 +187,11 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):

    mock_job = mocker.MagicMock(spec=Job, id=968)

    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
            assert JobTemplate.create_unified_job.called
            assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
    post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
    assert JobTemplate.create_unified_job.called
    assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)

    mock_job.signal_start.assert_called_once()

@@ -203,14 +203,14 @@ def test_slice_timeout_forks_need_int(job_template_prompts, post, admin_user, mo

    mock_job = mocker.MagicMock(spec=Job, id=968)

    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(
                reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
            )
            assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
            assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
            assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
    response = post(
        reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
    )
    assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
    assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
    assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'


@pytest.mark.django_db
@@ -244,12 +244,12 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,

    mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)

    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
            assert JobTemplate.create_unified_job.called
            expected_call = data_to_internal(runtime_data)
            assert JobTemplate.create_unified_job.call_args == (expected_call,)
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
    response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
    assert JobTemplate.create_unified_job.called
    expected_call = data_to_internal(runtime_data)
    assert JobTemplate.create_unified_job.call_args == (expected_call,)

    job_id = response.data['job']
    assert job_id == 968
@@ -641,18 +641,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
    job_template.survey_spec = survey_spec_factory('survey_var')
    job_template.save()

    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
        with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
                response = post(
                    reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
                    dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
                    admin_user,
                    expect=201,
                )
                assert JobTemplate.create_unified_job.called
                assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
    mocker.patch('awx.main.access.BaseAccess.check_license')
    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
    response = post(
        reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
        admin_user,
        expect=201,
    )
    assert JobTemplate.create_unified_job.called
    assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)

    job_id = response.data['job']
    assert job_id == 968
@@ -670,22 +670,22 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
    job_template.survey_spec = survey_spec_factory('survey_var')
    job_template.save()

    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
        with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
                    post(
                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
                        admin_user,
                        expect=201,
                        format='json',
                    )
                    assert UnifiedJobTemplate.create_unified_job.called
                    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
                    call_args.pop('_eager_fields', None)  # internal purposes
                    assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
    mocker.patch('awx.main.access.BaseAccess.check_license')
    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
    mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
    mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
    post(
        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
        admin_user,
        expect=201,
        format='json',
    )
    assert UnifiedJobTemplate.create_unified_job.called
    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
    call_args.pop('_eager_fields', None)  # internal purposes
    assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}

    mock_job.signal_start.assert_called_once()

@@ -697,22 +697,22 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
    job_template.host_config_key = "foo"
    job_template.save()

    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
        with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
                    post(
                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
                        admin_user,
                        expect=201,
                        format='json',
                    )
                    assert UnifiedJobTemplate.create_unified_job.called
                    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
                    call_args.pop('_eager_fields', None)  # internal purposes
                    assert call_args == {'limit': 'single-host'}
    mocker.patch('awx.main.access.BaseAccess.check_license')
    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
    mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
    mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
    post(
        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
        admin_user,
        expect=201,
        format='json',
    )
    assert UnifiedJobTemplate.create_unified_job.called
    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
    call_args.pop('_eager_fields', None)  # internal purposes
    assert call_args == {'limit': 'single-host'}

    mock_job.signal_start.assert_called_once()

@@ -725,9 +725,9 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
    job_template.save()
    host_with_alias = Host(name='localhost', inventory=job_template.inventory)
    host_with_alias.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
        assert tuple(r.data['matching_hosts']) == ('localhost',)
    mocker.patch('awx.main.access.BaseAccess.check_license')
    r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
    assert tuple(r.data['matching_hosts']) == ('localhost',)


@pytest.mark.django_db
@@ -738,6 +738,6 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
    job_template.save()
    host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
    host_with_alias.save()
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
        assert not r.data['matching_hosts']
    mocker.patch('awx.main.access.BaseAccess.check_license')
    r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
    assert not r.data['matching_hosts']

@@ -1,4 +1,5 @@
import pytest
from unittest import mock

# AWX
from awx.api.serializers import JobTemplateSerializer
@@ -8,10 +9,15 @@ from awx.main.migrations import _save_password_keys as save_password_keys

# Django
from django.apps import apps
from django.test.utils import override_settings

# DRF
from rest_framework.exceptions import ValidationError

# DAB
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import

@pytest.mark.django_db
@pytest.mark.parametrize(
@@ -369,3 +375,113 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
    )
    assert r.status_code == 400
    assert "Cannot start automatically, an inventory is required." in str(r.data)


@pytest.mark.django_db
class TestJobTemplateCallbackProxyIntegration:
    """
    Test the interaction of provision job template callback feature and:
        settings.PROXY_IP_ALLOWED_LIST
        x-trusted-proxy http header
    """

    @pytest.fixture
    def job_template(self, inventory, project):
        jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
        return jt

    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
    def test_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
        job_template.inventory.hosts.create(name='foobar')

        headers = {
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
            'REMOTE_HOST': 'baz',
            'REMOTE_ADDR': 'baz',
        }
        r = post(
            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
        )
        assert r.data['msg'] == 'No matching host could be found!'

    @pytest.mark.parametrize(
        'headers, expected',
        (
            pytest.param(
                {
                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
                    'REMOTE_HOST': 'my.proxy.example.org',
                },
                201,
            ),
            pytest.param(
                {
                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
                    'REMOTE_HOST': 'not-my-proxy.org',
                },
                400,
            ),
        ),
    )
    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
    def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected):  # noqa: F811
        job_template.inventory.hosts.create(name='my.proxy.example.org')

        post(
            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
            data={'host_config_key': 'abcd'},
            user=admin_user,
            expect=expected,
            **headers
        )
    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
    def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
        job_template.inventory.hosts.create(name='foobar')

        headers = {
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
            'REMOTE_ADDR': 'bar',
            'REMOTE_HOST': 'baz',
        }
        post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)

    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
    def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
        job_template.inventory.hosts.create(name='foobar')

        headers = {
            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
        }

        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
                post(
                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                    data={'host_config_key': 'abcd'},
                    user=admin_user,
                    expect=201,
                    **headers
                )

    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
    def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
        job_template.inventory.hosts.create(name='foobar')

        headers = {
            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
            'REMOTE_ADDR': 'bar',
            'REMOTE_HOST': 'baz',
        }

        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
                post(
                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
                    data={'host_config_key': 'abcd'},
                    user=admin_user,
                    expect=400,
                    **headers
                )

@@ -165,8 +165,8 @@ class TestAccessListCapabilities:
|
||||
def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
|
||||
        inventory.admin_role.members.add(rando)

-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)

        mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
        self._assert_one_in_list(response.data)

@@ -174,8 +174,8 @@ class TestAccessListCapabilities:
        assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'

    def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)

        mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
        self._assert_one_in_list(response.data, sublist='indirect_access')

@@ -185,8 +185,8 @@ class TestAccessListCapabilities:
    def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
        team.member_role.children.add(inventory.admin_role)

-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)

        mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
        self._assert_one_in_list(response.data)

@@ -198,8 +198,8 @@ class TestAccessListCapabilities:
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
    team.member_role.children.add(inventory.admin_role)

-    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-        response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
+    mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+    response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)

    # Did we assess whether team_member can remove team's permission to the inventory?
    mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})

@@ -212,8 +212,8 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
    organization.member_role.members.add(alice)
    organization.member_role.members.add(bob)

-    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-        response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
+    mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+    response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)

    # Did we assess whether bob can remove alice's permission to the inventory?
    mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
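The recurring change above (and in most hunks that follow) drops context-manager use of pytest-mock: `mocker.patch` / `mocker.patch.object` take effect immediately and are undone automatically at test teardown, and recent pytest-mock releases reject the `with mocker.patch(...)` form outright. A minimal sketch of the new pattern; the module `mypkg.tasks` is hypothetical, used only for illustration:

# test_send.py -- sketch assuming pytest-mock is installed
import mypkg.tasks  # hypothetical module exposing send()


def test_send_called(mocker):
    # No `with` needed: the patch stays active for the rest of the test
    # and pytest-mock reverts it automatically at teardown.
    mock_send = mocker.patch.object(mypkg.tasks, 'send', return_value=True)

    mypkg.tasks.send('hello')

    mock_send.assert_called_once_with('hello')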
File diff suppressed because one or more lines are too long
@@ -43,9 +43,9 @@ def run_command(name, *args, **options):
    ],
)
def test_update_password_command(mocker, username, password, expected, changed):
-    with mocker.patch.object(UpdatePassword, 'update_password', return_value=changed):
-        result, stdout, stderr = run_command('update_password', username=username, password=password)
-        if result is None:
-            assert stdout == expected
-        else:
-            assert str(result) == expected
+    mocker.patch.object(UpdatePassword, 'update_password', return_value=changed)
+    result, stdout, stderr = run_command('update_password', username=username, password=password)
+    if result is None:
+        assert stdout == expected
+    else:
+        assert str(result) == expected
@@ -16,9 +16,11 @@ from django.db.backends.sqlite3.base import SQLiteCursorWrapper

+from django.db.models.signals import post_migrate
+
+from awx.main.migrations._dab_rbac import setup_managed_role_definitions

# AWX
from awx.main.models.projects import Project
-from awx.main.models.ha import Instance
+from awx.main.models.ha import Instance, InstanceGroup

from rest_framework.test import (
    APIRequestFactory,

@@ -90,6 +92,17 @@ def deploy_jobtemplate(project, inventory, credential):
    return jt


+@pytest.fixture()
+def execution_environment():
+    return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", managed=True)
+
+
+@pytest.fixture
+def setup_managed_roles():
+    "Run the migration script to pre-create managed role definitions"
+    setup_managed_role_definitions(apps, None)
+
+
@pytest.fixture
def team(organization):
    return organization.teams.create(name='test-team')

@@ -722,6 +735,11 @@ def jt_linked(organization, project, inventory, machine_credential, credential,
    return jt


+@pytest.fixture
+def instance_group():
+    return InstanceGroup.objects.create(name="east")
+
+
@pytest.fixture
def workflow_job_template(organization):
    wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)
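The new `setup_managed_roles` fixture above (which supersedes the per-directory `managed_roles` fixture removed in the next hunk) lets a test opt in to the managed DAB role definitions that the post_migrate handler would normally create. A hypothetical test using it, consistent with the role names seen later in this rollup:

import pytest

from ansible_base.rbac.models import RoleDefinition


@pytest.mark.django_db
def test_inventory_admin_definition_exists(setup_managed_roles):
    # The fixture pre-creates the managed role definitions, so lookups
    # by their well-known names succeed inside the test database.
    assert RoleDefinition.objects.filter(name='Inventory Admin').exists()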
@@ -1,10 +0,0 @@
-import pytest
-from django.apps import apps
-
-from awx.main.migrations._dab_rbac import setup_managed_role_definitions
-
-
-@pytest.fixture
-def managed_roles():
-    "Run the migration script to pre-create managed role definitions"
-    setup_managed_role_definitions(apps, None)
@@ -109,3 +109,17 @@ def test_team_indirect_access(get, team, admin_user, inventory):
    assert len(by_username['u1']['summary_fields']['indirect_access']) == 0
    access_entry = by_username['u1']['summary_fields']['direct_access'][0]
    assert sorted(access_entry['descendant_roles']) == sorted(['adhoc_role', 'use_role', 'update_role', 'read_role', 'admin_role'])
+
+
+@pytest.mark.django_db
+def test_workflow_access_list(workflow_job_template, alice, bob, setup_managed_roles, get, admin_user):
+    """Basic verification that WFJT access_list is functional"""
+    workflow_job_template.admin_role.members.add(alice)
+    workflow_job_template.organization.workflow_admin_role.members.add(bob)
+
+    url = reverse('api:workflow_job_template_access_list', kwargs={'pk': workflow_job_template.pk})
+    for u in (alice, bob, admin_user):
+        response = get(url, user=u, expect=200)
+        user_ids = [item['id'] for item in response.data['results']]
+        assert alice.pk in user_ids
+        assert bob.pk in user_ids
@@ -0,0 +1,41 @@
+import pytest
+
+from awx.main.access import InstanceGroupAccess, NotificationTemplateAccess
+
+from ansible_base.rbac.models import RoleDefinition
+
+
+@pytest.mark.django_db
+def test_instance_group_object_role_delete(rando, instance_group, setup_managed_roles):
+    """Basic functionality of IG object-level admin role function AAP-25506"""
+    rd = RoleDefinition.objects.get(name='InstanceGroup Admin')
+    rd.give_permission(rando, instance_group)
+    access = InstanceGroupAccess(rando)
+    assert access.can_delete(instance_group)
+
+
+@pytest.mark.django_db
+def test_notification_template_object_role_change(rando, notification_template, setup_managed_roles):
+    """Basic functionality of NT object-level admin role function AAP-25493"""
+    rd = RoleDefinition.objects.get(name='NotificationTemplate Admin')
+    rd.give_permission(rando, notification_template)
+    access = NotificationTemplateAccess(rando)
+    assert access.can_change(notification_template, {'name': 'new name'})
+
+
+@pytest.mark.django_db
+def test_organization_auditor_role(rando, setup_managed_roles, organization, inventory, project, jt_linked):
+    obj_list = (inventory, project, jt_linked)
+    for obj in obj_list:
+        assert obj.organization == organization, obj  # sanity
+
+    assert [rando.has_obj_perm(obj, 'view') for obj in obj_list] == [False for i in range(3)], obj_list
+
+    rd = RoleDefinition.objects.get(name='Organization Audit')
+    rd.give_permission(rando, organization)
+
+    codename_set = set(rd.permissions.values_list('codename', flat=True))
+    assert not ({'view_inventory', 'view_jobtemplate', 'audit_organization'} - codename_set)  # sanity
+
+    assert [obj in type(obj).access_qs(rando) for obj in obj_list] == [True for i in range(3)], obj_list
+    assert [rando.has_obj_perm(obj, 'view') for obj in obj_list] == [True for i in range(3)], obj_list
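All three tests above follow the same object-role flow: fetch a managed RoleDefinition by name, grant it on a single object, then verify through the access classes or permission checks. A condensed sketch of that flow (the helper name is ours, not AWX's; assumes a migrated test database):

from ansible_base.rbac.models import RoleDefinition


def grant_and_check(user, obj, role_name, codename):
    # e.g. grant_and_check(rando, instance_group, 'InstanceGroup Admin', 'delete')
    rd = RoleDefinition.objects.get(name=role_name)
    rd.give_permission(user, obj)  # object-scoped assignment
    return user.has_obj_perm(obj, codename)  # True once the role applies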
@@ -1,45 +0,0 @@
-import pytest
-from django.apps import apps
-from django.test.utils import override_settings
-
-from awx.main.migrations._dab_rbac import setup_managed_role_definitions
-
-from ansible_base.rbac.models import RoleDefinition
-
-INVENTORY_OBJ_PERMISSIONS = ['view_inventory', 'adhoc_inventory', 'use_inventory', 'change_inventory', 'delete_inventory', 'update_inventory']
-
-
-@pytest.mark.django_db
-def test_managed_definitions_precreate():
-    with override_settings(
-        ANSIBLE_BASE_ROLE_PRECREATE={
-            'object_admin': '{cls._meta.model_name}-admin',
-            'org_admin': 'organization-admin',
-            'org_children': 'organization-{cls._meta.model_name}-admin',
-            'special': '{cls._meta.model_name}-{action}',
-        }
-    ):
-        setup_managed_role_definitions(apps, None)
-    rd = RoleDefinition.objects.get(name='inventory-admin')
-    assert rd.managed is True
-    # add permissions do not go in the object-level admin
-    assert set(rd.permissions.values_list('codename', flat=True)) == set(INVENTORY_OBJ_PERMISSIONS)
-
-    # test org-level object admin permissions
-    rd = RoleDefinition.objects.get(name='organization-inventory-admin')
-    assert rd.managed is True
-    assert set(rd.permissions.values_list('codename', flat=True)) == set(['add_inventory', 'view_organization'] + INVENTORY_OBJ_PERMISSIONS)
-
-
-@pytest.mark.django_db
-def test_managed_definitions_custom_obj_admin_name():
-    with override_settings(
-        ANSIBLE_BASE_ROLE_PRECREATE={
-            'object_admin': 'foo-{cls._meta.model_name}-foo',
-        }
-    ):
-        setup_managed_role_definitions(apps, None)
-    rd = RoleDefinition.objects.get(name='foo-inventory-foo')
-    assert rd.managed is True
-    # add permissions do not go in the object-level admin
-    assert set(rd.permissions.values_list('codename', flat=True)) == set(INVENTORY_OBJ_PERMISSIONS)
@@ -2,15 +2,17 @@ import pytest

from django.contrib.contenttypes.models import ContentType
from django.urls import reverse as django_reverse
+from django.test.utils import override_settings

from awx.api.versioning import reverse
from awx.main.models import JobTemplate, Inventory, Organization
+from awx.main.access import JobTemplateAccess, WorkflowJobTemplateAccess

from ansible_base.rbac.models import RoleDefinition


@pytest.mark.django_db
-def test_managed_roles_created(managed_roles):
+def test_managed_roles_created(setup_managed_roles):
    "Managed RoleDefinitions are created in post_migration signal, we expect to see them here"
    for cls in (JobTemplate, Inventory):
        ct = ContentType.objects.get_for_model(cls)

@@ -22,7 +24,7 @@ def test_managed_roles_created(managed_roles):


@pytest.mark.django_db
-def test_custom_read_role(admin_user, post, managed_roles):
+def test_custom_read_role(admin_user, post, setup_managed_roles):
    rd_url = django_reverse('roledefinition-list')
    resp = post(
        url=rd_url, data={"name": "read role made for test", "content_type": "awx.inventory", "permissions": ['view_inventory']}, user=admin_user, expect=201

@@ -40,7 +42,7 @@ def test_custom_system_roles_prohibited(admin_user, post):


@pytest.mark.django_db
-def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post, managed_roles):
+def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post, setup_managed_roles):
    "Alice can not see rando, and so can not give them a role assignment"
    rd = RoleDefinition.objects.get(name='Inventory Admin')
    rd.give_permission(alice, inventory)

@@ -51,7 +53,7 @@ def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post,


@pytest.mark.django_db
-def test_assign_managed_role(admin_user, alice, rando, inventory, post, managed_roles, organization):
+def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_managed_roles, organization):
    rd = RoleDefinition.objects.get(name='Inventory Admin')
    rd.give_permission(alice, inventory)
    # When alice and rando are members of the same org, they can see each other

@@ -66,19 +68,23 @@ def test_assign_managed_role(admin_user, alice, rando, inventory, post, managed_

@pytest.mark.django_db
def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):
+    # TODO: just a delete_inventory, without change_inventory
    rd, _ = RoleDefinition.objects.get_or_create(
-        name='inventory-delete', permissions=['delete_inventory', 'view_inventory'], content_type=ContentType.objects.get_for_model(Inventory)
+        name='inventory-delete',
+        permissions=['delete_inventory', 'view_inventory', 'change_inventory'],
+        content_type=ContentType.objects.get_for_model(Inventory),
    )
    rd.give_permission(rando, inventory)
    inv_id = inventory.pk
    inv_url = reverse('api:inventory_detail', kwargs={'pk': inv_id})
-    patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
+    # TODO: eventually this will be valid test, for now ignore
+    # patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
    delete(url=inv_url, user=rando, expect=202)
    assert Inventory.objects.get(id=inv_id).pending_deletion


@pytest.mark.django_db
-def test_assign_custom_add_role(admin_user, rando, organization, post, managed_roles):
+def test_assign_custom_add_role(admin_user, rando, organization, post, setup_managed_roles):
    rd, _ = RoleDefinition.objects.get_or_create(
        name='inventory-add', permissions=['add_inventory', 'view_organization'], content_type=ContentType.objects.get_for_model(Organization)
    )

@@ -88,3 +94,63 @@ def test_assign_custom_add_role(admin_user, rando, organization, post, managed_r
    inv_id = r.data['id']
    inventory = Inventory.objects.get(id=inv_id)
    assert rando.has_obj_perm(inventory, 'change')
+
+
+@pytest.mark.django_db
+def test_jt_creation_permissions(setup_managed_roles, inventory, project, rando):
+    """This tests that if you assign someone required permissions in the new API
+    using the managed roles, then that works to give permissions to create a job template"""
+    inv_rd = RoleDefinition.objects.get(name='Inventory Admin')
+    proj_rd = RoleDefinition.objects.get(name='Project Admin')
+    # establish prior state
+    access = JobTemplateAccess(rando)
+    assert not access.can_add({'inventory': inventory.pk, 'project': project.pk, 'name': 'foo-jt'})
+
+    inv_rd.give_permission(rando, inventory)
+    proj_rd.give_permission(rando, project)
+
+    assert access.can_add({'inventory': inventory.pk, 'project': project.pk, 'name': 'foo-jt'})
+
+
+@pytest.mark.django_db
+def test_workflow_creation_permissions(setup_managed_roles, organization, workflow_job_template, rando):
+    """Similar to JT, assigning new roles gives creator permissions"""
+    org_wf_rd = RoleDefinition.objects.get(name='Organization WorkflowJobTemplate Admin')
+    assert workflow_job_template.organization == organization  # sanity
+    # establish prior state
+    access = WorkflowJobTemplateAccess(rando)
+    assert not access.can_add({'name': 'foo-flow', 'organization': organization.pk})
+    org_wf_rd.give_permission(rando, organization)
+
+    assert access.can_add({'name': 'foo-flow', 'organization': organization.pk})
+
+
+@pytest.mark.django_db
+def test_assign_credential_to_user_of_another_org(setup_managed_roles, credential, admin_user, rando, org_admin, organization, post):
+    '''Test that a credential can only be assigned to a user in the same organization'''
+    # cannot assign credential to rando, as rando is not in the same org as the credential
+    rd = RoleDefinition.objects.get(name="Credential Admin")
+    credential.organization = organization
+    credential.save(update_fields=['organization'])
+    assert credential.organization not in Organization.access_qs(rando, 'member')
+    url = django_reverse('roleuserassignment-list')
+    resp = post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=400)
+    assert "You cannot grant credential access to a User not in the credentials' organization" in str(resp.data)
+
+    # can assign credential to superuser
+    rando.is_superuser = True
+    rando.save()
+    post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
+
+    # can assign credential to org_admin
+    assert credential.organization in Organization.access_qs(org_admin, 'member')
+    post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
+
+
+@pytest.mark.django_db
+@override_settings(ALLOW_LOCAL_RESOURCE_MANAGEMENT=False)
+def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles):
+    member_rd = RoleDefinition.objects.get(name='Organization Member')
+    url = django_reverse('roleuserassignment-list')
+    r = post(url, data={'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}, user=admin_user, expect=400)
+    assert 'Not managed locally' in str(r.data)
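For orientation, the POSTs in this file go to django-ansible-base's generic RBAC endpoints; the request bodies have the shapes below (the ids are hypothetical placeholders, not values from the tests):

# POST to django_reverse('roledefinition-list'):
custom_role = {
    "name": "read role made for test",
    "content_type": "awx.inventory",  # app_label.model
    "permissions": ["view_inventory"],  # permission codenames
}

# POST to django_reverse('roleuserassignment-list'):
assignment = {
    "user": 42,  # hypothetical user pk
    "role_definition": 7,  # hypothetical RoleDefinition pk
    "object_id": 3,  # pk of the object being shared
}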
awx/main/tests/functional/dab_rbac/test_external_auditor.py (new file, 120 lines)
@@ -0,0 +1,120 @@
+import pytest
+
+from django.apps import apps
+
+from ansible_base.rbac.managed import SystemAuditor
+from ansible_base.rbac import permission_registry
+
+from awx.main.access import check_user_access, get_user_queryset
+from awx.main.models import User, AdHocCommandEvent
+from awx.api.versioning import reverse
+
+
+@pytest.fixture
+def ext_auditor_rd():
+    info = SystemAuditor(overrides={'name': 'Alien Auditor', 'shortname': 'ext_auditor'})
+    rd, _ = info.get_or_create(apps)
+    return rd
+
+
+@pytest.fixture
+def ext_auditor(ext_auditor_rd):
+    u = User.objects.create(username='external-auditor-user')
+    ext_auditor_rd.give_global_permission(u)
+    return u
+
+
+@pytest.fixture
+def obj_factory(request):
+    def _rf(fixture_name):
+        obj = request.getfixturevalue(fixture_name)
+
+        # special case to make obj organization-scoped
+        if obj._meta.model_name == 'executionenvironment':
+            obj.organization = request.getfixturevalue('organization')
+            obj.save(update_fields=['organization'])
+
+        return obj
+
+    return _rf
+
+
+@pytest.mark.django_db
+def test_access_qs_external_auditor(ext_auditor_rd, rando, job_template):
+    ext_auditor_rd.give_global_permission(rando)
+    jt_cls = apps.get_model('main', 'JobTemplate')
+    ujt_cls = apps.get_model('main', 'UnifiedJobTemplate')
+    assert job_template in jt_cls.access_qs(rando)
+    assert job_template.id in jt_cls.access_ids_qs(rando)
+    assert job_template.id in ujt_cls.accessible_pk_qs(rando, 'read_role')
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('model', sorted(permission_registry.all_registered_models, key=lambda cls: cls._meta.model_name))
+class TestExternalAuditorRoleAllModels:
+    def test_access_can_read_method(self, obj_factory, model, ext_auditor, rando):
+        fixture_name = model._meta.verbose_name.replace(' ', '_')
+        obj = obj_factory(fixture_name)
+
+        assert check_user_access(rando, model, 'read', obj) is False
+        assert check_user_access(ext_auditor, model, 'read', obj) is True
+
+    def test_access_get_queryset(self, obj_factory, model, ext_auditor, rando):
+        fixture_name = model._meta.verbose_name.replace(' ', '_')
+        obj = obj_factory(fixture_name)
+
+        assert obj not in get_user_queryset(rando, model)
+        assert obj in get_user_queryset(ext_auditor, model)
+
+    def test_global_list(self, obj_factory, model, ext_auditor, rando, get):
+        fixture_name = model._meta.verbose_name.replace(' ', '_')
+        obj_factory(fixture_name)
+
+        url = reverse(f'api:{fixture_name}_list')
+        r = get(url, user=rando, expect=200)
+        initial_ct = r.data['count']
+
+        r = get(url, user=ext_auditor, expect=200)
+        assert r.data['count'] == initial_ct + 1
+
+        if fixture_name in ('job_template', 'workflow_job_template'):
+            url = reverse('api:unified_job_template_list')
+            r = get(url, user=rando, expect=200)
+            initial_ct = r.data['count']
+
+            r = get(url, user=ext_auditor, expect=200)
+            assert r.data['count'] == initial_ct + 1
+
+    def test_detail_view(self, obj_factory, model, ext_auditor, rando, get):
+        fixture_name = model._meta.verbose_name.replace(' ', '_')
+        obj = obj_factory(fixture_name)
+
+        url = reverse(f'api:{fixture_name}_detail', kwargs={'pk': obj.pk})
+        get(url, user=rando, expect=403)  # NOTE: should be 401
+        get(url, user=ext_auditor, expect=200)
+
+
+@pytest.mark.django_db
+class TestExternalAuditorNonRoleModels:
+    def test_ad_hoc_command_view(self, ad_hoc_command_factory, rando, ext_auditor, get):
+        """The AdHocCommandAccess class references is_system_auditor
+
+        this is to prove it works with other system-level view roles"""
+        ad_hoc_command = ad_hoc_command_factory()
+        url = reverse('api:ad_hoc_command_list')
+        r = get(url, user=rando, expect=200)
+        assert r.data['count'] == 0
+        r = get(url, user=ext_auditor, expect=200)
+        assert r.data['count'] == 1
+        assert r.data['results'][0]['id'] == ad_hoc_command.id
+
+        event = AdHocCommandEvent.objects.create(ad_hoc_command=ad_hoc_command)
+        url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': ad_hoc_command.id})
+        r = get(url, user=rando, expect=403)
+        r = get(url, user=ext_auditor, expect=200)
+        assert r.data['count'] == 1
+
+        url = reverse('api:ad_hoc_command_event_detail', kwargs={'pk': event.id})
+        r = get(url, user=rando, expect=403)
+        r = get(url, user=ext_auditor, expect=200)
+        assert r.data['id'] == event.id
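A note on the parametrization used above: each registered model is mapped to a pytest fixture by taking its Django verbose_name and replacing spaces with underscores, so JobTemplate resolves to the `job_template` fixture. In isolation:

def fixture_name_for(model):
    # 'job template' -> 'job_template'; assumes the default Django verbose_name
    return model._meta.verbose_name.replace(' ', '_')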
awx/main/tests/functional/dab_rbac/test_managed_roles.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+import pytest
+
+from ansible_base.rbac.models import RoleDefinition, DABPermission
+
+
+@pytest.mark.django_db
+def test_roles_to_not_create(setup_managed_roles):
+    assert RoleDefinition.objects.filter(name='Organization Admin').count() == 1
+
+    SHOULD_NOT_EXIST = ('Organization Organization Admin', 'Organization Team Admin', 'Organization InstanceGroup Admin')
+
+    bad_rds = RoleDefinition.objects.filter(name__in=SHOULD_NOT_EXIST)
+    if bad_rds.exists():
+        bad_names = list(bad_rds.values_list('name', flat=True))
+        raise Exception(f'Found RoleDefinitions that should not exist: {bad_names}')
+
+
+@pytest.mark.django_db
+def test_project_update_role(setup_managed_roles):
+    """Role to allow updating a project on the object-level should exist"""
+    assert RoleDefinition.objects.filter(name='Project Update').count() == 1
+
+
+@pytest.mark.django_db
+def test_org_child_add_permission(setup_managed_roles):
+    for model_name in ('Project', 'NotificationTemplate', 'WorkflowJobTemplate', 'Inventory'):
+        rd = RoleDefinition.objects.get(name=f'Organization {model_name} Admin')
+        assert 'add_' in str(rd.permissions.values_list('codename', flat=True)), f'The {rd.name} role definition expected to contain add_ permissions'
+
+    # special case for JobTemplate, anyone can create one with use permission to project/inventory
+    assert not DABPermission.objects.filter(codename='add_jobtemplate').exists()
@@ -1,20 +1,37 @@
from unittest import mock
+import json

import pytest

-from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions
+from django.contrib.contenttypes.models import ContentType
+
+from crum import impersonate
+
+from awx.main.fields import ImplicitRoleField
+from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions, get_role_codenames, get_role_definition
from awx.main.models import User, Organization, WorkflowJobTemplate, WorkflowJobTemplateNode, Team
from awx.api.versioning import reverse

-from ansible_base.rbac.models import RoleUserAssignment
+from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition
+from ansible_base.rbac import permission_registry


@pytest.mark.django_db
@pytest.mark.parametrize(
    'role_name',
-    ['execution_environment_admin_role', 'project_admin_role', 'admin_role', 'auditor_role', 'read_role', 'execute_role', 'notification_admin_role'],
+    [
+        'execution_environment_admin_role',
+        'workflow_admin_role',
+        'project_admin_role',
+        'admin_role',
+        'auditor_role',
+        'read_role',
+        'execute_role',
+        'approval_role',
+        'notification_admin_role',
+    ],
)
-def test_round_trip_roles(organization, rando, role_name, managed_roles):
+def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
    """
    Make an assignment with the old-style role,
    get the equivalent new role

@@ -22,13 +39,75 @@ def test_round_trip_roles(organization, rando, role_name, managed_roles):
    """
    getattr(organization, role_name).members.add(rando)
    assignment = RoleUserAssignment.objects.get(user=rando)
    print(assignment.role_definition.name)
    old_role = get_role_from_object_role(assignment.object_role)
    assert old_role.id == getattr(organization, role_name).id


@pytest.mark.django_db
-def test_organization_level_permissions(organization, inventory, managed_roles):
+@pytest.mark.parametrize('model', sorted(permission_registry.all_registered_models, key=lambda cls: cls._meta.model_name))
+def test_role_migration_matches(request, model, setup_managed_roles):
+    fixture_name = model._meta.verbose_name.replace(' ', '_')
+    obj = request.getfixturevalue(fixture_name)
+    role_ct = 0
+    for field in obj._meta.get_fields():
+        if isinstance(field, ImplicitRoleField):
+            if field.name == 'read_role':
+                continue  # intentionally left as "Compat" roles
+            role_ct += 1
+            old_role = getattr(obj, field.name)
+            old_codenames = set(get_role_codenames(old_role))
+            rd = get_role_definition(old_role)
+            new_codenames = set(rd.permissions.values_list('codename', flat=True))
+            # all the old roles should map to a non-Compat role definition
+            if 'Compat' not in rd.name:
+                model_rds = RoleDefinition.objects.filter(content_type=ContentType.objects.get_for_model(obj))
+                rd_data = {}
+                for rd in model_rds:
+                    rd_data[rd.name] = list(rd.permissions.values_list('codename', flat=True))
+                assert (
+                    'Compat' not in rd.name
+                ), f'Permissions for old vs new roles did not match.\nold {field.name}: {old_codenames}\nnew:\n{json.dumps(rd_data, indent=2)}'
+            assert new_codenames == set(old_codenames)
+
+    # In the old system these models did not have object-level roles, all others expect some model roles
+    if model._meta.model_name not in ('notificationtemplate', 'executionenvironment'):
+        assert role_ct > 0
+
+
+@pytest.mark.django_db
+def test_role_naming(setup_managed_roles):
+    qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='dmin')
+    assert qs.count() == 1  # sanity
+    rd = qs.first()
+    assert rd.name == 'JobTemplate Admin'
+    assert rd.description
+    assert rd.created_by is None
+
+
+@pytest.mark.django_db
+def test_action_role_naming(setup_managed_roles):
+    qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='ecute')
+    assert qs.count() == 1  # sanity
+    rd = qs.first()
+    assert rd.name == 'JobTemplate Execute'
+    assert rd.description
+    assert rd.created_by is None
+
+
+@pytest.mark.django_db
+def test_compat_role_naming(setup_managed_roles, job_template, rando, alice):
+    with impersonate(alice):
+        job_template.read_role.members.add(rando)
+    qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='ompat')
+    assert qs.count() == 1  # sanity
+    rd = qs.first()
+    assert rd.name == 'JobTemplate Read Compat'
+    assert rd.description
+    assert rd.created_by is None
+
+
+@pytest.mark.django_db
+def test_organization_level_permissions(organization, inventory, setup_managed_roles):
    u1 = User.objects.create(username='alice')
    u2 = User.objects.create(username='bob')

@@ -58,14 +137,14 @@ def test_organization_level_permissions(organization, inventory, managed_roles):


@pytest.mark.django_db
-def test_organization_execute_role(organization, rando, managed_roles):
+def test_organization_execute_role(organization, rando, setup_managed_roles):
    organization.execute_role.members.add(rando)
    assert rando in organization.execute_role
    assert set(Organization.accessible_objects(rando, 'execute_role')) == set([organization])


@pytest.mark.django_db
-def test_workflow_approval_list(get, post, admin_user, managed_roles):
+def test_workflow_approval_list(get, post, admin_user, setup_managed_roles):
    workflow_job_template = WorkflowJobTemplate.objects.create()
    approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
    url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})

@@ -79,14 +158,14 @@ def test_workflow_approval_list(get, post, admin_user, managed_roles):


@pytest.mark.django_db
-def test_creator_permission(rando, admin_user, inventory, managed_roles):
+def test_creator_permission(rando, admin_user, inventory, setup_managed_roles):
    give_creator_permissions(rando, inventory)
    assert rando in inventory.admin_role
    assert rando in inventory.admin_role.members.all()


@pytest.mark.django_db
-def test_team_team_read_role(rando, team, admin_user, post, managed_roles):
+def test_team_team_read_role(rando, team, admin_user, post, setup_managed_roles):
    orgs = [Organization.objects.create(name=f'foo-{i}') for i in range(2)]
    teams = [Team.objects.create(name=f'foo-{i}', organization=orgs[i]) for i in range(2)]
    teams[1].member_role.members.add(rando)

@@ -105,3 +184,11 @@ def test_implicit_parents_no_assignments(organization):
    with mock.patch('awx.main.models.rbac.give_or_remove_permission') as mck:
        Team.objects.create(name='random team', organization=organization)
    mck.assert_not_called()
+
+
+@pytest.mark.django_db
+def test_user_auditor_rel(organization, rando, setup_managed_roles):
+    assert rando not in organization.auditor_role
+    audit_rd = RoleDefinition.objects.get(name='Organization Audit')
+    audit_rd.give_permission(rando, organization)
+    assert list(rando.auditor_of_organizations) == [organization]
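test_compat_role_naming wraps the membership change in crum's `impersonate` so that a request user is in scope while the Compat role definition is created as a side effect; the test then checks that the managed definition still records `created_by is None`. What `impersonate` provides, in a minimal sketch (django-crum API):

from crum import impersonate, get_current_user


def demo(alice):
    with impersonate(alice):
        # Models that stamp created_by from the current user would
        # attribute anything created inside this block to alice.
        assert get_current_user() == alice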
@@ -21,13 +21,13 @@ class TestComputedFields:
    def test_computed_fields_normal_use(self, mocker, inventory):
        job = Job.objects.create(name='fake-job', inventory=inventory)
        with immediate_on_commit():
-            with mocker.patch.object(update_inventory_computed_fields, 'delay'):
-                job.delete()
-                update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
+            mocker.patch.object(update_inventory_computed_fields, 'delay')
+            job.delete()
+            update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)

    def test_disable_computed_fields(self, mocker, inventory):
        job = Job.objects.create(name='fake-job', inventory=inventory)
        with disable_computed_fields():
-            with mocker.patch.object(update_inventory_computed_fields, 'delay'):
-                job.delete()
-                update_inventory_computed_fields.delay.assert_not_called()
+            mocker.patch.object(update_inventory_computed_fields, 'delay')
+            job.delete()
+            update_inventory_computed_fields.delay.assert_not_called()
@@ -4,25 +4,19 @@ import pytest
# CRUM
from crum import impersonate

# Django
from django.contrib.contenttypes.models import ContentType

# AWX
-from awx.main.models import UnifiedJobTemplate, Job, JobTemplate, WorkflowJobTemplate, WorkflowApprovalTemplate, Project, WorkflowJob, Schedule, Credential
+from awx.main.models import UnifiedJobTemplate, Job, JobTemplate, WorkflowJobTemplate, Project, WorkflowJob, Schedule, Credential
from awx.api.versioning import reverse
from awx.main.constants import JOB_VARIABLE_PREFIXES


@pytest.mark.django_db
def test_subclass_types():
-    assert set(UnifiedJobTemplate._submodels_with_roles()) == set(
-        [
-            ContentType.objects.get_for_model(JobTemplate).id,
-            ContentType.objects.get_for_model(Project).id,
-            ContentType.objects.get_for_model(WorkflowJobTemplate).id,
-            ContentType.objects.get_for_model(WorkflowApprovalTemplate).id,
-        ]
-    )
+    assert set(UnifiedJobTemplate._submodels_with_roles()) == {
+        JobTemplate,
+        Project,
+        WorkflowJobTemplate,
+    }


@pytest.mark.django_db
@@ -21,13 +21,13 @@ def test_multi_group_basic_job_launch(instance_factory, controlplane_instance_gr
    j2 = create_job(objects2.job_template)
    with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
        mock_task_impact.return_value = 500
-        with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
-            TaskManager().schedule()
-            TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
+        mocker.patch("awx.main.scheduler.TaskManager.start_task")
+        TaskManager().schedule()
+        TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])


@pytest.mark.django_db
-def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, mocker, instance_group_factory, job_template_factory):
+def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, instance_group_factory, job_template_factory):
    i1 = instance_factory("i1")
    i2 = instance_factory("i2")
    ig1 = instance_group_factory("ig1", instances=[i1])

@@ -50,7 +50,7 @@ def test_multi_group_with_shared_dependency(instance_factory, controlplane_insta
    objects2 = job_template_factory('jt2', organization=objects1.organization, project=p, inventory='inv2', credential='cred2')
    objects2.job_template.instance_groups.add(ig2)
    j2 = create_job(objects2.job_template, dependencies_processed=False)
-    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
+    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        DependencyManager().schedule()
        TaskManager().schedule()
        pu = p.project_updates.first()

@@ -73,10 +73,10 @@ def test_workflow_job_no_instancegroup(workflow_job_template_factory, controlpla
    wfj = wfjt.create_unified_job()
    wfj.status = "pending"
    wfj.save()
-    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
-        TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(wfj, None, None)
-        assert wfj.instance_group is None
+    mocker.patch("awx.main.scheduler.TaskManager.start_task")
+    TaskManager().schedule()
+    TaskManager.start_task.assert_called_once_with(wfj, None, None)
+    assert wfj.instance_group is None


@pytest.mark.django_db
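One subtlety above: test_multi_group_with_shared_dependency keeps its `with` block, so it drops the `mocker` fixture and switches to `unittest.mock.patch`, which does support context-manager use; the other tests keep `mocker` and drop the `with`. The two scoping styles side by side (target string as in the tests; runnable only inside an AWX environment):

from unittest import mock


def demo_explicit_scope():
    # unittest.mock: the patch scope is explicit and ends with the block.
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        pass  # patched only inside this block
    # pytest-mock's mocker.patch(...) instead stays active until test teardown.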
@@ -16,9 +16,9 @@ def test_single_job_scheduler_launch(hybrid_instance, controlplane_instance_grou
    instance = controlplane_instance_group.instances.all()[0]
    objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred')
    j = create_job(objects.job_template)
-    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
-        TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
+    mocker.patch("awx.main.scheduler.TaskManager.start_task")
+    TaskManager().schedule()
+    TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)


@pytest.mark.django_db
@@ -46,6 +46,8 @@ def generate_fake_var(element):

def credential_kind(source):
    """Given the inventory source kind, return expected credential kind"""
+    if source == 'openshift_virtualization':
+        return 'kubernetes_bearer_token'
    return source.replace('ec2', 'aws')

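Per the logic just added, the helper special-cases OpenShift Virtualization and otherwise only rewrites 'ec2' to 'aws':

assert credential_kind('openshift_virtualization') == 'kubernetes_bearer_token'
assert credential_kind('ec2') == 'aws'
assert credential_kind('gce') == 'gce'  # no special case: passed through unchanged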
@@ -85,3 +85,17 @@ class TestMigrationSmoke:

        RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
        assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
+
+        # Regression testing for bug that comes from current vs past models mismatch
+        RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition')
+        assert not RoleDefinition.objects.filter(name='Organization Organization Admin').exists()
+        # Test special cases in managed role creation
+        assert not RoleDefinition.objects.filter(name='Organization Team Admin').exists()
+        assert not RoleDefinition.objects.filter(name='Organization InstanceGroup Admin').exists()
+
+        # Test that a removed EE model permission has been deleted
+        new_state = migrator.apply_tested_migration(
+            ('main', '0195_EE_permissions'),
+        )
+        DABPermission = new_state.apps.get_model('dab_rbac', 'DABPermission')
+        assert not DABPermission.objects.filter(codename='view_executionenvironment').exists()
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
import pytest

-from django.conf import settings
-
from awx.api.versioning import reverse
from awx.main.middleware import URLModificationMiddleware
from awx.main.models import (  # noqa

@@ -121,7 +119,7 @@ def test_notification_template(get, admin_user):


@pytest.mark.django_db
-def test_instance(get, admin_user):
+def test_instance(get, admin_user, settings):
    test_instance = Instance.objects.create(uuid=settings.SYSTEM_UUID, hostname="localhost", capacity=100)
    url = reverse('api:instance_detail', kwargs={'pk': test_instance.pk})
    response = get(url, user=admin_user, expect=200)
@@ -205,3 +203,65 @@ def test_403_vs_404(get):

    get(f'/api/v2/users/{cindy.pk}/', expect=401)
    get('/api/v2/users/cindy/', expect=404)
+
+
+@pytest.mark.django_db
+class TestConvertNamedUrl:
+    @pytest.mark.parametrize(
+        "url",
+        (
+            "/api/",
+            "/api/v2/",
+            "/api/v2/hosts/",
+            "/api/v2/hosts/1/",
+            "/api/v2/organizations/1/inventories/",
+            "/api/foo/",
+            "/api/foo/v2/",
+            "/api/foo/v2/organizations/",
+            "/api/foo/v2/organizations/1/",
+            "/api/foo/v2/organizations/1/inventories/",
+            "/api/foobar/",
+            "/api/foobar/v2/",
+            "/api/foobar/v2/organizations/",
+            "/api/foobar/v2/organizations/1/",
+            "/api/foobar/v2/organizations/1/inventories/",
+            "/api/foobar/v2/organizations/1/inventories/",
+        ),
+    )
+    def test_noop(self, url, settings):
+        settings.OPTIONAL_API_URLPATTERN_PREFIX = ''
+        assert URLModificationMiddleware._convert_named_url(url) == url
+
+        settings.OPTIONAL_API_URLPATTERN_PREFIX = 'foo'
+        assert URLModificationMiddleware._convert_named_url(url) == url
+
+    def test_named_org(self):
+        test_org = Organization.objects.create(name='test_org')
+
+        assert URLModificationMiddleware._convert_named_url('/api/v2/organizations/test_org/') == f'/api/v2/organizations/{test_org.pk}/'
+
+    def test_named_org_optional_api_urlpattern_prefix_interaction(self, settings):
+        settings.OPTIONAL_API_URLPATTERN_PREFIX = 'bar'
+        test_org = Organization.objects.create(name='test_org')
+
+        assert URLModificationMiddleware._convert_named_url('/api/bar/v2/organizations/test_org/') == f'/api/bar/v2/organizations/{test_org.pk}/'
+
+    @pytest.mark.parametrize("prefix", ['', 'bar'])
+    def test_named_org_not_found(self, prefix, settings):
+        settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
+        if prefix:
+            prefix += '/'
+
+        assert URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/does-not-exist/') == f'/api/{prefix}v2/organizations/0/'
+
+    @pytest.mark.parametrize("prefix", ['', 'bar'])
+    def test_named_sub_resource(self, prefix, settings):
+        settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
+        test_org = Organization.objects.create(name='test_org')
+        if prefix:
+            prefix += '/'
+
+        assert (
+            URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/test_org/inventories/')
+            == f'/api/{prefix}v2/organizations/{test_org.pk}/inventories/'
+        )
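Together these cases pin down the middleware contract: a resource name in the path is rewritten to its primary key before URL resolution, an unknown name becomes pk 0 (a guaranteed miss), and the optional API prefix is honored. A simplified sketch of that rewrite for organizations only (not the middleware's real implementation; `org_pks` is a stand-in for the database lookup):

import re


def convert_named_org_url(path, org_pks, prefix=''):
    base = f'/api/{prefix + "/" if prefix else ""}v2/organizations/'
    m = re.match(re.escape(base) + r'([^/]+)(/.*)?$', path)
    if not m or m.group(1).isdigit():
        return path  # nothing named to convert
    pk = org_pks.get(m.group(1), 0)  # unknown names map to pk 0 -> 404
    return f'{base}{pk}{m.group(2) or "/"}'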
@@ -187,7 +187,7 @@ def test_remove_role_from_user(role, post, admin):


@pytest.mark.django_db
-@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True)
+@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True, ANSIBLE_BASE_ALLOW_TEAM_ORG_MEMBER=True)
def test_get_teams_roles_list(get, team, organization, admin):
    team.member_role.children.add(organization.admin_role)
    url = reverse('api:team_roles_list', kwargs={'pk': team.id})
awx/main/tests/functional/test_rbac_execution_environment.py (new file, 148 lines)
@@ -0,0 +1,148 @@
+import pytest
+
+from django.contrib.contenttypes.models import ContentType
+
+from awx.main.access import ExecutionEnvironmentAccess
+from awx.main.models import ExecutionEnvironment, Organization, Team
+from awx.main.models.rbac import get_role_codenames
+
+from awx.api.versioning import reverse
+from django.urls import reverse as django_reverse
+
+from ansible_base.rbac.models import RoleDefinition
+
+
+@pytest.fixture
+def ee_rd():
+    return RoleDefinition.objects.create_from_permissions(
+        name='EE object admin',
+        permissions=['change_executionenvironment', 'delete_executionenvironment'],
+        content_type=ContentType.objects.get_for_model(ExecutionEnvironment),
+    )
+
+
+@pytest.fixture
+def org_ee_rd():
+    return RoleDefinition.objects.create_from_permissions(
+        name='EE org admin',
+        permissions=['add_executionenvironment', 'change_executionenvironment', 'delete_executionenvironment', 'view_organization'],
+        content_type=ContentType.objects.get_for_model(Organization),
+    )
+
+
+@pytest.mark.django_db
+def test_old_ee_role_maps_to_correct_permissions(organization):
+    assert set(get_role_codenames(organization.execution_environment_admin_role)) == {
+        'view_organization',
+        'add_executionenvironment',
+        'change_executionenvironment',
+        'delete_executionenvironment',
+    }
+
+
+@pytest.fixture
+def org_ee(organization):
+    return ExecutionEnvironment.objects.create(name='some user ee', organization=organization)
+
+
+@pytest.fixture
+def check_user_capabilities(get, setup_managed_roles):
+    def _rf(user, obj, expected):
+        url = reverse('api:execution_environment_list')
+        r = get(url, user=user, expect=200)
+        for item in r.data['results']:
+            if item['id'] == obj.pk:
+                assert expected == item['summary_fields']['user_capabilities']
+                break
+        else:
+            raise RuntimeError(f'Could not find expected object ({obj}) in EE list result: {r.data}')
+
+    return _rf
+
+
+# ___ begin tests ___
+
+
+@pytest.mark.django_db
+def test_any_user_can_view_global_ee(control_plane_execution_environment, rando):
+    assert ExecutionEnvironmentAccess(rando).can_read(control_plane_execution_environment)
+
+
+@pytest.mark.django_db
+def test_managed_ee_not_assignable(control_plane_execution_environment, ee_rd, rando, admin_user, post):
+    url = django_reverse('roleuserassignment-list')
+    r = post(url, {'role_definition': ee_rd.pk, 'user': rando.id, 'object_id': control_plane_execution_environment.pk}, user=admin_user, expect=400)
+    assert 'Can not assign object roles to managed Execution Environment' in str(r.data)
+
+
+@pytest.mark.django_db
+def test_org_member_required_for_assignment(org_ee, ee_rd, rando, admin_user, post):
+    url = django_reverse('roleuserassignment-list')
+    r = post(url, {'role_definition': ee_rd.pk, 'user': rando.id, 'object_id': org_ee.pk}, user=admin_user, expect=400)
+    assert 'User must have view permission to Execution Environment organization' in str(r.data)
+
+
+@pytest.mark.django_db
+def test_team_can_have_permission(org_ee, ee_rd, rando, admin_user, post):
+    org2 = Organization.objects.create(name='a different team')
+    team = Team.objects.create(name='a team', organization=org2)
+    team.member_role.members.add(rando)
+    assert org_ee not in ExecutionEnvironmentAccess(rando).get_queryset()  # user can not view the EE
+
+    url = django_reverse('roleteamassignment-list')
+
+    # can give object roles to the team now
+    post(url, {'role_definition': ee_rd.pk, 'team': team.id, 'object_id': org_ee.pk}, user=admin_user, expect=201)
+    assert rando.has_obj_perm(org_ee, 'change')
+    assert org_ee in ExecutionEnvironmentAccess(rando).get_queryset()  # user can view the EE now
+
+
+@pytest.mark.django_db
+def test_give_object_permission_to_ee(org_ee, ee_rd, org_member, check_user_capabilities):
+    access = ExecutionEnvironmentAccess(org_member)
+    assert access.can_read(org_ee)  # by virtue of being an org member
+    assert not access.can_change(org_ee, {'name': 'new'})
+    check_user_capabilities(org_member, org_ee, {'edit': False, 'delete': False, 'copy': False})
+
+    ee_rd.give_permission(org_member, org_ee)
+    assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization.id})
+
+    check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': False})
+
+
+@pytest.mark.django_db
+def test_need_related_organization_access(org_ee, ee_rd, org_member):
+    org2 = Organization.objects.create(name='another organization')
+    ee_rd.give_permission(org_member, org_ee)
+    org2.member_role.members.add(org_member)
+    access = ExecutionEnvironmentAccess(org_member)
+    assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization})
+    assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization.id})
+    assert not access.can_change(org_ee, {'name': 'new', 'organization': org2.id})
+    assert not access.can_change(org_ee, {'name': 'new', 'organization': org2})
+
+    # User can make the change if they have relevant permission to the new organization
+    org_ee.organization.execution_environment_admin_role.members.add(org_member)
+    org2.execution_environment_admin_role.members.add(org_member)
+    assert access.can_change(org_ee, {'name': 'new', 'organization': org2.id})
+    assert access.can_change(org_ee, {'name': 'new', 'organization': org2})
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('style', ['new', 'old'])
+def test_give_org_permission_to_ee(org_ee, organization, org_member, check_user_capabilities, style, org_ee_rd):
+    access = ExecutionEnvironmentAccess(org_member)
+    assert not access.can_change(org_ee, {'name': 'new'})
+    check_user_capabilities(org_member, org_ee, {'edit': False, 'delete': False, 'copy': False})
+
+    if style == 'new':
+        org_ee_rd.give_permission(org_member, organization)
+        assert org_member.has_obj_perm(org_ee.organization, 'add_executionenvironment')  # sanity
+    else:
+        organization.execution_environment_admin_role.members.add(org_member)
+
+    assert access.can_change(org_ee, {'name': 'new', 'organization': organization.id})
+    check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': True})
+
+    # Extra check, user can not remove the EE from the organization
+    assert not access.can_change(org_ee, {'name': 'new', 'organization': None})
@@ -165,7 +165,7 @@ class TestOrphanJobTemplate:

@pytest.mark.django_db
@pytest.mark.job_permissions
-def test_job_template_creator_access(project, organization, rando, post):
+def test_job_template_creator_access(project, organization, rando, post, setup_managed_roles):
    project.use_role.members.add(rando)
    response = post(
        url=reverse('api:job_template_list'),

@@ -182,8 +182,14 @@ def test_job_template_creator_access(project, organization, rando, post):

@pytest.mark.django_db
@pytest.mark.job_permissions
-@pytest.mark.parametrize('lacking', ['project', 'inventory'])
-def test_job_template_insufficient_creator_permissions(lacking, project, inventory, organization, rando, post):
+@pytest.mark.parametrize(
+    'lacking,reason',
+    [
+        ('project', 'You do not have use permission on Project'),
+        ('inventory', 'You do not have use permission on Inventory'),
+    ],
+)
+def test_job_template_insufficient_creator_permissions(lacking, reason, project, inventory, organization, rando, post):
    if lacking != 'project':
        project.use_role.members.add(rando)
    else:

@@ -192,12 +198,13 @@ def test_job_template_insufficient_creator_permissions(lacking, project, invento
        inventory.use_role.members.add(rando)
    else:
        inventory.read_role.members.add(rando)
-    post(
+    response = post(
        url=reverse('api:job_template_list'),
        data=dict(name='newly-created-jt', inventory=inventory.id, project=project.pk, playbook='helloworld.yml'),
        user=rando,
        expect=403,
    )
+    assert reason in response.data[lacking]


@pytest.mark.django_db
@@ -99,7 +99,9 @@ def test_notification_template_access_org_user(notification_template, user):
@pytest.mark.django_db
def test_notificaiton_template_orphan_access_org_admin(notification_template, organization, org_admin):
    notification_template.organization = None
+    notification_template.save(update_fields=['organization'])
    access = NotificationTemplateAccess(org_admin)
+    assert not org_admin.has_obj_perm(notification_template, 'change')
    assert not access.can_change(notification_template, {'organization': organization.id})
@@ -48,3 +48,17 @@ def test_org_resource_role(ext_auth, organization, rando, org_admin):
    assert access.can_attach(organization, rando, 'member_role.members') == ext_auth
    organization.member_role.members.add(rando)
    assert access.can_unattach(organization, rando, 'member_role.members') == ext_auth
+
+
+@pytest.mark.django_db
+def test_delete_org_while_workflow_active(workflow_job_template):
+    '''
+    Delete org while workflow job is active (i.e. changing status)
+    '''
+    assert workflow_job_template.organization  # sanity check
+    wj = workflow_job_template.create_unified_job()  # status should be new
+    workflow_job_template.organization.delete()
+    wj.refresh_from_db()
+    assert wj.status != 'pending'  # sanity check
+    wj.status = 'pending'  # status needs to change in order to trigger workflow_job_template.save()
+    wj.save(update_fields=['status'])

@@ -35,6 +35,13 @@ class TestWorkflowJobTemplateAccess:
        assert org_member in wfjt.execute_role
        assert org_member in wfjt.read_role

+    def test_non_super_admin_no_add_without_org(self, wfjt, organization, rando):
+        organization.member_role.members.add(rando)
+        wfjt.admin_role.members.add(rando)
+        access = WorkflowJobTemplateAccess(rando, save_messages=True)
+        assert not access.can_add({'name': 'without org'})
+        assert 'An organization is required to create a workflow job template for normal user' in access.messages['organization']
+

@pytest.mark.django_db
class TestWorkflowJobTemplateNodeAccess:
@@ -76,15 +76,15 @@ class TestJobTemplateSerializerGetRelated:
class TestJobTemplateSerializerGetSummaryFields:
    def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template):
        job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
-        with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
-            mock_rj.return_value = []
-            test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')
+        mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
+        mock_rj.return_value = []
+        test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')

    def test_survey_spec_absent(self, get_summary_fields_mock_and_run, mocker, job_template):
        job_template.survey_spec = None
-        with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
-            mock_rj.return_value = []
-            summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
+        mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
+        mock_rj.return_value = []
+        summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
        assert 'survey' not in summary

    def test_copy_edit_standard(self, mocker, job_template_factory):

@@ -107,10 +107,10 @@ class TestJobTemplateSerializerGetSummaryFields:
        view.kwargs = {}
        serializer.context['view'] = view

-        with mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie'):
-            with mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar'):
-                with mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo'):
-                    response = serializer.get_summary_fields(jt_obj)
+        mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie')
+        mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar')
+        mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo')
+        response = serializer.get_summary_fields(jt_obj)

        assert response['user_capabilities']['copy'] == 'foo'
        assert response['user_capabilities']['edit'] == 'foobar'
@@ -189,8 +189,8 @@ class TestWorkflowJobTemplateNodeSerializerSurveyPasswords:
        serializer = WorkflowJobTemplateNodeSerializer()
        wfjt = WorkflowJobTemplate.objects.create(name='fake-wfjt')
        serializer.instance = WorkflowJobTemplateNode(workflow_job_template=wfjt, unified_job_template=jt, extra_data={'var1': '$encrypted$foooooo'})
-        with mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo'):
-            attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
+        mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo')
+        attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
        assert 'survey_passwords' in attrs
        assert 'var1' in attrs['survey_passwords']
        assert attrs['extra_data']['var1'] == '$encrypted$foooooo'
@@ -191,16 +191,16 @@ class TestResourceAccessList:

     def test_parent_access_check_failed(self, mocker, mock_organization):
         mock_access = mocker.MagicMock(__name__='for logger', return_value=False)
-        with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
-            with pytest.raises(PermissionDenied):
-                self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
-            mock_access.assert_called_once_with(mock_organization)
+        mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
+        with pytest.raises(PermissionDenied):
+            self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
+        mock_access.assert_called_once_with(mock_organization)

     def test_parent_access_check_worked(self, mocker, mock_organization):
         mock_access = mocker.MagicMock(__name__='for logger', return_value=True)
-        with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
-            self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
-            mock_access.assert_called_once_with(mock_organization)
+        mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
+        self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
+        mock_access.assert_called_once_with(mock_organization)


 def test_related_search_reverse_FK_field():
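In the `TestResourceAccessList` hunk the pre-built `mock_access` object is passed as `mocker.patch`'s `new` argument, so the very same object can be asserted on after the call; the `__name__` kwarg appears to exist to satisfy code that reads that attribute off the replacement, as the 'for logger' hint suggests. A standalone sketch of the explicit-`new` form (hypothetical target, not from this diff):

```python
# Patching with an explicit `new` object (sketch, hypothetical target):
# the ready-made MagicMock is installed at the target until teardown and
# can be asserted on directly once the code under test has run.
import math
from unittest import mock


def test_patch_with_explicit_new(mocker):
    fake_sqrt = mock.MagicMock(return_value=3.0)
    mocker.patch("math.sqrt", fake_sqrt)  # math.sqrt is fake_sqrt for the rest of the test
    assert math.sqrt(9) == 3.0
    fake_sqrt.assert_called_once_with(9)
```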
@@ -66,7 +66,7 @@ class TestJobTemplateLabelList:
         mock_request = mock.MagicMock()

         super(JobTemplateLabelList, view).unattach(mock_request, None, None)
-        assert mixin_unattach.called_with(mock_request, None, None)
+        mixin_unattach.assert_called_with(mock_request, None, None)


 class TestInventoryInventorySourcesUpdate:
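The `TestJobTemplateLabelList` hunk above is a genuine bug fix rather than part of the context-manager sweep: `called_with` is not part of the `Mock` API, so `assert mixin_unattach.called_with(...)` could never fail, because attribute access on a `MagicMock` fabricates a truthy child mock. A standalone sketch of the failure mode (hypothetical example, not from this diff):

```python
# Why `assert m.called_with(...)` is a silent no-op: undefined attributes
# on a MagicMock are auto-created child mocks, and calling one returns yet
# another mock, which is always truthy.
from unittest import mock

m = mock.MagicMock()
m(1, 2)
assert m.called_with(3, 4)  # passes even though the arguments are wrong
m.assert_called_with(1, 2)  # the real assertion; raises AssertionError on mismatch
```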
@@ -108,15 +108,16 @@ class TestInventoryInventorySourcesUpdate:
         mock_request = mocker.MagicMock()
         mock_request.user.can_access.return_value = can_access

-        with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj):
-            with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None):
-                with mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer') as serializer_class:
-                    serializer = serializer_class.return_value
-                    serializer.to_representation.return_value = {}
-
-                    view = InventoryInventorySourcesUpdate()
-                    response = view.post(mock_request)
-                    assert response.data == expected
+        mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj)
+        mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None)
+        serializer_class = mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer')
+
+        serializer = serializer_class.return_value
+        serializer.to_representation.return_value = {}
+
+        view = InventoryInventorySourcesUpdate()
+        response = view.post(mock_request)
+        assert response.data == expected


 class TestSurveySpecValidation:
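The `InventoryInventorySourcesUpdate` hunk also converts `with mocker.patch(...) as serializer_class:` into an assignment from the direct call. Patching a class this way installs a mock class whose `return_value` stands in for any instance the view constructs. A self-contained sketch (hypothetical serializer and helper, not the AWX classes):

```python
# Class-patch sketch (hypothetical names): the mock class records
# construction, and serializer_class.return_value is the instance that
# the code under test ends up calling methods on.
import sys


class DetailSerializer:
    def to_representation(self, obj):
        return {"real": obj}


def make_payload(obj):
    return DetailSerializer().to_representation(obj)


def test_class_patch(mocker):
    serializer_class = mocker.patch.object(sys.modules[__name__], "DetailSerializer")
    serializer_class.return_value.to_representation.return_value = {}
    assert make_payload("x") == {}
```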
@@ -155,35 +155,35 @@ def test_node_getter_and_setters():
 class TestWorkflowJobCreate:
     def test_create_no_prompts(self, wfjt_node_no_prompts, workflow_job_unit, mocker):
         mock_create = mocker.MagicMock()
-        with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
-            wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
-            mock_create.assert_called_once_with(
-                all_parents_must_converge=False,
-                extra_data={},
-                survey_passwords={},
-                char_prompts=wfjt_node_no_prompts.char_prompts,
-                inventory=None,
-                unified_job_template=wfjt_node_no_prompts.unified_job_template,
-                workflow_job=workflow_job_unit,
-                identifier=mocker.ANY,
-                execution_environment=None,
-            )
+        mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
+        wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
+        mock_create.assert_called_once_with(
+            all_parents_must_converge=False,
+            extra_data={},
+            survey_passwords={},
+            char_prompts=wfjt_node_no_prompts.char_prompts,
+            inventory=None,
+            unified_job_template=wfjt_node_no_prompts.unified_job_template,
+            workflow_job=workflow_job_unit,
+            identifier=mocker.ANY,
+            execution_environment=None,
+        )

     def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, credential, mocker):
         mock_create = mocker.MagicMock()
-        with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
-            wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
-            mock_create.assert_called_once_with(
-                all_parents_must_converge=False,
-                extra_data={},
-                survey_passwords={},
-                char_prompts=wfjt_node_with_prompts.char_prompts,
-                inventory=wfjt_node_with_prompts.inventory,
-                unified_job_template=wfjt_node_with_prompts.unified_job_template,
-                workflow_job=workflow_job_unit,
-                identifier=mocker.ANY,
-                execution_environment=None,
-            )
+        mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
+        wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
+        mock_create.assert_called_once_with(
+            all_parents_must_converge=False,
+            extra_data={},
+            survey_passwords={},
+            char_prompts=wfjt_node_with_prompts.char_prompts,
+            inventory=wfjt_node_with_prompts.inventory,
+            unified_job_template=wfjt_node_with_prompts.unified_job_template,
+            workflow_job=workflow_job_unit,
+            identifier=mocker.ANY,
+            execution_environment=None,
+        )


 @pytest.mark.django_db
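One detail in the `TestWorkflowJobCreate` assertions above: `identifier=mocker.ANY` (pytest-mock re-exports `unittest.mock.ANY`) compares equal to any value, so the test can pin every other keyword argument while ignoring the randomly generated node identifier. A standalone sketch (hypothetical factory, not from this diff):

```python
# Matching one volatile argument with ANY (sketch, hypothetical names):
# ANY compares equal to everything, so only the stable kwargs are pinned.
import uuid
from unittest import mock


def create_node(factory):
    factory(name="node", identifier=str(uuid.uuid4()))


def test_create_node_pins_name_only():
    factory = mock.MagicMock()
    create_node(factory)
    factory.assert_called_once_with(name="node", identifier=mock.ANY)
```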
Some files were not shown because too many files have changed in this diff