Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 12:04:44 -03:30)

Compare commits: 24.5.0...poc-inv-cr (123 commits)
Commits (SHA1):

21d7ccc3ea, 75b9df3f33, 6d0c47fdd0, 54b4acbdfc, a41766090e, 34fa897dda, 32df114e41, 018f235a64, 7e77235d5e, 139d8f0ae2, 7691365aea, 59f61517d4, fa670e2d7f, a87a044d64, 381ade1148, 864a30e3d4, 5f42db67e6, ddf4f288d4, e75bc8bc1e, bb533287b8, 9979fc659e, 9e5babc093, c71e2524ed, 48b4c62186, 853730acb9, f1448fced1, 7697b6a69b, 22a491c32c, cbd9dce940, a4fdcc1cca, df95439008, acd834df8b, 587f0ecf98, 5a2091f7bf, fa7423819a, fde8af9f11, 209e7e27b1, 6c7d29a982, 282ba36839, b727d2c3b3, 7fc3d5c7c7, 4e055f46c4, f595985b7c, ea232315bf, ee251812b5, 00ba1ea569, d91af132c1, 94e5795dfc, c4688d6298, 6763badea3, 2c4ad6ef0f, 37f44d7214, 98bbc836a6, b59aff50dc, a70b0c1ddc, db72c9d5b8, 4e0d19914f, 6f2307f50e, dbc2215bb6, 7c08b29827, 407194d320, 853af295d9, 4738c8333a, 13dcea0afd, bc2d339981, bef9ef10bb, 8645fe5c57, b93aa20362, 4bbfc8a946, 2c8eef413b, d5bad1a533, f6c0effcb2, 31a086b11a, d94f766fcb, a7113549eb, bfd811f408, 030704a9e1, c312d9bce3, aadcc217eb, 345c1c11e9, 2c3a7fafc5, dbcd32a1d9, d45e258a78, d16b69a102, 8b4efbc973, 4cb061e7db, 31db6a1447, ad9d5904d8, b837d549ff, 9e22865d2e, ee3e3e1516, 4a8f6e45f8, 6a317cca1b, d67af79451, fe77fda7b2, f613b76baa, 054cbe69d7, 87e9dcb6d7, c8829b057e, a0b376a6ca, d675207f99, 20504042c9, 0e87e97820, 1f154742df, 85fc81aab1, 5cfeeb3e87, a8c07b06d8, 53c5feaf6b, 6f57aaa8f5, bea74a401d, 54e85813c8, b69ed08fe5, de25408a23, b17f0a188b, fb860d76ce, 451f20ce0f, c1dc0c7b86, d65ea2a3d5, 8827ae7554, 4915262af1, d43c91e1a5, b470ca32af, 793777bec7
.github/actions/awx_devel_image/action.yml (2 changes)

@@ -24,7 +24,7 @@ runs:
     - name: Pre-pull latest devel image to warm cache
       shell: bash
-      run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
+      run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}

     - name: Build image for current source checkout
       shell: bash
.github/actions/run_awx_devel/action.yml (10 changes)

@@ -57,16 +57,6 @@ runs:
         awx-manage update_password --username=admin --password=password
         EOSH
-
-    - name: Build UI
-      # This must be a string comparison in composite actions:
-      # https://github.com/actions/runner/issues/2238
-      if: ${{ inputs.build-ui == 'true' }}
-      shell: bash
-      run: |
-        docker exec -i tools_awx_1 sh <<-EOSH
-        make ui-devel
-        EOSH

     - name: Get instance data
       id: data
      shell: bash
.github/triage_replies.md (2 changes)

@@ -1,7 +1,7 @@
 ## General
 - For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
 - Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
-- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
+- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
.github/workflows/ci.yml (26 changes)

@@ -38,7 +38,9 @@ jobs:
           - name: ui-test-general
             command: make ui-test-general
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Build awx_devel image for running checks
         uses: ./.github/actions/awx_devel_image
@@ -52,7 +54,9 @@
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -70,13 +74,15 @@
       DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           path: awx

       - name: Checkout awx-operator
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ansible/awx-operator
           path: awx-operator

@@ -130,7 +136,9 @@
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
       - name: Upgrade ansible-core
@@ -154,7 +162,9 @@
           - name: r-z0-9
             regex: ^[r-z0-9]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -200,7 +210,9 @@
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Upgrade ansible-core
         run: python3 -m pip install --upgrade ansible-core
.github/workflows/dab-release.yml (new file, 57 lines)

@@ -0,0 +1,57 @@
+---
+name: django-ansible-base requirements update
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 6 * * *' # once an day @ 6 AM
+permissions:
+  pull-requests: write
+  contents: write
+jobs:
+  dab-pin-newest:
+    if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
+    runs-on: ubuntu-latest
+    steps:
+      - id: dab-release
+        name: Get current django-ansible-base release version
+        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
+        with:
+          owner: ansible
+          repo: django-ansible-base
+          excludes: prerelease, draft
+
+      - name: Check out respository code
+        uses: actions/checkout@v4
+
+      - id: dab-pinned
+        name: Get current django-ansible-base pinned version
+        run:
+          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
+
+      - name: Update django-ansible-base pinned version to upstream release
+        run:
+          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
+        with:
+          base: devel
+          branch: bump-django-ansible-base
+          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
+          body: |
+            ##### SUMMARY
+            Automated .github/workflows/dab-release.yml
+
+            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
+            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
+
+            ##### ISSUE TYPE
+            - Bug, Docs Fix or other nominal change
+
+            ##### COMPONENT NAME
+            - API
+
+          commit-message: |
+            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
+          add-paths:
+            requirements/requirements_git.txt
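The release lookup above leans on a pinned third-party action. For reference, the same query can be made directly against GitHub's public REST API; a hedged Python sketch (the endpoint is GitHub's real `releases/latest` route, which already excludes drafts and prereleases, but treating it as equivalent to the action's internal filtering is an assumption):

```python
# Hedged sketch: fetch the latest full release of django-ansible-base via
# GitHub's public REST API (not the pinned action's actual implementation).
import json
import urllib.request

url = 'https://api.github.com/repos/ansible/django-ansible-base/releases/latest'
with urllib.request.urlopen(url) as resp:  # /releases/latest skips drafts/prereleases
    release = json.load(resp)
print(release['tag_name'])
```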
.github/workflows/devel_images.yml (13 changes)

@@ -2,6 +2,7 @@
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
 on:
   workflow_dispatch:
   push:
@@ -34,7 +35,9 @@ jobs:
           exit 0
         if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')

-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
@@ -59,16 +62,14 @@
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

-      - name: Setup node and npm
+      - name: Setup node and npm for the new UI build
         uses: actions/setup-node@v2
         with:
-          node-version: '16.13.1'
+          node-version: '18'
         if: matrix.build-targets.image-name == 'awx'

-      - name: Prebuild UI for awx image (to speed up build process)
+      - name: Prebuild new UI for awx image (to speed up build process)
         run: |
           sudo apt-get install gettext
-          make ui-release
+          make ui-next
         if: matrix.build-targets.image-name == 'awx'
.github/workflows/docs.yml (4 changes)

@@ -8,7 +8,9 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

      - name: install tox
        run: pip install tox
.github/workflows/label_issue.yml (5 changes)

@@ -30,7 +30,10 @@ jobs:
     timeout-minutes: 20
     name: Label Issue - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

-      - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
.github/workflows/label_pr.yml (5 changes)

@@ -29,7 +29,10 @@ jobs:
     timeout-minutes: 20
     name: Label PR - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

-      - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
.github/workflows/promote.yml (4 changes)

@@ -32,7 +32,9 @@ jobs:
         echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV

       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
.github/workflows/stage.yml (26 changes)

@@ -45,19 +45,22 @@ jobs:
           exit 0

       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           path: awx

       - name: Checkout awx-operator
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ${{ github.repository_owner }}/awx-operator
           path: awx-operator

       - name: Checkout awx-logos
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ansible/awx-logos
           path: awx-logos

@@ -86,17 +89,14 @@
         run: |
           cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/

-      - name: Setup node and npm
+      - name: Setup node and npm for new UI build
         uses: actions/setup-node@v2
         with:
-          node-version: '16.13.1'
+          node-version: '18'

-      - name: Prebuild UI for awx image (to speed up build process)
+      - name: Prebuild new UI for awx image (to speed up build process)
         working-directory: awx
-        run: |
-          sudo apt-get install gettext
-          make ui-release
-          make ui-next
+        run: make ui-next

       - name: Set build env variables
         run: |
@@ -136,9 +136,9 @@
       - name: Pulling images for test deployment with awx-operator
         # awx operator molecue test expect to kind load image and buildx exports image to registry and not local
         run: |
-          docker pull ${AWX_OPERATOR_TEST_IMAGE}
-          docker pull ${AWX_EE_TEST_IMAGE}
-          docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
+          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
+          docker pull -q ${AWX_EE_TEST_IMAGE}
+          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}

       - name: Run test deployment with awx-operator
         working-directory: awx-operator
||||
4
.github/workflows/update_dependabot_prs.yml
vendored
4
.github/workflows/update_dependabot_prs.yml
vendored
@@ -13,7 +13,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout branch
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Update PR Body
|
||||
env:
|
||||
|
||||
.github/workflows/upload_schema.yml (6 changes)

@@ -18,7 +18,9 @@ jobs:
       packages: write
       contents: read
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false

       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -34,7 +36,7 @@

       - name: Pre-pull image to warm build cache
         run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :

       - name: Build image
         run: |
CONTRIBUTING.md

@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo

 #### Frontend Development

-See [the ui development documentation](awx/ui/CONTRIBUTING.md).
+See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).

 #### Fork and clone the AWX repo

@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for

 **NOTES**

-> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
+> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.

 > If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for

 At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.

-If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
+If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.

 ## Submitting Pull Requests

@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
 When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.

 ## Reporting Issues

 We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).

 ## Getting Help
Makefile (63 changes)

@@ -53,6 +53,8 @@ OTEL ?= false
 LOKI ?= false
 # If set to true docker-compose will install editable dependencies
 EDITABLE_DEPENDENCIES ?= false
+# If set to true, use tls for postgres connection
+PG_TLS ?= false

 VENV_BASE ?= /var/lib/awx/venv

@@ -61,6 +63,11 @@ DEV_DOCKER_OWNER ?= ansible
 DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
+IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
+
+# Common command to use for running ansible-playbook
+ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)

 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel

@@ -69,7 +76,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37

 NAME ?= awx
@@ -84,6 +91,18 @@ I18N_FLAG_FILE = .i18n_built
 ## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
 PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x

+# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
+# DOCKER_CACHE=--no-cache make docker-compose-build
+ifeq ($(DOCKER_CACHE),)
+DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
+DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
+DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
+else
+DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
+endif
+
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 	develop refresh adduser migrate dbchange \
 	receiver test test_unit test_coverage coverage_html \
@@ -366,7 +385,7 @@ symlink_collection:
 	ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)

 awx_collection_build: $(shell find awx_collection -type f)
-	ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
+	$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
 		-e collection_package=$(COLLECTION_PACKAGE) \
 		-e collection_namespace=$(COLLECTION_NAMESPACE) \
 		-e collection_version=$(COLLECTION_VERSION) \
@@ -483,13 +502,7 @@ ui-test-general:
 	$(NPM_BIN) run --prefix awx/ui pretest
 	$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand

-# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
-HEADLESS ?= no
-ifeq ($(HEADLESS), yes)
-dist/$(SDIST_TAR_FILE):
-else
-dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
-endif
+dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE)
 	$(PYTHON) -m build -s
 	ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -520,10 +533,10 @@ endif

 docker-compose-sources: .git/hooks/pre-commit
 	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-	    ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+	    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
 	fi;

-	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
 		-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
 		-e awx_image_tag=$(COMPOSE_TAG) \
 		-e receptor_image=$(RECEPTOR_IMAGE) \
@@ -542,11 +555,12 @@ docker-compose-sources: .git/hooks/pre-commit
 		-e enable_otel=$(OTEL) \
 		-e enable_loki=$(LOKI) \
 		-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+		-e pg_tls=$(PG_TLS) \
 		$(EXTRA_SOURCES_ANSIBLE_OPTS)

 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
 		-e enable_ldap=$(LDAP); \
@@ -589,7 +603,7 @@ docker-compose-container-group-clean:
 .PHONY: Dockerfile.dev
 ## Generate Dockerfile.dev for awx_devel image
 Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 		-e dockerfile_name=Dockerfile.dev \
 		-e build_dev=True \
 		-e receptor_image=$(RECEPTOR_IMAGE)

@@ -600,8 +614,7 @@ docker-compose-build: Dockerfile.dev
 		-f Dockerfile.dev \
 		-t $(DEVEL_IMAGE_NAME) \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
-
+		$(DOCKER_DEVEL_CACHE_FLAG) .

 .PHONY: docker-compose-buildx
 ## Build awx_devel image for docker compose development environment for multiple architectures

@@ -611,7 +624,7 @@ docker-compose-buildx: Dockerfile.dev
 	- docker buildx build \
 		--push \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) \
+		$(DOCKER_DEVEL_CACHE_FLAG) \
 		--platform=$(PLATFORMS) \
 		--tag $(DEVEL_IMAGE_NAME) \
 		-f Dockerfile.dev .

@@ -664,7 +677,7 @@ version-for-buildyml:
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 		-e receptor_image=$(RECEPTOR_IMAGE) \
 		-e headless=$(HEADLESS)

@@ -674,7 +687,8 @@ awx-kube-build: Dockerfile
 		--build-arg VERSION=$(VERSION) \
 		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
 		--build-arg HEADLESS=$(HEADLESS) \
-		-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
+		$(DOCKER_KUBE_CACHE_FLAG) \
+		-t $(IMAGE_KUBE) .

 ## Build multi-arch awx image for deployment on Kubernetes environment.
 awx-kube-buildx: Dockerfile

@@ -686,7 +700,8 @@ awx-kube-buildx: Dockerfile
 		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
 		--build-arg HEADLESS=$(HEADLESS) \
 		--platform=$(PLATFORMS) \
-		--tag $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) \
+		$(DOCKER_KUBE_CACHE_FLAG) \
+		--tag $(IMAGE_KUBE) \
 		-f Dockerfile .
 	- docker buildx rm awx-kube-buildx

@@ -694,7 +709,7 @@ awx-kube-buildx: Dockerfile
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 		-e dockerfile_name=Dockerfile.kube-dev \
 		-e kube_dev=True \
 		-e template_dest=_build_kube_dev \

@@ -704,8 +719,8 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 awx-kube-dev-build: Dockerfile.kube-dev
 	DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-		-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
+		$(DOCKER_KUBE_DEV_CACHE_FLAG) \
+		-t $(IMAGE_KUBE_DEV) .

 ## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
 awx-kube-dev-buildx: Dockerfile.kube-dev

@@ -714,14 +729,14 @@ awx-kube-dev-buildx: Dockerfile.kube-dev
 	- docker buildx build \
 		--push \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
-		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+		$(DOCKER_KUBE_DEV_CACHE_FLAG) \
 		--platform=$(PLATFORMS) \
-		--tag $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+		--tag $(IMAGE_KUBE_DEV) \
 		-f Dockerfile.kube-dev .
 	- docker buildx rm awx-kube-dev-buildx

 kind-dev-load: awx-kube-dev-build
-	$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+	$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)

 # Translation TASKS
 # --------------------------------------
awx/api/generics.py

@@ -33,8 +33,10 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # django-ansible-base
 from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
 from ansible_base.lib.utils.models import get_all_field_names
+from ansible_base.lib.utils.requests import get_remote_host
 from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
 from ansible_base.rbac.permission_registry import permission_registry
+from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header

 # AWX
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
@@ -42,6 +44,7 @@ from awx.main.models.rbac import give_creator_permissions
 from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.licensing import server_product_name
+from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
@@ -93,8 +96,9 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
+        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
         if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
             ret.set_cookie(
                 'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
             )
@@ -103,7 +107,7 @@ class LoggedLoginView(auth_views.LoginView):
             return ret
         else:
             if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
             ret.status_code = 401
             return ret
@@ -151,22 +155,23 @@ class APIView(views.APIView):
         Store the Django REST Framework Request object as an attribute on the
         normal Django request, store time the request started.
         """
+        remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
+
         self.time_started = time.time()
         if getattr(settings, 'SQL_DEBUG', False):
             self.queries_before = len(connection.queries)

-        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
-        # they respect the allowed proxy list
-        if all(
-            [
-                settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
-            ]
-        ):
-            for custom_header in settings.REMOTE_HOST_HEADERS:
-                if custom_header.startswith('HTTP_'):
-                    request.environ.pop(custom_header, None)
+        if 'HTTP_X_TRUSTED_PROXY' in request.environ:
+            if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
+                remote_headers = settings.REMOTE_HOST_HEADERS
+            else:
+                logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
+
+        if settings.PROXY_IP_ALLOWED_LIST:
+            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
+                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)

         drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
         request.drf_request = drf_request
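The rewritten block boils down to: honor the configured forwarding headers only if the request proves it came from a trusted proxy (shared secret) or arrives from an allow-listed proxy address; otherwise strip the client-supplied headers. A minimal standalone sketch of that policy, with stand-in functions rather than the real awx.main.utils.proxy implementations:

```python
# Hedged sketch of the proxy-header policy above; these are illustrative
# stand-ins, not the actual awx.main.utils.proxy API.
def is_proxy_in_headers(environ, allowed_proxies, remote_headers):
    # True when any peer-address header matches a known proxy.
    return any(environ.get(h) in allowed_proxies for h in remote_headers)


def delete_headers_starting_with_http(environ, header_names):
    # Drop client-supplied forwarding headers (WSGI spells them HTTP_*).
    for header in header_names:
        if header.startswith('HTTP_'):
            environ.pop(header, None)


environ = {'REMOTE_ADDR': '203.0.113.7', 'HTTP_X_FORWARDED_FOR': '10.0.0.9'}
if not is_proxy_in_headers(environ, ['10.42.0.1'], ['REMOTE_ADDR', 'REMOTE_HOST']):
    delete_headers_starting_with_http(environ, ['HTTP_X_FORWARDED_FOR'])
print(environ)  # {'REMOTE_ADDR': '203.0.113.7'} - the spoofable header is gone
```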
@@ -211,17 +216,21 @@ class APIView(views.APIView):
             return response

         if response.status_code >= 400:
+            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
             msg_data = {
                 'status_code': response.status_code,
                 'user_name': request.user,
                 'url_path': request.path,
-                'remote_addr': request.META.get('REMOTE_ADDR', None),
+                'remote_addr': ip,
             }

             if type(response.data) is dict:
                 msg_data['error'] = response.data.get('error', response.status_text)
             elif type(response.data) is list:
-                msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+                if len(response.data) > 0 and isinstance(response.data[0], str):
+                    msg_data['error'] = str(response.data[0])
+                else:
+                    msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
             else:
                 msg_data['error'] = response.status_text
awx/api/metadata.py

@@ -103,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
                 default = field.get_default()
                 if type(default) is UUID:
                     default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
-                if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
+                if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
                     default = '{}://{}'.format(self.request.scheme, self.request.get_host())
                 field_info['default'] = default
             except serializers.SkipField:
@@ -2,6 +2,12 @@
 - hosts: all
   become: yes
   tasks:
     - name: Create the receptor group
       group:
+{% verbatim %}
         name: "{{ receptor_group }}"
+{% endverbatim %}
         state: present
     - name: Create the receptor user
       user:
+{% verbatim %}
awx/api/views/__init__.py

@@ -61,7 +61,9 @@ import pytz
 from wsgiref.util import FileWrapper

 # django-ansible-base
+from ansible_base.lib.utils.requests import get_remote_hosts
 from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
@@ -128,6 +130,7 @@ from awx.api.views.mixin import (
 from awx.api.pagination import UnifiedJobEventPagination
 from awx.main.utils import set_environ


 logger = logging.getLogger('awx.api.views')
@@ -710,16 +713,81 @@ class AuthView(APIView):
         return Response(data)


+def immutablesharedfields(cls):
+    '''
+    Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
+
+    Works by overriding these view methods:
+    - create
+    - delete
+    - perform_update
+    create and delete are overridden to raise a PermissionDenied exception.
+    perform_update is overridden to check if any shared fields are being modified,
+    and raise a PermissionDenied exception if so.
+    '''
+    # create instead of perform_create because some of our views
+    # override create instead of perform_create
+    if hasattr(cls, 'create'):
+        cls.original_create = cls.create
+
+        @functools.wraps(cls.create)
+        def create_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_create(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
+
+        cls.create = create_wrapper
+
+    if hasattr(cls, 'delete'):
+        cls.original_delete = cls.delete
+
+        @functools.wraps(cls.delete)
+        def delete_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_delete(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
+
+        cls.delete = delete_wrapper
+
+    if hasattr(cls, 'perform_update'):
+        cls.original_perform_update = cls.perform_update
+
+        @functools.wraps(cls.perform_update)
+        def update_wrapper(*args, **kwargs):
+            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                view, serializer = args
+                instance = view.get_object()
+                if instance:
+                    if isinstance(instance, models.Organization):
+                        shared_fields = OrganizationType._declared_fields.keys()
+                    elif isinstance(instance, models.User):
+                        shared_fields = UserType._declared_fields.keys()
+                    elif isinstance(instance, models.Team):
+                        shared_fields = TeamType._declared_fields.keys()
+                    attrs = serializer.validated_data
+                    for field in shared_fields:
+                        if field in attrs and getattr(instance, field) != attrs[field]:
+                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
+            return cls.original_perform_update(*args, **kwargs)
+
+        cls.perform_update = update_wrapper
+
+    return cls
+
+
+@immutablesharedfields
 class TeamList(ListCreateAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamDetail(RetrieveUpdateDestroyAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamUsersList(BaseUsersList):
     model = models.User
     serializer_class = serializers.UserSerializer
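The decorator above relies only on attribute lookup plus functools.wraps, so its mechanics can be exercised outside Django entirely. A self-contained sketch, with a module-level flag and a plain exception standing in for settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT and DRF's PermissionDenied:

```python
# Self-contained sketch of the wrap-and-guard pattern; the flag and exception
# are stand-ins for the Django setting and DRF's PermissionDenied.
import functools

ALLOW_LOCAL_RESOURCE_MANAGEMENT = False


class PermissionDenied(Exception):
    pass


def immutablesharedfields(cls):
    if hasattr(cls, 'create'):
        cls.original_create = cls.create

        @functools.wraps(cls.create)
        def create_wrapper(*args, **kwargs):
            if ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_create(*args, **kwargs)
            raise PermissionDenied('create this resource via the platform ingress')

        cls.create = create_wrapper
    return cls


@immutablesharedfields
class TeamView:
    def create(self):
        return 'created'


try:
    TeamView().create()
except PermissionDenied as exc:
    print(exc)  # create this resource via the platform ingress
```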
@@ -1101,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
     copy_return_serializer_class = serializers.ProjectSerializer


+@immutablesharedfields
 class UserList(ListCreateAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -1271,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=self.kwargs['pk'])
         role = get_object_or_400(models.Role, pk=sub_id)

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        # Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1343,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
         return qs.filter(Q(actor=parent) | Q(user__in=[parent]))


+@immutablesharedfields
 class UserDetail(RetrieveUpdateDestroyAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -2313,6 +2392,14 @@ class JobTemplateList(ListCreateAPIView):
     serializer_class = serializers.JobTemplateSerializer
     always_allow_superuser = False

+    def check_permissions(self, request):
+        if request.method == 'POST':
+            can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+            if not can_access:
+                self.permission_denied(request, message=messages)
+
+        super(JobTemplateList, self).check_permissions(request)
+

 class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = models.JobTemplate
@@ -2692,12 +2779,7 @@ class JobTemplateCallback(GenericAPIView):
         host for the current request.
         """
         # Find the list of remote host names/IPs to check.
-        remote_hosts = set()
-        for header in settings.REMOTE_HOST_HEADERS:
-            for value in self.request.META.get(header, '').split(','):
-                value = value.strip()
-                if value:
-                    remote_hosts.add(value)
+        remote_hosts = set(get_remote_hosts(self.request))
         # Add the reverse lookup of IP addresses.
         for rh in list(remote_hosts):
             try:
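The removed loop is the logic that get_remote_hosts in django-ansible-base now provides centrally (per the import added at the top of this file). Restated as a standalone function for clarity — the header names in the demo are illustrative:

```python
# The removed header-splitting loop, restated standalone.
def remote_hosts_from_headers(meta, header_names):
    hosts = set()
    for header in header_names:
        for value in meta.get(header, '').split(','):
            value = value.strip()
            if value:
                hosts.add(value)
    return hosts


meta = {'HTTP_X_FORWARDED_FOR': '10.0.0.5, 10.0.0.9', 'REMOTE_ADDR': '127.0.0.1'}
print(remote_hosts_from_headers(meta, ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR']))
# {'10.0.0.5', '10.0.0.9', '127.0.0.1'} (set order varies)
```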
@@ -3037,6 +3119,14 @@ class WorkflowJobTemplateList(ListCreateAPIView):
     serializer_class = serializers.WorkflowJobTemplateSerializer
     always_allow_superuser = False

+    def check_permissions(self, request):
+        if request.method == 'POST':
+            can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+            if not can_access:
+                self.permission_denied(request, message=messages)
+
+        super(WorkflowJobTemplateList, self).check_permissions(request)
+

 class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = models.WorkflowJobTemplate
@@ -4295,7 +4385,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=sub_id)
         role = self.get_parent_object()

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
awx/api/views/organization.py

@@ -53,15 +53,18 @@ from awx.api.serializers import (
     CredentialSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
+from awx.api.views import immutablesharedfields

 logger = logging.getLogger('awx.api.views.organization')


+@immutablesharedfields
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer


+@immutablesharedfields
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
     relationship = 'inventories'


+@immutablesharedfields
 class OrganizationUsersList(BaseUsersList):
     model = User
     serializer_class = UserSerializer
@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
     ordering = ('username',)


+@immutablesharedfields
 class OrganizationAdminsList(BaseUsersList):
     model = User
     serializer_class = UserSerializer
@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
     parent_key = 'organization'


+@immutablesharedfields
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
     model = Team
     serializer_class = TeamSerializer
awx/main/access.py

@@ -598,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
     - a superuser
     - admin role on the Instance group
     I can add/delete Instance Groups:
-    - a superuser(system administrator)
+    - a superuser(system administrator), because these are not org-scoped
     I can use Instance Groups when I have:
     - use_role on the instance group
     """
@@ -627,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
     def can_delete(self, obj):
         if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
             return False
-        return self.user.is_superuser
+        return self.user.has_obj_perm(obj, 'delete')


 class UserAccess(BaseAccess):
@@ -1387,12 +1387,11 @@ class TeamAccess(BaseAccess):
 class ExecutionEnvironmentAccess(BaseAccess):
     """
     I can see an execution environment when:
     - I'm a superuser
-    - I'm a member of the same organization
-    - it is a global ExecutionEnvironment
+    - I can see its organization
+    - It is a global ExecutionEnvironment
     I can create/change an execution environment when:
     - I'm a superuser
-    - I'm an admin for the organization(s)
+    - I have an organization or object role that gives access
     """

     model = ExecutionEnvironment
@@ -1401,7 +1400,9 @@ class ExecutionEnvironmentAccess(BaseAccess):

     def filtered_queryset(self):
         return ExecutionEnvironment.objects.filter(
-            Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
+            Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
+            | Q(organization__isnull=True)
+            | Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
         ).distinct()

     @check_superuser
@@ -1416,15 +1417,17 @@ class ExecutionEnvironmentAccess(BaseAccess):
             raise PermissionDenied
         if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
             if not self.user.has_obj_perm(obj, 'change'):
-                raise PermissionDenied
+                return False
         else:
             if self.user not in obj.organization.execution_environment_admin_role:
                 raise PermissionDenied
-        if data and 'organization' in data:
-            new_org = get_object_from_data('organization', Organization, data, obj=obj)
-            if not new_org or self.user not in new_org.execution_environment_admin_role:
-                return False
-        return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
+        if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
+            return False
+        # Special case that check_related does not catch, org users can not remove the organization from the EE
+        if data and ('organization' in data or 'organization_id' in data):
+            if (not data.get('organization')) and (not data.get('organization_id')):
+                return False
+        return True

     def can_delete(self, obj):
         if obj.managed:
@@ -1596,6 +1599,8 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
         inventory = get_value(Inventory, 'inventory')
         if inventory:
             if self.user not in inventory.use_role:
+                if self.save_messages:
+                    self.messages['inventory'] = [_('You do not have use permission on Inventory')]
                 return False

         if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
@@ -1604,11 +1609,16 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
         project = get_value(Project, 'project')
         # If the user has admin access to the project (as an org admin), should
         # be able to proceed without additional checks.
-        if project:
-            return self.user in project.use_role
-        else:
+        if not project:
             return False

+        if self.user not in project.use_role:
+            if self.save_messages:
+                self.messages['project'] = [_('You do not have use permission on Project')]
+            return False
+
+        return True

     @check_superuser
     def can_copy_related(self, obj):
         """
@@ -2092,11 +2102,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
         if not data:  # So the browseable API will work
             return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

-        return bool(
-            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
-            and self.check_related('inventory', Inventory, data, role_field='use_role')
-            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
-        )
+        if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
+            if data.get('organization', None) is None:
+                if self.save_messages:
+                    self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
+            return False
+
+        if not self.check_related('inventory', Inventory, data, role_field='use_role'):
+            if self.save_messages:
+                self.messages['inventory'] = [_('You do not have use_role to the inventory')]
+            return False
+
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
+            if self.save_messages:
+                self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
+            return False
+
+        return True

     def can_copy(self, obj):
         if self.save_messages:
@@ -2628,7 +2650,7 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):

 class NotificationTemplateAccess(BaseAccess):
     """
-    I can see/use a notification_template if I have permission to
+    Run standard logic from DAB RBAC
     """

     model = NotificationTemplate
@@ -2649,10 +2671,7 @@ class NotificationTemplateAccess(BaseAccess):

     @check_superuser
     def can_change(self, obj, data):
-        if obj.organization is None:
-            # only superusers are allowed to edit orphan notification templates
-            return False
-        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
+        return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')

     def can_admin(self, obj, data):
         return self.can_change(obj, data)
@@ -2662,9 +2681,7 @@ class NotificationTemplateAccess(BaseAccess):

     @check_superuser
     def can_start(self, obj, validate_license=True):
-        if obj.organization is None:
-            return False
-        return self.user in obj.organization.notification_admin_role
+        return self.can_change(obj, None)


 class NotificationAccess(BaseAccess):
awx/main/wsrelay.py

@@ -66,10 +66,8 @@ class FixedSlidingWindow:


 class RelayWebsocketStatsManager:
-    def __init__(self, event_loop, local_hostname):
+    def __init__(self, local_hostname):
         self._local_hostname = local_hostname

-        self._event_loop = event_loop
         self._stats = dict()
         self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
@@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
             self.start()

     def start(self):
-        self.async_task = self._event_loop.create_task(self.run_loop())
+        self.async_task = asyncio.get_running_loop().create_task(
+            self.run_loop(),
+            name='RelayWebsocketStatsManager.run_loop',
+        )
+        return self.async_task

     @classmethod
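The change swaps an event loop captured at construction time for asyncio.get_running_loop() at start time, and names the task for easier debugging. A runnable sketch of the same pattern, with a toy coroutine standing in for run_loop:

```python
# Runnable sketch of the named-task pattern above; StatsManager and run_loop
# are toy stand-ins for RelayWebsocketStatsManager.
import asyncio


class StatsManager:
    def start(self):
        # get_running_loop() only works inside a running loop, which is why
        # the explicitly-passed event_loop constructor argument could go.
        self.async_task = asyncio.get_running_loop().create_task(
            self.run_loop(),
            name='StatsManager.run_loop',
        )
        return self.async_task

    async def run_loop(self):
        await asyncio.sleep(0)


async def main():
    task = StatsManager().start()
    await task
    print(task.get_name())  # StatsManager.run_loop


asyncio.run(main())
```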
awx/main/conf.py

@@ -929,6 +929,16 @@ register(
     category_slug='debug',
 )

+register(
+    'RECEPTOR_KEEP_WORK_ON_ERROR',
+    field_class=fields.BooleanField,
+    label=_('Keep receptor work on error'),
+    default=False,
+    help_text=_('Prevent receptor work from being released on when error is detected'),
+    category=('Debug'),
+    category_slug='debug',
+)
+

 def logging_validate(serializer, attrs):
     if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
awx/main/constants.py

@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
 }
 CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
 ACTIVE_STATES = CAN_CANCEL
+ERROR_STATES = ('error',)
 MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
 CENSOR_VALUE = '************'
 ENV_BLOCKLIST = frozenset(
@@ -102,7 +102,8 @@ def create_listener_connection():

     # Apply overrides specifically for the listener connection
     for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
-        conf[k] = v
+        if k != 'OPTIONS':
+            conf[k] = v
+    for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
+        conf['OPTIONS'][k] = v
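The guard exists because a flat copy would replace the nested OPTIONS dict wholesale; the two-pass merge keeps existing OPTIONS keys and only layers listener-specific ones on top. A small sketch with plain dicts standing in for the Django DATABASES settings:

```python
# Sketch of the merge behavior the change above is after (plain dicts as
# stand-ins for Django DATABASES settings).
conf = {'HOST': 'db', 'OPTIONS': {'sslmode': 'require'}}
override = {'HOST': 'listener-db', 'OPTIONS': {'keepalives': 1}}

# A naive conf.update(override) would drop sslmode. Instead:
for k, v in override.items():
    if k != 'OPTIONS':
        conf[k] = v
for k, v in override.get('OPTIONS', {}).items():
    conf['OPTIONS'][k] = v

print(conf)
# {'HOST': 'listener-db', 'OPTIONS': {'sslmode': 'require', 'keepalives': 1}}
```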
awx/main/fields.py

@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
         kwargs.setdefault('related_name', '+')
         kwargs.setdefault('null', 'True')
         kwargs.setdefault('editable', False)
-        kwargs.setdefault('on_delete', models.CASCADE)
+        kwargs.setdefault('on_delete', models.SET_NULL)
         super(ImplicitRoleField, self).__init__(*args, **kwargs)

     def deconstruct(self):
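on_delete governs what happens to rows holding the foreign key when the referenced Role row is deleted: with SET_NULL the key is nulled instead of the delete cascading to the owning row. A raw-SQLite sketch of that behavior (toy tables, not awx's actual schema):

```python
# Raw-sqlite sketch of ON DELETE SET NULL; tables are illustrative stand-ins.
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('PRAGMA foreign_keys = ON')
con.execute('CREATE TABLE role (id INTEGER PRIMARY KEY)')
con.execute(
    'CREATE TABLE organization ('
    'id INTEGER PRIMARY KEY, '
    'admin_role_id INTEGER REFERENCES role(id) ON DELETE SET NULL)'
)
con.execute('INSERT INTO role (id) VALUES (1)')
con.execute('INSERT INTO organization (id, admin_role_id) VALUES (1, 1)')
con.execute('DELETE FROM role WHERE id = 1')
# With CASCADE the organization row would be gone; with SET NULL it survives.
print(con.execute('SELECT id, admin_role_id FROM organization').fetchone())  # (1, None)
```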
awx/main/management/commands/create_preload_data.py

@@ -2,6 +2,7 @@
 # All Rights Reserved

 from django.core.management.base import BaseCommand
+from django.db import transaction
 from crum import impersonate
 from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
 from awx.main.signals import disable_computed_fields
@@ -13,6 +14,12 @@ class Command(BaseCommand):
     help = 'Creates a preload tower data if there is none.'

     def handle(self, *args, **kwargs):
+        # Wrap the operation in an atomic block, so we do not on accident
+        # create the organization but not create the project, etc.
+        with transaction.atomic():
+            self._handle()
+
+    def _handle(self):
         changed = False

         # Create a default organization as the first superuser found.
@@ -43,10 +50,11 @@ class Command(BaseCommand):

         ssh_type = CredentialType.objects.filter(namespace='ssh').first()
         c, _ = Credential.objects.get_or_create(
-            credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
+            credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser
         )

-        c.admin_role.members.add(superuser)
+        if superuser:
+            c.admin_role.members.add(superuser)

         public_galaxy_credential, _ = Credential.objects.get_or_create(
             name='Ansible Galaxy',
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='job_template_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='credential_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='inventory_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='project_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='workflow_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='notification_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
             model_name='inventory',
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
                 related_name='+',
                 to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'admin_role',
                     'execute_role',
@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='member_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='approval_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.approval_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
                 related_name='+',
                 to='main.Role',
@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execution_environment_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator'],
                 related_name='+',
                 to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
                 related_name='+',
                 to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
             model_name='instancegroup',
             name='use_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
             ),
             preserve_default='True',
         ),
@@ -0,0 +1,61 @@
+# Generated by Django 4.2.10 on 2024-06-12 19:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0193_alter_notification_notification_type_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+    ]
awx/main/migrations/0195_EE_permissions.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+# Generated by Django 4.2.6 on 2024-06-20 15:55
+
+from django.db import migrations
+
+
+def delete_execution_environment_read_role(apps, schema_editor):
+    permission_classes = [apps.get_model('auth', 'Permission'), apps.get_model('dab_rbac', 'DABPermission')]
+    for permission_cls in permission_classes:
+        ee_read_perm = permission_cls.objects.filter(codename='view_executionenvironment').first()
+        if ee_read_perm:
+            ee_read_perm.delete()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0194_alter_inventorysource_source_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='executionenvironment',
+            options={'default_permissions': ('add', 'change', 'delete'), 'ordering': ('-created',)},
+        ),
+        migrations.RunPython(delete_execution_environment_read_role, migrations.RunPython.noop),
+    ]
@@ -134,8 +134,7 @@ def get_permissions_for_role(role_field, children_map, apps):

     # more special cases for those same above special org-level roles
     if role_field.name == 'auditor_role':
-        for codename in ('view_notificationtemplate', 'view_executionenvironment'):
-            perm_list.append(Permission.objects.get(codename=codename))
+        perm_list.append(Permission.objects.get(codename='view_notificationtemplate'))

     return perm_list

@@ -290,14 +289,15 @@ def setup_managed_role_definitions(apps, schema_editor):
     managed_role_definitions = []

     org_perms = set()
-    for cls in permission_registry._registry:
+    for cls in permission_registry.all_registered_models:
         ct = ContentType.objects.get_for_model(cls)
+        cls_name = cls._meta.model_name
         object_perms = set(Permission.objects.filter(content_type=ct))
         # Special case for InstanceGroup which has an organiation field, but is not an organization child object
-        if cls._meta.model_name != 'instancegroup':
+        if cls_name != 'instancegroup':
             org_perms.update(object_perms)

-        if 'object_admin' in to_create and cls != Organization:
+        if 'object_admin' in to_create and cls_name != 'organization':
             indiv_perms = object_perms.copy()
             add_perms = [perm for perm in indiv_perms if perm.codename.startswith('add_')]
             if add_perms:
@@ -310,7 +310,7 @@ def setup_managed_role_definitions(apps, schema_editor):
                 )
             )

-        if 'org_children' in to_create and cls != Organization:
+        if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
             org_child_perms = object_perms.copy()
             org_child_perms.add(Permission.objects.get(codename='view_organization'))

@@ -327,17 +327,25 @@ def setup_managed_role_definitions(apps, schema_editor):
         if 'special' in to_create:
             special_perms = []
             for perm in object_perms:
-                if perm.codename.split('_')[0] not in ('add', 'change', 'update', 'delete', 'view'):
+                # Organization auditor is handled separately
+                if perm.codename.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit'):
                     special_perms.append(perm)
             for perm in special_perms:
                 action = perm.codename.split('_')[0]
                 view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
+                perm_list = [perm, view_perm]
+                # Handle special-case where adhoc role also listed use permission
+                if action == 'adhoc':
+                    for other_perm in object_perms:
+                        if other_perm.codename == 'use_inventory':
+                            perm_list.append(other_perm)
+                            break
                 managed_role_definitions.append(
                     get_or_create_managed(
                         to_create['special'].format(cls=cls, action=action.title()),
                         f'Has {action} permissions to a single {cls._meta.verbose_name}',
                         ct,
-                        [perm, view_perm],
+                        perm_list,
                         RoleDefinition,
                     )
                 )
@@ -353,6 +361,41 @@ def setup_managed_role_definitions(apps, schema_editor):
             )
         )

+    # Special "organization action" roles
+    audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
+    audit_permissions.append(Permission.objects.get(codename='audit_organization'))
+    managed_role_definitions.append(
+        get_or_create_managed(
+            'Organization Audit',
+            'Has permission to view all objects inside of a single organization',
+            org_ct,
+            audit_permissions,
+            RoleDefinition,
+        )
+    )
+
+    org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
+    managed_role_definitions.append(
+        get_or_create_managed(
+            'Organization Execute',
+            'Has permission to execute all runnable objects in the organization',
+            org_ct,
+            [perm for perm in org_perms if perm.codename in org_execute_permissions],
+            RoleDefinition,
+        )
+    )
+
+    org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
+    managed_role_definitions.append(
+        get_or_create_managed(
+            'Organization Approval',
+            'Has permission to approve any workflow steps within a single organization',
+            org_ct,
+            [perm for perm in org_perms if perm.codename in org_approval_permissions],
+            RoleDefinition,
+        )
+    )
+
     unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
     for role_definition in unexpected_role_definitions:
         logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
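The 'special' branch derives one managed role definition per non-CRUD action by looking at the first token of each permission codename; the rewrite swaps 'update' out of the exclusion list and 'audit' in, since organization auditing is now built separately below. A quick self-contained illustration of the filter with representative codenames (illustrative values, not a dump of the real permission table):

    codenames = [
        'add_inventory', 'change_inventory', 'delete_inventory', 'view_inventory',
        'adhoc_inventory', 'use_inventory', 'update_inventory',
        'execute_jobtemplate', 'approve_workflowjobtemplate', 'audit_organization',
    ]

    # Mirrors the new filter: CRUD verbs and 'audit' are excluded; everything else
    # becomes a 'special' action role (adhoc, use, update, execute, approve, ...).
    special = [c for c in codenames if c.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit')]
    print(special)
    # ['adhoc_inventory', 'use_inventory', 'update_inventory', 'execute_jobtemplate', 'approve_workflowjobtemplate']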
@@ -176,17 +176,17 @@ pre_delete.connect(cleanup_created_modified_by, sender=User)

 @property
 def user_get_organizations(user):
-    return Organization.objects.filter(member_role__members=user)
+    return Organization.access_qs(user, 'member')


 @property
 def user_get_admin_of_organizations(user):
-    return Organization.objects.filter(admin_role__members=user)
+    return Organization.access_qs(user, 'change')


 @property
 def user_get_auditor_of_organizations(user):
-    return Organization.objects.filter(auditor_role__members=user)
+    return Organization.access_qs(user, 'audit')


 @property
@@ -15,12 +15,16 @@ from jinja2 import sandbox

 # Django
 from django.db import models
-from django.utils.translation import gettext_lazy as _, gettext_noop
+from django.utils.translation import gettext_lazy as _
 from django.core.exceptions import ValidationError
 from django.conf import settings
 from django.utils.encoding import force_str
 from django.utils.functional import cached_property
 from django.utils.timezone import now
+from django.contrib.auth.models import User
+
+# DRF
+from rest_framework.serializers import ValidationError as DRFValidationError

 # AWX
 from awx.api.versioning import reverse
@@ -41,8 +45,9 @@ from awx.main.models.rbac import (
     ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
     ROLE_SINGLETON_SYSTEM_AUDITOR,
 )
+from awx.main.models import Team, Organization
 from awx.main.utils import encrypt_field
-from . import injectors as builtin_injectors
+from awx_plugins.credentials import injectors as builtin_injectors

 __all__ = ['Credential', 'CredentialType', 'CredentialInputSource', 'build_safe_env']

@@ -315,6 +320,16 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
         else:
             raise ValueError('{} is not a dynamic input field'.format(field_name))

+    def validate_role_assignment(self, actor, role_definition):
+        if self.organization:
+            if isinstance(actor, User):
+                if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
+                    return
+            if isinstance(actor, Team):
+                if actor.organization == self.organization:
+                    return
+            raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
+

 class CredentialType(CommonModelNameNotUnique):
     """
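validate_role_assignment appears to be the per-model hook the django-ansible-base role-assignment flow consults before granting an object role (the ExecutionEnvironment change later in this diff adds the same hook). For an organization-scoped credential the new rule reduces to a small truth table; the objects below are hypothetical:

    # cred.organization == org_a
    #
    # actor                               cred.validate_role_assignment(actor, rd)
    # superuser User                  ->  returns (assignment allowed)
    # User who is a member of org_a   ->  returns (allowed)
    # User with no org_a membership   ->  raises DRFValidationError
    # Team with organization=org_a    ->  returns (allowed)
    # Team from another organization  ->  raises DRFValidationError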
@@ -586,666 +601,6 @@ class ManagedCredentialType(SimpleNamespace):
         return CredentialType(**self.get_creation_params())


-ManagedCredentialType(
-    namespace='ssh',
-    kind='ssh',
-    name=gettext_noop('Machine'),
-    inputs={
-        'fields': [
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
-            {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
-            {
-                'id': 'ssh_public_key_data',
-                'label': gettext_noop('Signed SSH Certificate'),
-                'type': 'string',
-                'multiline': True,
-                'secret': True,
-            },
-            {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
-            {
-                'id': 'become_method',
-                'label': gettext_noop('Privilege Escalation Method'),
-                'type': 'string',
-                'help_text': gettext_noop('Specify a method for "become" operations. This is equivalent to specifying the --become-method Ansible parameter.'),
-            },
-            {
-                'id': 'become_username',
-                'label': gettext_noop('Privilege Escalation Username'),
-                'type': 'string',
-            },
-            {'id': 'become_password', 'label': gettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
-        ],
-    },
-)
-
-ManagedCredentialType(
-    namespace='scm',
-    kind='scm',
-    name=gettext_noop('Source Control'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True},
-            {'id': 'ssh_key_data', 'label': gettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
-            {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True},
-        ],
-    },
-)
-
-ManagedCredentialType(
-    namespace='vault',
-    kind='vault',
-    name=gettext_noop('Vault'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'vault_password', 'label': gettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
-            {
-                'id': 'vault_id',
-                'label': gettext_noop('Vault Identifier'),
-                'type': 'string',
-                'format': 'vault_id',
-                'help_text': gettext_noop(
-                    'Specify an (optional) Vault ID. This is '
-                    'equivalent to specifying the --vault-id '
-                    'Ansible parameter for providing multiple Vault '
-                    'passwords. Note: this feature only works in '
-                    'Ansible 2.4+.'
-                ),
-            },
-        ],
-        'required': ['vault_password'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='net',
-    kind='net',
-    name=gettext_noop('Network'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Password'),
-                'type': 'string',
-                'secret': True,
-            },
-            {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
-            {
-                'id': 'ssh_key_unlock',
-                'label': gettext_noop('Private Key Passphrase'),
-                'type': 'string',
-                'secret': True,
-            },
-            {
-                'id': 'authorize',
-                'label': gettext_noop('Authorize'),
-                'type': 'boolean',
-            },
-            {
-                'id': 'authorize_password',
-                'label': gettext_noop('Authorize Password'),
-                'type': 'string',
-                'secret': True,
-            },
-        ],
-        'dependencies': {
-            'authorize_password': ['authorize'],
-        },
-        'required': ['username'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='aws',
-    kind='cloud',
-    name=gettext_noop('Amazon Web Services'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'username', 'label': gettext_noop('Access Key'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Secret Key'),
-                'type': 'string',
-                'secret': True,
-            },
-            {
-                'id': 'security_token',
-                'label': gettext_noop('STS Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop(
-                    'Security Token Service (STS) is a web service '
-                    'that enables you to request temporary, '
-                    'limited-privilege credentials for AWS Identity '
-                    'and Access Management (IAM) users.'
-                ),
-            },
-        ],
-        'required': ['username', 'password'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='openstack',
-    kind='cloud',
-    name=gettext_noop('OpenStack'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Password (API Key)'),
-                'type': 'string',
-                'secret': True,
-            },
-            {
-                'id': 'host',
-                'label': gettext_noop('Host (Authentication URL)'),
-                'type': 'string',
-                'help_text': gettext_noop('The host to authenticate with. For example, https://openstack.business.com/v2.0/'),
-            },
-            {
-                'id': 'project',
-                'label': gettext_noop('Project (Tenant Name)'),
-                'type': 'string',
-            },
-            {
-                'id': 'project_domain_name',
-                'label': gettext_noop('Project (Domain Name)'),
-                'type': 'string',
-            },
-            {
-                'id': 'domain',
-                'label': gettext_noop('Domain Name'),
-                'type': 'string',
-                'help_text': gettext_noop(
-                    'OpenStack domains define administrative boundaries. '
-                    'It is only needed for Keystone v3 authentication '
-                    'URLs. Refer to the documentation for '
-                    'common scenarios.'
-                ),
-            },
-            {
-                'id': 'region',
-                'label': gettext_noop('Region Name'),
-                'type': 'string',
-                'help_text': gettext_noop('For some cloud providers, like OVH, region must be specified'),
-            },
-            {
-                'id': 'verify_ssl',
-                'label': gettext_noop('Verify SSL'),
-                'type': 'boolean',
-                'default': True,
-            },
-        ],
-        'required': ['username', 'password', 'host', 'project'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='vmware',
-    kind='cloud',
-    name=gettext_noop('VMware vCenter'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'host',
-                'label': gettext_noop('VCenter Host'),
-                'type': 'string',
-                'help_text': gettext_noop('Enter the hostname or IP address that corresponds to your VMware vCenter.'),
-            },
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Password'),
-                'type': 'string',
-                'secret': True,
-            },
-        ],
-        'required': ['host', 'username', 'password'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='satellite6',
-    kind='cloud',
-    name=gettext_noop('Red Hat Satellite 6'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'host',
-                'label': gettext_noop('Satellite 6 URL'),
-                'type': 'string',
-                'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat Satellite 6 server. For example, https://satellite.example.org'),
-            },
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Password'),
-                'type': 'string',
-                'secret': True,
-            },
-        ],
-        'required': ['host', 'username', 'password'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='gce',
-    kind='cloud',
-    name=gettext_noop('Google Compute Engine'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'username',
-                'label': gettext_noop('Service Account Email Address'),
-                'type': 'string',
-                'help_text': gettext_noop('The email address assigned to the Google Compute Engine service account.'),
-            },
-            {
-                'id': 'project',
-                'label': 'Project',
-                'type': 'string',
-                'help_text': gettext_noop(
-                    'The Project ID is the GCE assigned identification. '
-                    'It is often constructed as three words or two words '
-                    'followed by a three-digit number. Examples: project-id-000 '
-                    'and another-project-id'
-                ),
-            },
-            {
-                'id': 'ssh_key_data',
-                'label': gettext_noop('RSA Private Key'),
-                'type': 'string',
-                'format': 'ssh_private_key',
-                'secret': True,
-                'multiline': True,
-                'help_text': gettext_noop('Paste the contents of the PEM file associated with the service account email.'),
-            },
-        ],
-        'required': ['username', 'ssh_key_data'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='azure_rm',
-    kind='cloud',
-    name=gettext_noop('Microsoft Azure Resource Manager'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'subscription',
-                'label': gettext_noop('Subscription ID'),
-                'type': 'string',
-                'help_text': gettext_noop('Subscription ID is an Azure construct, which is mapped to a username.'),
-            },
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Password'),
-                'type': 'string',
-                'secret': True,
-            },
-            {'id': 'client', 'label': gettext_noop('Client ID'), 'type': 'string'},
-            {
-                'id': 'secret',
-                'label': gettext_noop('Client Secret'),
-                'type': 'string',
-                'secret': True,
-            },
-            {'id': 'tenant', 'label': gettext_noop('Tenant ID'), 'type': 'string'},
-            {
-                'id': 'cloud_environment',
-                'label': gettext_noop('Azure Cloud Environment'),
-                'type': 'string',
-                'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when using Azure GovCloud or Azure stack.'),
-            },
-        ],
-        'required': ['subscription'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='github_token',
-    kind='token',
-    name=gettext_noop('GitHub Personal Access Token'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'token',
-                'label': gettext_noop('Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop('This token needs to come from your profile settings in GitHub'),
-            }
-        ],
-        'required': ['token'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='gitlab_token',
-    kind='token',
-    name=gettext_noop('GitLab Personal Access Token'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'token',
-                'label': gettext_noop('Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop('This token needs to come from your profile settings in GitLab'),
-            }
-        ],
-        'required': ['token'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='bitbucket_dc_token',
-    kind='token',
-    name=gettext_noop('Bitbucket Data Center HTTP Access Token'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'token',
-                'label': gettext_noop('Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop('This token needs to come from your user settings in Bitbucket'),
-            }
-        ],
-        'required': ['token'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='insights',
-    kind='insights',
-    name=gettext_noop('Insights'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True},
-        ],
-        'required': ['username', 'password'],
-    },
-    injectors={
-        'extra_vars': {
-            "scm_username": "{{username}}",
-            "scm_password": "{{password}}",
-        },
-        'env': {
-            'INSIGHTS_USER': '{{username}}',
-            'INSIGHTS_PASSWORD': '{{password}}',
-        },
-    },
-)
-
-ManagedCredentialType(
-    namespace='rhv',
-    kind='cloud',
-    name=gettext_noop('Red Hat Virtualization'),
-    managed=True,
-    inputs={
-        'fields': [
-            {'id': 'host', 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': gettext_noop('The host to authenticate with.')},
-            {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
-            {
-                'id': 'password',
-                'label': gettext_noop('Password'),
-                'type': 'string',
-                'secret': True,
-            },
-            {
-                'id': 'ca_file',
-                'label': gettext_noop('CA File'),
-                'type': 'string',
-                'help_text': gettext_noop('Absolute file path to the CA file to use (optional)'),
-            },
-        ],
-        'required': ['host', 'username', 'password'],
-    },
-    injectors={
-        # The duplication here is intentional; the ovirt4 inventory plugin
-        # writes a .ini file for authentication, while the ansible modules for
-        # ovirt4 use a separate authentication process that support
-        # environment variables; by injecting both, we support both
-        'file': {
-            'template': '\n'.join(
-                [
-                    '[ovirt]',
-                    'ovirt_url={{host}}',
-                    'ovirt_username={{username}}',
-                    'ovirt_password={{password}}',
-                    '{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}',
-                ]
-            )
-        },
-        'env': {'OVIRT_INI_PATH': '{{tower.filename}}', 'OVIRT_URL': '{{host}}', 'OVIRT_USERNAME': '{{username}}', 'OVIRT_PASSWORD': '{{password}}'},
-    },
-)
-
-ManagedCredentialType(
-    namespace='controller',
-    kind='cloud',
-    name=gettext_noop('Red Hat Ansible Automation Platform'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'host',
-                'label': gettext_noop('Red Hat Ansible Automation Platform'),
-                'type': 'string',
-                'help_text': gettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'),
-            },
-            {
-                'id': 'username',
-                'label': gettext_noop('Username'),
-                'type': 'string',
-                'help_text': gettext_noop(
-                    'Red Hat Ansible Automation Platform username id to authenticate as.This should not be set if an OAuth token is being used.'
-                ),
-            },
-            {
-                'id': 'password',
-                'label': gettext_noop('Password'),
-                'type': 'string',
-                'secret': True,
-            },
-            {
-                'id': 'oauth_token',
-                'label': gettext_noop('OAuth Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop('An OAuth token to use to authenticate with.This should not be set if username/password are being used.'),
-            },
-            {'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False},
-        ],
-        'required': ['host'],
-    },
-    injectors={
-        'env': {
-            'TOWER_HOST': '{{host}}',
-            'TOWER_USERNAME': '{{username}}',
-            'TOWER_PASSWORD': '{{password}}',
-            'TOWER_VERIFY_SSL': '{{verify_ssl}}',
-            'TOWER_OAUTH_TOKEN': '{{oauth_token}}',
-            'CONTROLLER_HOST': '{{host}}',
-            'CONTROLLER_USERNAME': '{{username}}',
-            'CONTROLLER_PASSWORD': '{{password}}',
-            'CONTROLLER_VERIFY_SSL': '{{verify_ssl}}',
-            'CONTROLLER_OAUTH_TOKEN': '{{oauth_token}}',
-        }
-    },
-)
-
-ManagedCredentialType(
-    namespace='kubernetes_bearer_token',
-    kind='kubernetes',
-    name=gettext_noop('OpenShift or Kubernetes API Bearer Token'),
-    inputs={
-        'fields': [
-            {
-                'id': 'host',
-                'label': gettext_noop('OpenShift or Kubernetes API Endpoint'),
-                'type': 'string',
-                'help_text': gettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'),
-            },
-            {
-                'id': 'bearer_token',
-                'label': gettext_noop('API authentication bearer token'),
-                'type': 'string',
-                'secret': True,
-            },
-            {
-                'id': 'verify_ssl',
-                'label': gettext_noop('Verify SSL'),
-                'type': 'boolean',
-                'default': True,
-            },
-            {
-                'id': 'ssl_ca_cert',
-                'label': gettext_noop('Certificate Authority data'),
-                'type': 'string',
-                'secret': True,
-                'multiline': True,
-            },
-        ],
-        'required': ['host', 'bearer_token'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='registry',
-    kind='registry',
-    name=gettext_noop('Container Registry'),
-    inputs={
-        'fields': [
-            {
-                'id': 'host',
-                'label': gettext_noop('Authentication URL'),
-                'type': 'string',
-                'help_text': gettext_noop('Authentication endpoint for the container registry.'),
-                'default': 'quay.io',
-            },
-            {
-                'id': 'username',
-                'label': gettext_noop('Username'),
-                'type': 'string',
-            },
-            {
-                'id': 'password',
-                'label': gettext_noop('Password or Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop('A password or token used to authenticate with'),
-            },
-            {
-                'id': 'verify_ssl',
-                'label': gettext_noop('Verify SSL'),
-                'type': 'boolean',
-                'default': True,
-            },
-        ],
-        'required': ['host'],
-    },
-)
-
-
-ManagedCredentialType(
-    namespace='galaxy_api_token',
-    kind='galaxy',
-    name=gettext_noop('Ansible Galaxy/Automation Hub API Token'),
-    inputs={
-        'fields': [
-            {
-                'id': 'url',
-                'label': gettext_noop('Galaxy Server URL'),
-                'type': 'string',
-                'help_text': gettext_noop('The URL of the Galaxy instance to connect to.'),
-            },
-            {
-                'id': 'auth_url',
-                'label': gettext_noop('Auth Server URL'),
-                'type': 'string',
-                'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using SSO auth.'),
-            },
-            {
-                'id': 'token',
-                'label': gettext_noop('API Token'),
-                'type': 'string',
-                'secret': True,
-                'help_text': gettext_noop('A token to use for authentication against the Galaxy instance.'),
-            },
-        ],
-        'required': ['url'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='gpg_public_key',
-    kind='cryptography',
-    name=gettext_noop('GPG Public Key'),
-    inputs={
-        'fields': [
-            {
-                'id': 'gpg_public_key',
-                'label': gettext_noop('GPG Public Key'),
-                'type': 'string',
-                'secret': True,
-                'multiline': True,
-                'help_text': gettext_noop('GPG Public Key used to validate content signatures.'),
-            },
-        ],
-        'required': ['gpg_public_key'],
-    },
-)
-
-ManagedCredentialType(
-    namespace='terraform',
-    kind='cloud',
-    name=gettext_noop('Terraform backend configuration'),
-    managed=True,
-    inputs={
-        'fields': [
-            {
-                'id': 'configuration',
-                'label': gettext_noop('Backend configuration'),
-                'type': 'string',
-                'secret': True,
-                'multiline': True,
-                'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'),
-            },
-            {
-                'id': 'gce_credentials',
-                'label': gettext_noop('Google Cloud Platform account credentials'),
-                'type': 'string',
-                'secret': True,
-                'multiline': True,
-                'help_text': gettext_noop('Google Cloud Platform account credentials in JSON format.'),
-            },
-        ],
-        'required': ['configuration'],
-    },
-)
-
-
 class CredentialInputSource(PrimordialModel):
     class Meta:
         app_label = 'main'
@@ -1309,6 +664,7 @@ class CredentialInputSource(PrimordialModel):
         view_name = 'api:credential_input_source_detail'
         return reverse(view_name, kwargs={'pk': self.pk}, request=request)

+from awx_plugins.credentials.plugins import *

 for ns, plugin in credential_plugins.items():
     CredentialType.load_plugin(ns, plugin)
@@ -4,11 +4,12 @@ import datetime
 from datetime import timezone
 import logging
 from collections import defaultdict
+import itertools
 import time

 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, DatabaseError
+from django.db import models, DatabaseError, transaction
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator
@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
     def _update_host_metrics(updated_hosts_list):
         from awx.main.models import HostMetric  # circular import

-        # bulk-create
         current_time = now()
-        HostMetric.objects.bulk_create(
-            [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
-        )
-        # bulk-update
-        batch_start, batch_size = 0, 1000
-        while batch_start <= len(updated_hosts_list):
-            batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
-            HostMetric.objects.filter(hostname__in=batched_host_list).update(
-                last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
-            )
-            batch_start += batch_size
+        # FUTURE:
+        # - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
+        # - Ability to do ORM upserts *may* have been introduced in Django 5.0.
+        #   See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
+        #   Hopefully this will be fully ready for batch use by 5.2 LTS.
+
+        args = [iter(updated_hosts_list)] * 500
+        for hosts in itertools.zip_longest(*args):
+            with transaction.atomic():
+                HostMetric.objects.bulk_create(
+                    [HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
+                )
+                HostMetric.objects.filter(hostname__in=hosts).update(
+                    last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
+                )

     @property
     def job_verbosity(self):
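The rewritten loop leans on a standard idiom: repeating one iterator 500 times and handing the copies to zip_longest yields fixed-size chunks, with the final chunk padded by None (hence the `if hostname is not None` filter in the bulk_create). A small self-contained demonstration with a chunk size of 3:

    import itertools

    items = list(range(7))

    # Three references to ONE iterator: zip_longest pulls from it round-robin,
    # so each output tuple is the next chunk of three, None-padded at the end.
    args = [iter(items)] * 3
    for chunk in itertools.zip_longest(*args):
        print([x for x in chunk if x is not None])
    # [0, 1, 2]
    # [3, 4, 5]
    # [6]

Python 3.12's itertools.batched(items, 3) produces the same chunks without the None padding, which is what the FUTURE note is pointing at.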
@@ -1,6 +1,8 @@
 from django.db import models
 from django.utils.translation import gettext_lazy as _

+from rest_framework.exceptions import ValidationError
+
 from awx.api.versioning import reverse
 from awx.main.models.base import CommonModel
 from awx.main.validators import validate_container_image_name
@@ -12,6 +14,8 @@ __all__ = ['ExecutionEnvironment']
 class ExecutionEnvironment(CommonModel):
     class Meta:
         ordering = ('-created',)
+        # Remove view permission, as a temporary solution, defer to organization read permission
+        default_permissions = ('add', 'change', 'delete')

     PULL_CHOICES = [
         ('always', _("Always pull container before running.")),
@@ -53,3 +57,12 @@ class ExecutionEnvironment(CommonModel):

     def get_absolute_url(self, request=None):
         return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
+
+    def validate_role_assignment(self, actor, role_definition):
+        if self.managed:
+            raise ValidationError({'object_id': _('Can not assign object roles to managed Execution Environments')})
+        if self.organization_id is None:
+            raise ValidationError({'object_id': _('Can not assign object roles to global Execution Environments')})
+
+        if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
+            raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})
@@ -10,10 +10,6 @@ import copy
 import os.path
 from urllib.parse import urljoin

-import yaml
-import tempfile
-import stat
-
 # Django
 from django.conf import settings
 from django.db import models, connection
@@ -28,6 +24,7 @@ from django.db.models import Q
 from rest_framework.exceptions import ParseError

 from ansible_base.lib.utils.models import prevent_search
+from awx_plugins.inventory.plugins import PluginFileInjector

 # AWX
 from awx.api.versioning import reverse
@@ -52,11 +49,9 @@ from awx.main.models.notifications import (
     NotificationTemplate,
     JobNotificationMixin,
 )
-from awx.main.models.credential.injectors import _openstack_data
 from awx.main.utils import _inventory_updates
 from awx.main.utils.safe_yaml import sanitize_jinja
-from awx.main.utils.execution_environments import to_container_path, get_control_plane_execution_environment
-from awx.main.utils.licensing import server_product_name
+from awx.main.utils.execution_environments import get_control_plane_execution_environment


 __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'SmartInventoryMembership', 'HostMetric', 'HostMetricSummaryMonthly']
@@ -933,6 +928,7 @@ class InventorySourceOptions(BaseModel):
         ('controller', _('Red Hat Ansible Automation Platform')),
         ('insights', _('Red Hat Insights')),
         ('terraform', _('Terraform State')),
+        ('openshift_virtualization', _('OpenShift Virtualization')),
     ]

     # From the options of the Django management base command
@@ -1042,7 +1038,7 @@ class InventorySourceOptions(BaseModel):
     def cloud_credential_validation(source, cred):
         if not source:
             return None
-        if cred and source not in ('custom', 'scm'):
+        if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
             # If a credential was provided, it's important that it matches
             # the actual inventory source being used (Amazon requires Amazon
             # credentials; Rackspace requires Rackspace credentials; etc...)
@@ -1051,12 +1047,14 @@ class InventorySourceOptions(BaseModel):
         # Allow an EC2 source to omit the credential. If Tower is running on
         # an EC2 instance with an IAM Role assigned, boto will use credentials
         # from the instance metadata instead of those explicitly provided.
-        elif source in CLOUD_PROVIDERS and source != 'ec2':
+        elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
             return _('Credential is required for a cloud source.')
         elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
             return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
         elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
             return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
+        elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
+            return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
         return None

     def get_cloud_credential(self):
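Taken together, the openshift_virtualization special cases mean the credential is optional for that source (like ec2), but when one is supplied it must be of kind 'kubernetes'. Expected outcomes of cloud_credential_validation after this change, with hypothetical credential objects:

    # cloud_credential_validation(source, cred)
    #
    #   ('openshift_virtualization', None)       -> None (no error; credential may be omitted)
    #   ('openshift_virtualization', k8s_cred)   -> None (kind 'kubernetes' accepted)
    #   ('openshift_virtualization', aws_cred)   -> error string (kubernetes credential required)
    #   ('ec2', None)                            -> None (unchanged; IAM instance role may supply creds)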
@@ -1424,287 +1422,5 @@ class CustomInventoryScript(CommonModelNameNotUnique):
         return reverse('api:inventory_script_detail', kwargs={'pk': self.pk}, request=request)


-class PluginFileInjector(object):
-    plugin_name = None  # Ansible core name used to reference plugin
-    # base injector should be one of None, "managed", or "template"
-    # this dictates which logic to borrow from playbook injectors
-    base_injector = None
-    # every source should have collection, these are for the collection name
-    namespace = None
-    collection = None
-    collection_migration = '2.9'  # Starting with this version, we use collections
-    use_fqcn = False  # plugin: name versus plugin: namespace.collection.name
-
-    # TODO: delete this method and update unit tests
-    @classmethod
-    def get_proper_name(cls):
-        if cls.plugin_name is None:
-            return None
-        return f'{cls.namespace}.{cls.collection}.{cls.plugin_name}'
-
-    @property
-    def filename(self):
-        """Inventory filename for using the inventory plugin
-        This is created dynamically, but the auto plugin requires this exact naming
-        """
-        return '{0}.yml'.format(self.plugin_name)
-
-    def inventory_contents(self, inventory_update, private_data_dir):
-        """Returns a string that is the content for the inventory file for the inventory plugin"""
-        return yaml.safe_dump(self.inventory_as_dict(inventory_update, private_data_dir), default_flow_style=False, width=1000)
-
-    def inventory_as_dict(self, inventory_update, private_data_dir):
-        source_vars = dict(inventory_update.source_vars_dict)  # make a copy
-        '''
-        None conveys that we should use the user-provided plugin.
-        Note that a plugin value of '' should still be overridden.
-        '''
-        if self.plugin_name is not None:
-            if hasattr(self, 'downstream_namespace') and server_product_name() != 'AWX':
-                source_vars['plugin'] = f'{self.downstream_namespace}.{self.downstream_collection}.{self.plugin_name}'
-            elif self.use_fqcn:
-                source_vars['plugin'] = f'{self.namespace}.{self.collection}.{self.plugin_name}'
-            else:
-                source_vars['plugin'] = self.plugin_name
-        return source_vars
-
-    def build_env(self, inventory_update, env, private_data_dir, private_data_files):
-        injector_env = self.get_plugin_env(inventory_update, private_data_dir, private_data_files)
-        env.update(injector_env)
-        # All CLOUD_PROVIDERS sources implement as inventory plugin from collection
-        env['ANSIBLE_INVENTORY_ENABLED'] = 'auto'
-        return env
-
-    def _get_shared_env(self, inventory_update, private_data_dir, private_data_files):
-        """By default, we will apply the standard managed injectors"""
-        injected_env = {}
-        credential = inventory_update.get_cloud_credential()
-        # some sources may have no credential, specifically ec2
-        if credential is None:
-            return injected_env
-        if self.base_injector in ('managed', 'template'):
-            injected_env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk)  # so injector knows this is inventory
-        if self.base_injector == 'managed':
-            from awx.main.models.credential import injectors as builtin_injectors
-
-            cred_kind = inventory_update.source.replace('ec2', 'aws')
-            if cred_kind in dir(builtin_injectors):
-                getattr(builtin_injectors, cred_kind)(credential, injected_env, private_data_dir)
-        elif self.base_injector == 'template':
-            safe_env = injected_env.copy()
-            args = []
-            credential.credential_type.inject_credential(credential, injected_env, safe_env, args, private_data_dir)
-            # NOTE: safe_env is handled externally to injector class by build_safe_env static method
-            # that means that managed injectors must only inject detectable env keys
-            # enforcement of this is accomplished by tests
-        return injected_env
-
-    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
-        env = self._get_shared_env(inventory_update, private_data_dir, private_data_files)
-        return env
-
-    def build_private_data(self, inventory_update, private_data_dir):
-        return self.build_plugin_private_data(inventory_update, private_data_dir)
-
-    def build_plugin_private_data(self, inventory_update, private_data_dir):
-        return None
-
-
-class azure_rm(PluginFileInjector):
-    plugin_name = 'azure_rm'
-    base_injector = 'managed'
-    namespace = 'azure'
-    collection = 'azcollection'
-
-    def get_plugin_env(self, *args, **kwargs):
-        ret = super(azure_rm, self).get_plugin_env(*args, **kwargs)
-        # We need native jinja2 types so that tags can give JSON null value
-        ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
-        return ret
-
-
-class ec2(PluginFileInjector):
-    plugin_name = 'aws_ec2'
-    base_injector = 'managed'
-    namespace = 'amazon'
-    collection = 'aws'
-
-    def get_plugin_env(self, *args, **kwargs):
-        ret = super(ec2, self).get_plugin_env(*args, **kwargs)
-        # We need native jinja2 types so that ec2_state_code will give integer
-        ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
-        return ret
-
-
-class gce(PluginFileInjector):
-    plugin_name = 'gcp_compute'
-    base_injector = 'managed'
-    namespace = 'google'
-    collection = 'cloud'
-
-    def get_plugin_env(self, *args, **kwargs):
-        ret = super(gce, self).get_plugin_env(*args, **kwargs)
-        # We need native jinja2 types so that ip addresses can give JSON null value
-        ret['ANSIBLE_JINJA2_NATIVE'] = str(True)
-        return ret
-
-    def inventory_as_dict(self, inventory_update, private_data_dir):
-        ret = super().inventory_as_dict(inventory_update, private_data_dir)
-        credential = inventory_update.get_cloud_credential()
-        # InventorySource.source_vars take precedence over ENV vars
-        if 'projects' not in ret:
-            ret['projects'] = [credential.get_input('project', default='')]
-        return ret
-
-
-class vmware(PluginFileInjector):
-    plugin_name = 'vmware_vm_inventory'
-    base_injector = 'managed'
-    namespace = 'community'
-    collection = 'vmware'
-
-
-class openstack(PluginFileInjector):
-    plugin_name = 'openstack'
-    namespace = 'openstack'
-    collection = 'cloud'
-
-    def _get_clouds_dict(self, inventory_update, cred, private_data_dir):
-        openstack_data = _openstack_data(cred)
-
-        openstack_data['clouds']['devstack']['private'] = inventory_update.source_vars_dict.get('private', True)
-        ansible_variables = {
-            'use_hostnames': True,
-            'expand_hostvars': False,
-            'fail_on_errors': True,
-        }
-        provided_count = 0
-        for var_name in ansible_variables:
-            if var_name in inventory_update.source_vars_dict:
-                ansible_variables[var_name] = inventory_update.source_vars_dict[var_name]
-                provided_count += 1
-        if provided_count:
-            # Must we provide all 3 because the user provides any 1 of these??
-            # this probably results in some incorrect mangling of the defaults
-            openstack_data['ansible'] = ansible_variables
-        return openstack_data
-
-    def build_plugin_private_data(self, inventory_update, private_data_dir):
-        credential = inventory_update.get_cloud_credential()
-        private_data = {'credentials': {}}
-
-        openstack_data = self._get_clouds_dict(inventory_update, credential, private_data_dir)
-        private_data['credentials'][credential] = yaml.safe_dump(openstack_data, default_flow_style=False, allow_unicode=True)
-        return private_data
-
-    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
-        env = super(openstack, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
-        credential = inventory_update.get_cloud_credential()
-        cred_data = private_data_files['credentials']
-        env['OS_CLIENT_CONFIG_FILE'] = to_container_path(cred_data[credential], private_data_dir)
-        return env
-
-
-class rhv(PluginFileInjector):
-    """ovirt uses the custom credential templating, and that is all"""
-
-    plugin_name = 'ovirt'
-    base_injector = 'template'
-    initial_version = '2.9'
-    namespace = 'ovirt'
-    collection = 'ovirt'
-    downstream_namespace = 'redhat'
-    downstream_collection = 'rhv'
-    use_fqcn = True
-
-
-class satellite6(PluginFileInjector):
-    plugin_name = 'foreman'
-    namespace = 'theforeman'
-    collection = 'foreman'
-    downstream_namespace = 'redhat'
-    downstream_collection = 'satellite'
-    use_fqcn = True
-
-    def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
-        # this assumes that this is merged
-        # https://github.com/ansible/ansible/pull/52693
-        credential = inventory_update.get_cloud_credential()
-        ret = super(satellite6, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
-        if credential:
-            ret['FOREMAN_SERVER'] = credential.get_input('host', default='')
-            ret['FOREMAN_USER'] = credential.get_input('username', default='')
-            ret['FOREMAN_PASSWORD'] = credential.get_input('password', default='')
-        return ret
-
-
-class terraform(PluginFileInjector):
-    plugin_name = 'terraform_state'
-    namespace = 'cloud'
|
||||
collection = 'terraform'
|
||||
use_fqcn = True
|
||||
|
||||
def inventory_as_dict(self, inventory_update, private_data_dir):
|
||||
ret = super().inventory_as_dict(inventory_update, private_data_dir)
|
||||
credential = inventory_update.get_cloud_credential()
|
||||
config_cred = credential.get_input('configuration')
|
||||
if config_cred:
|
||||
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||
with os.fdopen(handle, 'w') as f:
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
f.write(config_cred)
|
||||
ret['backend_config_files'] = to_container_path(path, private_data_dir)
|
||||
return ret
|
||||
|
||||
def build_plugin_private_data(self, inventory_update, private_data_dir):
|
||||
credential = inventory_update.get_cloud_credential()
|
||||
|
||||
private_data = {'credentials': {}}
|
||||
gce_cred = credential.get_input('gce_credentials', default=None)
|
||||
if gce_cred:
|
||||
private_data['credentials'][credential] = gce_cred
|
||||
return private_data
|
||||
|
||||
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
|
||||
env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
|
||||
credential = inventory_update.get_cloud_credential()
|
||||
cred_data = private_data_files['credentials']
|
||||
if credential in cred_data:
|
||||
env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
|
||||
return env
|
||||
|
||||
|
||||
class controller(PluginFileInjector):
|
||||
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
|
||||
base_injector = 'template'
|
||||
namespace = 'awx'
|
||||
collection = 'awx'
|
||||
downstream_namespace = 'ansible'
|
||||
downstream_collection = 'controller'
|
||||
|
||||
|
||||
class insights(PluginFileInjector):
|
||||
plugin_name = 'insights'
|
||||
base_injector = 'template'
|
||||
namespace = 'redhatinsights'
|
||||
collection = 'insights'
|
||||
downstream_namespace = 'redhat'
|
||||
downstream_collection = 'insights'
|
||||
use_fqcn = True
|
||||
|
||||
|
||||
class constructed(PluginFileInjector):
|
||||
plugin_name = 'constructed'
|
||||
namespace = 'ansible'
|
||||
collection = 'builtin'
|
||||
|
||||
def build_env(self, *args, **kwargs):
|
||||
env = super().build_env(*args, **kwargs)
|
||||
# Enable script inventory plugin so we pick up the script files from source inventories
|
||||
env['ANSIBLE_INVENTORY_ENABLED'] += ',script'
|
||||
env['ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED'] = 'True'
|
||||
return env
|
||||
|
||||
|
||||
for cls in PluginFileInjector.__subclasses__():
|
||||
InventorySourceOptions.injectors[cls.__name__] = cls
|
||||
|
||||
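The closing loop above is the whole registration mechanism: every `PluginFileInjector` subclass defined in this module is indexed by its class name in `InventorySourceOptions.injectors`, so the source kind ('ec2', 'gce', ...) doubles as the lookup key. A minimal, self-contained sketch of the same `__subclasses__()` registry idiom (the class names here are hypothetical, not AWX's):

# Self-registration via __subclasses__(): each subclass is indexed by class name.
class BaseInjector:
    plugin_name = None


class aws_demo(BaseInjector):  # hypothetical source kind
    plugin_name = 'aws_ec2'


class gcp_demo(BaseInjector):  # hypothetical source kind
    plugin_name = 'gcp_compute'


injectors = {cls.__name__: cls for cls in BaseInjector.__subclasses__()}

# Look up an injector by name, analogous to injectors['ec2'] in AWX.
assert injectors['aws_demo'].plugin_name == 'aws_ec2'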
@@ -396,11 +396,11 @@ class JobNotificationMixin(object):
             'verbosity': 0,
         },
         'job_friendly_name': 'Job',
-        'url': 'https://towerhost/#/jobs/playbook/1010',
+        'url': 'https://platformhost/#/jobs/playbook/1010',
         'approval_status': 'approved',
         'approval_node_name': 'Approve Me',
-        'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
-        'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
+        'workflow_url': 'https://platformhost/#/jobs/workflow/1010',
+        'job_metadata': """{'url': 'https://platformhost/$/jobs/playbook/13',
         'traceback': '',
         'status': 'running',
         'started': '2019-08-07T21:46:38.362630+00:00',
@@ -591,14 +591,20 @@ def get_role_from_object_role(object_role):
         role_name = role_name.lower()
+        model_cls = apps.get_model('main', target_model_name)
+        target_model_name = get_type_for_model(model_cls)
+
         # exception cases completely specific to one model naming convention
         if target_model_name == 'notification_template':
-            target_model_name = 'notification'  # total exception
+            target_model_name = 'notification'
+        elif target_model_name == 'workflow_job_template':
+            target_model_name = 'workflow'

         role_name = f'{target_model_name}_admin_role'
     elif rd.name.endswith(' Admin'):
         # cases like "project-admin"
         role_name = 'admin_role'
     elif rd.name == 'Organization Audit':
         role_name = 'auditor_role'
     else:
+        print(rd.name)
         model_name, role_name = rd.name.split()
         role_name = role_name.lower()
         role_name += '_role'
@@ -683,9 +689,15 @@ def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs)
     for role_id in pk_set:
         if reverse:
-            child_role = Role.objects.get(id=role_id)
+            try:
+                child_role = Role.objects.get(id=role_id)
+            except Role.DoesNotExist:
+                continue
         else:
-            parent_role = Role.objects.get(id=role_id)
+            try:
+                parent_role = Role.objects.get(id=role_id)
+            except Role.DoesNotExist:
+                continue

     # To a fault, we want to avoid running this if triggered from implicit_parents management
     # we only want to do anything if we know for sure this is a non-implicit team role
@@ -17,7 +17,7 @@ from collections import OrderedDict

 # Django
 from django.conf import settings
-from django.db import models, connection
+from django.db import models, connection, transaction
 from django.core.exceptions import NON_FIELD_ERRORS
 from django.utils.translation import gettext_lazy as _
 from django.utils.timezone import now
@@ -31,6 +31,7 @@ from rest_framework.exceptions import ParseError
 from polymorphic.models import PolymorphicModel

 from ansible_base.lib.utils.models import prevent_search, get_type_for_model
+from ansible_base.rbac import permission_registry

 # AWX
 from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
@@ -197,9 +198,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
     @classmethod
     def _submodels_with_roles(cls):
-        ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
-        ct_dict = ContentType.objects.get_for_models(*ujt_classes)
-        return [ct.id for ct in ct_dict.values()]
+        return [c for c in cls.__subclasses__() if permission_registry.is_registered(c)]

     @classmethod
     def accessible_pk_qs(cls, accessor, role_field):
@@ -215,8 +214,16 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
         action = to_permissions[role_field]

+        # Special condition for super auditor
+        role_subclasses = cls._submodels_with_roles()
+        role_cts = ContentType.objects.get_for_models(*role_subclasses).values()
+        all_codenames = {f'{action}_{cls._meta.model_name}' for cls in role_subclasses}
+        if not (all_codenames - accessor.singleton_permissions()):
+            qs = cls.objects.filter(polymorphic_ctype__in=role_cts)
+            return qs.values_list('id', flat=True)
+
         return (
-            RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__startswith=action, content_type_id__in=cls._submodels_with_roles())
+            RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__in=all_codenames, content_type_id__in=[ct.id for ct in role_cts])
            .values_list('object_id')
            .distinct()
         )
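The short-circuit added above hinges on a set difference: if the accessor's global (singleton) permissions already cover every `{action}_{model}` codename, per-object role evaluation is skipped and the whole queryset is returned. A toy illustration of that check (the codenames below are illustrative only):

# Toy model of the super-auditor short-circuit: when global permissions cover
# every required codename, the set difference is empty and the expensive
# per-object RoleEvaluation filtering can be skipped.
all_codenames = {'view_jobtemplate', 'view_project', 'view_workflowjobtemplate'}

global_perms = {'view_jobtemplate', 'view_project', 'view_workflowjobtemplate', 'view_organization'}
assert not (all_codenames - global_perms)  # empty difference -> return the full queryset

partial_perms = {'view_jobtemplate'}
assert all_codenames - partial_perms  # non-empty -> fall through to role evaluation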
@@ -273,7 +280,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
         if new_next_schedule:
             if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
                 return  # no-op, common for infrequent schedules
-            self.next_schedule = new_next_schedule
+
+            # If in a transaction, use select_for_update to lock the next schedule row, which
+            # prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
+            if transaction.get_autocommit():
+                self.next_schedule = related_schedules.first()
+            else:
+                self.next_schedule = related_schedules.select_for_update().first()
+
             self.next_job_run = new_next_schedule.next_run
             self.save(update_fields=['next_schedule', 'next_job_run'])
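The autocommit check in this hunk matters because `select_for_update()` may only be evaluated inside a transaction; with autocommit on, Django raises `TransactionManagementError`. A minimal sketch of the same branch logic against a generic ordered queryset (illustrative, not AWX's exact helper):

from django.db import transaction


def pick_next_schedule(related_schedules):
    """Return the next schedule row, locking it when a transaction is open.

    `related_schedules` is assumed to be an ordered Django QuerySet.
    """
    if transaction.get_autocommit():
        # No transaction is open: a plain read, nothing can be locked.
        return related_schedules.first()
    # Inside a transaction: lock the row so a concurrent delete elsewhere
    # cannot invalidate the foreign key before this transaction commits.
    return related_schedules.select_for_update().first()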
@@ -138,7 +138,8 @@ class TaskBase:

         # Lock
         with task_manager_bulk_reschedule():
-            with advisory_lock(f"{self.prefix}_lock", wait=False) as acquired:
+            lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000  # convert to milliseconds
+            with advisory_lock(f"{self.prefix}_lock", lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
                 with transaction.atomic():
                     if acquired is False:
                         logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")
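Both scheduler changes in this changeset thread a session timeout down to the Postgres advisory lock, so a hung lock holder cannot pin the lock indefinitely. A rough, hedged equivalent of try-lock-with-session-timeout using plain psycopg2 (this is a sketch of the underlying mechanism, not the real `advisory_lock` helper; it assumes psycopg2 and PostgreSQL 14+ for `idle_session_timeout`):

import psycopg2  # assumption: psycopg2 installed, PostgreSQL 14+ server


def try_advisory_lock(dsn, name, timeout_ms):
    """Try to take a session-level advisory lock with an idle-session timeout.

    If the holding process hangs, Postgres disconnects the idle session after
    `timeout_ms`, which releases the session-level lock for the next caller.
    """
    conn = psycopg2.connect(dsn)
    with conn.cursor() as cur:
        cur.execute("SET idle_session_timeout = %s", (timeout_ms,))
        # hashtext() maps the lock name to an integer advisory-lock key
        cur.execute("SELECT pg_try_advisory_lock(hashtext(%s))", (name,))
        acquired = cur.fetchone()[0]
    return conn, acquired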
@@ -405,10 +405,11 @@ class AWXReceptorJob:
         finally:
             # Make sure to always release the work unit if we established it
             if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
-                try:
-                    receptor_ctl.simple_command(f"work release {self.unit_id}")
-                except Exception:
-                    logger.exception(f"Error releasing work unit {self.unit_id}.")
+                if settings.RECEPTOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error':
+                    try:
+                        receptor_ctl.simple_command(f"work release {self.unit_id}")
+                    except Exception:
+                        logger.exception(f"Error releasing work unit {self.unit_id}.")

     def _run_internal(self, receptor_ctl):
         # Create a socketpair. Where the left side will be used for writing our payload
@@ -36,6 +36,9 @@ import ansible_runner.cleanup
 # dateutil
 from dateutil.parser import parse as parse_date

+# django-ansible-base
+from ansible_base.resource_registry.tasks.sync import SyncExecutor
+
 # AWX
 from awx import __version__ as awx_application_version
 from awx.main.access import access_registry
@@ -51,7 +54,7 @@ from awx.main.models import (
     Job,
     convert_jsonfields,
 )
-from awx.main.constants import ACTIVE_STATES
+from awx.main.constants import ACTIVE_STATES, ERROR_STATES
 from awx.main.dispatch.publish import task
 from awx.main.dispatch import get_task_queuename, reaper
 from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
@@ -682,6 +685,8 @@ def awx_receptor_workunit_reaper():

     unit_ids = [id for id in receptor_work_list]
     jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
+    if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
+        jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
     for job in jobs_with_unreleased_receptor_units:
         logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
         receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
@@ -701,7 +706,10 @@ def awx_k8s_reaper():
             logger.debug("Checking for orphaned k8s pods for {}.".format(group))
             pods = PodManager.list_active_jobs(group)
             time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
-            for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
+            reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
+            if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
+                reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
+            for job in reap_job_candidates:
                 logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
                 try:
                     pm = PodManager(job)
@@ -712,7 +720,8 @@ def awx_k8s_reaper():

 @task(queue=get_task_queuename)
 def awx_periodic_scheduler():
-    with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
+    lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
+    with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
         if acquired is False:
             logger.debug("Not running periodic scheduler, another task holds lock")
             return
@@ -964,3 +973,27 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
         permission_check_func(creater, copy_mapping.values())
     if isinstance(new_obj, Inventory):
         update_inventory_computed_fields.delay(new_obj.id)
+
+
+@task(queue=get_task_queuename)
+def periodic_resource_sync():
+    if not getattr(settings, 'RESOURCE_SERVER', None):
+        logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
+        return
+
+    with advisory_lock('periodic_resource_sync', wait=False) as acquired:
+        if acquired is False:
+            logger.debug("Not running periodic_resource_sync, another task holds lock")
+            return
+        logger.debug("Running periodic resource sync")
+
+        executor = SyncExecutor()
+        executor.run()
+        for key, item_list in executor.results.items():
+            if not item_list or key == 'noop':
+                continue
+            # Log creations and conflicts
+            if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
+                logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
+            else:
+                logger.info(f'Periodic resource sync {key}:\n{item_list}')
@@ -0,0 +1,5 @@
{
    "K8S_AUTH_HOST": "https://foo.invalid",
    "K8S_AUTH_API_KEY": "fooo",
    "K8S_AUTH_VERIFY_SSL": "False"
}
@@ -1,22 +1,30 @@
 import pytest
+from unittest import mock

 from awx.api.versioning import reverse

+from django.test.utils import override_settings
+
+from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
+from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import
+
+
+class HeaderTrackingMiddleware(object):
+    def __init__(self):
+        self.environ = {}
+
+    def process_request(self, request):
+        pass
+
+    def process_response(self, request, response):
+        self.environ = request.environ
+

 @pytest.mark.django_db
 def test_proxy_ip_allowed(get, patch, admin):
     url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
     patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})

-    class HeaderTrackingMiddleware(object):
-        environ = {}
-
-        def process_request(self, request):
-            pass
-
-        def process_response(self, request, response):
-            self.environ = request.environ
-
     # By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
     # should just pass through
     middleware = HeaderTrackingMiddleware()
@@ -45,6 +53,51 @@ def test_proxy_ip_allowed(get, patch, admin):
         assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'


+@pytest.mark.django_db
+class TestTrustedProxyAllowListIntegration:
+    @pytest.fixture
+    def url(self, patch, admin):
+        url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
+        patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
+        patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
+        return url
+
+    @pytest.fixture
+    def middleware(self):
+        return HeaderTrackingMiddleware()
+
+    def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware):  # noqa: F811
+        # Headers should NOT get deleted
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
+        }
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
+                get(url, user=admin, middleware=middleware, **headers)
+        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
+
+    def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
+        # Headers should NOT get deleted
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
+        }
+        with override_settings(PROXY_IP_ALLOWED_LIST=[]):
+            get(url, user=admin, middleware=middleware, **headers)
+        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
+
+    def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
+        # A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
+            'REMOTE_ADDR': 'my.proxy.example.org',
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
+        }
+        get(url, user=admin, middleware=middleware, **headers)
+        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
+
+
 @pytest.mark.django_db
 class TestDeleteViews:
     def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
awx/main/tests/functional/api/test_immutablesharedfields.py (new file, 66 lines)
@@ -0,0 +1,66 @@
import pytest

from awx.api.versioning import reverse
from awx.main.models import Organization


@pytest.mark.django_db
class TestImmutableSharedFields:
    @pytest.fixture(autouse=True)
    def configure_settings(self, settings):
        settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False

    def test_create_raises_permission_denied(self, admin_user, post):
        orgA = Organization.objects.create(name='orgA')
        resp = post(
            url=reverse('api:team_list'),
            data={'name': 'teamA', 'organization': orgA.id},
            user=admin_user,
            expect=403,
        )
        assert "Creation of this resource is not allowed" in resp.data['detail']

    def test_perform_delete_raises_permission_denied(self, admin_user, delete):
        orgA = Organization.objects.create(name='orgA')
        team = orgA.teams.create(name='teamA')
        resp = delete(
            url=reverse('api:team_detail', kwargs={'pk': team.id}),
            user=admin_user,
            expect=403,
        )
        assert "Deletion of this resource is not allowed" in resp.data['detail']

    def test_perform_update(self, admin_user, patch):
        orgA = Organization.objects.create(name='orgA')
        team = orgA.teams.create(name='teamA')
        # allow patching non-shared fields
        patch(
            url=reverse('api:team_detail', kwargs={'pk': team.id}),
            data={"description": "can change this field"},
            user=admin_user,
            expect=200,
        )
        orgB = Organization.objects.create(name='orgB')
        # prevent patching shared fields
        resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
        assert "Cannot change shared field" in resp.data['organization']

    @pytest.mark.parametrize(
        'role',
        ['admin_role', 'member_role'],
    )
    @pytest.mark.parametrize('resource', ['organization', 'team'])
    def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
        orgA = Organization.objects.create(name='orgA')
        if resource == 'organization':
            role = getattr(orgA, role)
        elif resource == 'team':
            teamA = orgA.teams.create(name='teamA')
            role = getattr(teamA, role)
        resp = post(
            url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
            data={'id': role.id},
            user=admin_user,
            expect=403,
        )
        assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
@@ -32,13 +32,6 @@ def node_type_instance():
     return fn


-@pytest.fixture
-def instance_group(job_factory):
-    ig = InstanceGroup(name="east")
-    ig.save()
-    return ig
-
-
 @pytest.fixture
 def containerized_instance_group(instance_group, kube_credential):
     ig = InstanceGroup(name="container")
@@ -1,4 +1,5 @@
 import pytest
+from unittest import mock

 # AWX
 from awx.api.serializers import JobTemplateSerializer
@@ -8,10 +9,15 @@ from awx.main.migrations import _save_password_keys as save_password_keys

 # Django
 from django.apps import apps
+from django.test.utils import override_settings

 # DRF
 from rest_framework.exceptions import ValidationError

+# DAB
+from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
+from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import
+

 @pytest.mark.django_db
 @pytest.mark.parametrize(
@@ -369,3 +375,113 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
     )
     assert r.status_code == 400
     assert "Cannot start automatically, an inventory is required." in str(r.data)
+
+
+@pytest.mark.django_db
+class TestJobTemplateCallbackProxyIntegration:
+    """
+    Test the interaction of provision job template callback feature and:
+        settings.PROXY_IP_ALLOWED_LIST
+        x-trusted-proxy http header
+    """
+
+    @pytest.fixture
+    def job_template(self, inventory, project):
+        jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
+        return jt
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
+            'REMOTE_HOST': 'baz',
+            'REMOTE_ADDR': 'baz',
+        }
+        r = post(
+            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
+        )
+        assert r.data['msg'] == 'No matching host could be found!'
+
+    @pytest.mark.parametrize(
+        'headers, expected',
+        (
+            pytest.param(
+                {
+                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+                    'REMOTE_HOST': 'my.proxy.example.org',
+                },
+                201,
+            ),
+            pytest.param(
+                {
+                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+                    'REMOTE_HOST': 'not-my-proxy.org',
+                },
+                400,
+            ),
+        ),
+    )
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected):  # noqa: F811
+        job_template.inventory.hosts.create(name='my.proxy.example.org')
+
+        post(
+            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+            data={'host_config_key': 'abcd'},
+            user=admin_user,
+            expect=expected,
+            **headers
+        )
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
+    def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+            'REMOTE_ADDR': 'bar',
+            'REMOTE_HOST': 'baz',
+        }
+        post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
+        }
+
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
+                post(
+                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+                    data={'host_config_key': 'abcd'},
+                    user=admin_user,
+                    expect=201,
+                    **headers
+                )
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
+            'REMOTE_ADDR': 'bar',
+            'REMOTE_HOST': 'baz',
+        }
+
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
+                post(
+                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+                    data={'host_config_key': 'abcd'},
+                    user=admin_user,
+                    expect=400,
+                    **headers
+                )
File diff suppressed because one or more lines are too long
@@ -20,7 +20,7 @@ from awx.main.migrations._dab_rbac import setup_managed_role_definitions

 # AWX
 from awx.main.models.projects import Project
-from awx.main.models.ha import Instance
+from awx.main.models.ha import Instance, InstanceGroup

 from rest_framework.test import (
     APIRequestFactory,
@@ -92,6 +92,11 @@ def deploy_jobtemplate(project, inventory, credential):
     return jt


+@pytest.fixture()
+def execution_environment():
+    return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", managed=True)
+
+
 @pytest.fixture
 def setup_managed_roles():
     "Run the migration script to pre-create managed role definitions"
@@ -730,6 +735,11 @@ def jt_linked(organization, project, inventory, machine_credential, credential,
     return jt


+@pytest.fixture
+def instance_group():
+    return InstanceGroup.objects.create(name="east")
+
+
 @pytest.fixture
 def workflow_job_template(organization):
     wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)
@@ -109,3 +109,17 @@ def test_team_indirect_access(get, team, admin_user, inventory):
     assert len(by_username['u1']['summary_fields']['indirect_access']) == 0
     access_entry = by_username['u1']['summary_fields']['direct_access'][0]
     assert sorted(access_entry['descendant_roles']) == sorted(['adhoc_role', 'use_role', 'update_role', 'read_role', 'admin_role'])
+
+
+@pytest.mark.django_db
+def test_workflow_access_list(workflow_job_template, alice, bob, setup_managed_roles, get, admin_user):
+    """Basic verification that WFJT access_list is functional"""
+    workflow_job_template.admin_role.members.add(alice)
+    workflow_job_template.organization.workflow_admin_role.members.add(bob)
+
+    url = reverse('api:workflow_job_template_access_list', kwargs={'pk': workflow_job_template.pk})
+    for u in (alice, bob, admin_user):
+        response = get(url, user=u, expect=200)
+        user_ids = [item['id'] for item in response.data['results']]
+        assert alice.pk in user_ids
+        assert bob.pk in user_ids
@@ -0,0 +1,41 @@
import pytest

from awx.main.access import InstanceGroupAccess, NotificationTemplateAccess

from ansible_base.rbac.models import RoleDefinition


@pytest.mark.django_db
def test_instance_group_object_role_delete(rando, instance_group, setup_managed_roles):
    """Basic functionality of IG object-level admin role function AAP-25506"""
    rd = RoleDefinition.objects.get(name='InstanceGroup Admin')
    rd.give_permission(rando, instance_group)
    access = InstanceGroupAccess(rando)
    assert access.can_delete(instance_group)


@pytest.mark.django_db
def test_notification_template_object_role_change(rando, notification_template, setup_managed_roles):
    """Basic functionality of NT object-level admin role function AAP-25493"""
    rd = RoleDefinition.objects.get(name='NotificationTemplate Admin')
    rd.give_permission(rando, notification_template)
    access = NotificationTemplateAccess(rando)
    assert access.can_change(notification_template, {'name': 'new name'})


@pytest.mark.django_db
def test_organization_auditor_role(rando, setup_managed_roles, organization, inventory, project, jt_linked):
    obj_list = (inventory, project, jt_linked)
    for obj in obj_list:
        assert obj.organization == organization, obj  # sanity

    assert [rando.has_obj_perm(obj, 'view') for obj in obj_list] == [False for i in range(3)], obj_list

    rd = RoleDefinition.objects.get(name='Organization Audit')
    rd.give_permission(rando, organization)

    codename_set = set(rd.permissions.values_list('codename', flat=True))
    assert not ({'view_inventory', 'view_jobtemplate', 'audit_organization'} - codename_set)  # sanity

    assert [obj in type(obj).access_qs(rando) for obj in obj_list] == [True for i in range(3)], obj_list
    assert [rando.has_obj_perm(obj, 'view') for obj in obj_list] == [True for i in range(3)], obj_list
@@ -2,9 +2,11 @@ import pytest

 from django.contrib.contenttypes.models import ContentType
 from django.urls import reverse as django_reverse
+from django.test.utils import override_settings

 from awx.api.versioning import reverse
 from awx.main.models import JobTemplate, Inventory, Organization
 from awx.main.access import JobTemplateAccess, WorkflowJobTemplateAccess

 from ansible_base.rbac.models import RoleDefinition
@@ -66,13 +68,17 @@ def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_ma

 @pytest.mark.django_db
 def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):
+    # TODO: just a delete_inventory, without change_inventory
     rd, _ = RoleDefinition.objects.get_or_create(
-        name='inventory-delete', permissions=['delete_inventory', 'view_inventory'], content_type=ContentType.objects.get_for_model(Inventory)
+        name='inventory-delete',
+        permissions=['delete_inventory', 'view_inventory', 'change_inventory'],
+        content_type=ContentType.objects.get_for_model(Inventory),
     )
     rd.give_permission(rando, inventory)
     inv_id = inventory.pk
     inv_url = reverse('api:inventory_detail', kwargs={'pk': inv_id})
-    patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
+    # TODO: eventually this will be valid test, for now ignore
+    # patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
     delete(url=inv_url, user=rando, expect=202)
     assert Inventory.objects.get(id=inv_id).pending_deletion
@@ -88,3 +94,63 @@ def test_assign_custom_add_role(admin_user, rando, organization, post, setup_man
     inv_id = r.data['id']
     inventory = Inventory.objects.get(id=inv_id)
     assert rando.has_obj_perm(inventory, 'change')
+
+
+@pytest.mark.django_db
+def test_jt_creation_permissions(setup_managed_roles, inventory, project, rando):
+    """This tests that if you assign someone required permissions in the new API
+    using the managed roles, then that works to give permissions to create a job template"""
+    inv_rd = RoleDefinition.objects.get(name='Inventory Admin')
+    proj_rd = RoleDefinition.objects.get(name='Project Admin')
+    # establish prior state
+    access = JobTemplateAccess(rando)
+    assert not access.can_add({'inventory': inventory.pk, 'project': project.pk, 'name': 'foo-jt'})
+
+    inv_rd.give_permission(rando, inventory)
+    proj_rd.give_permission(rando, project)
+
+    assert access.can_add({'inventory': inventory.pk, 'project': project.pk, 'name': 'foo-jt'})
+
+
+@pytest.mark.django_db
+def test_workflow_creation_permissions(setup_managed_roles, organization, workflow_job_template, rando):
+    """Similar to JT, assigning new roles gives creator permissions"""
+    org_wf_rd = RoleDefinition.objects.get(name='Organization WorkflowJobTemplate Admin')
+    assert workflow_job_template.organization == organization  # sanity
+    # establish prior state
+    access = WorkflowJobTemplateAccess(rando)
+    assert not access.can_add({'name': 'foo-flow', 'organization': organization.pk})
+    org_wf_rd.give_permission(rando, organization)
+
+    assert access.can_add({'name': 'foo-flow', 'organization': organization.pk})
+
+
+@pytest.mark.django_db
+def test_assign_credential_to_user_of_another_org(setup_managed_roles, credential, admin_user, rando, org_admin, organization, post):
+    '''Test that a credential can only be assigned to a user in the same organization'''
+    # cannot assign credential to rando, as rando is not in the same org as the credential
+    rd = RoleDefinition.objects.get(name="Credential Admin")
+    credential.organization = organization
+    credential.save(update_fields=['organization'])
+    assert credential.organization not in Organization.access_qs(rando, 'member')
+    url = django_reverse('roleuserassignment-list')
+    resp = post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=400)
+    assert "You cannot grant credential access to a User not in the credentials' organization" in str(resp.data)
+
+    # can assign credential to superuser
+    rando.is_superuser = True
+    rando.save()
+    post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
+
+    # can assign credential to org_admin
+    assert credential.organization in Organization.access_qs(org_admin, 'member')
+    post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
+
+
+@pytest.mark.django_db
+@override_settings(ALLOW_LOCAL_RESOURCE_MANAGEMENT=False)
+def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles):
+    member_rd = RoleDefinition.objects.get(name='Organization Member')
+    url = django_reverse('roleuserassignment-list')
+    r = post(url, data={'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}, user=admin_user, expect=400)
+    assert 'Not managed locally' in str(r.data)
awx/main/tests/functional/dab_rbac/test_external_auditor.py (new file, 120 lines)
@@ -0,0 +1,120 @@
import pytest

from django.apps import apps

from ansible_base.rbac.managed import SystemAuditor
from ansible_base.rbac import permission_registry

from awx.main.access import check_user_access, get_user_queryset
from awx.main.models import User, AdHocCommandEvent
from awx.api.versioning import reverse


@pytest.fixture
def ext_auditor_rd():
    info = SystemAuditor(overrides={'name': 'Alien Auditor', 'shortname': 'ext_auditor'})
    rd, _ = info.get_or_create(apps)
    return rd


@pytest.fixture
def ext_auditor(ext_auditor_rd):
    u = User.objects.create(username='external-auditor-user')
    ext_auditor_rd.give_global_permission(u)
    return u


@pytest.fixture
def obj_factory(request):
    def _rf(fixture_name):
        obj = request.getfixturevalue(fixture_name)

        # special case to make obj organization-scoped
        if obj._meta.model_name == 'executionenvironment':
            obj.organization = request.getfixturevalue('organization')
            obj.save(update_fields=['organization'])

        return obj

    return _rf


@pytest.mark.django_db
def test_access_qs_external_auditor(ext_auditor_rd, rando, job_template):
    ext_auditor_rd.give_global_permission(rando)
    jt_cls = apps.get_model('main', 'JobTemplate')
    ujt_cls = apps.get_model('main', 'UnifiedJobTemplate')
    assert job_template in jt_cls.access_qs(rando)
    assert job_template.id in jt_cls.access_ids_qs(rando)
    assert job_template.id in ujt_cls.accessible_pk_qs(rando, 'read_role')


@pytest.mark.django_db
@pytest.mark.parametrize('model', sorted(permission_registry.all_registered_models, key=lambda cls: cls._meta.model_name))
class TestExternalAuditorRoleAllModels:
    def test_access_can_read_method(self, obj_factory, model, ext_auditor, rando):
        fixture_name = model._meta.verbose_name.replace(' ', '_')
        obj = obj_factory(fixture_name)

        assert check_user_access(rando, model, 'read', obj) is False
        assert check_user_access(ext_auditor, model, 'read', obj) is True

    def test_access_get_queryset(self, obj_factory, model, ext_auditor, rando):
        fixture_name = model._meta.verbose_name.replace(' ', '_')
        obj = obj_factory(fixture_name)

        assert obj not in get_user_queryset(rando, model)
        assert obj in get_user_queryset(ext_auditor, model)

    def test_global_list(self, obj_factory, model, ext_auditor, rando, get):
        fixture_name = model._meta.verbose_name.replace(' ', '_')
        obj_factory(fixture_name)

        url = reverse(f'api:{fixture_name}_list')
        r = get(url, user=rando, expect=200)
        initial_ct = r.data['count']

        r = get(url, user=ext_auditor, expect=200)
        assert r.data['count'] == initial_ct + 1

        if fixture_name in ('job_template', 'workflow_job_template'):
            url = reverse('api:unified_job_template_list')
            r = get(url, user=rando, expect=200)
            initial_ct = r.data['count']

            r = get(url, user=ext_auditor, expect=200)
            assert r.data['count'] == initial_ct + 1

    def test_detail_view(self, obj_factory, model, ext_auditor, rando, get):
        fixture_name = model._meta.verbose_name.replace(' ', '_')
        obj = obj_factory(fixture_name)

        url = reverse(f'api:{fixture_name}_detail', kwargs={'pk': obj.pk})
        get(url, user=rando, expect=403)  # NOTE: should be 401
        get(url, user=ext_auditor, expect=200)


@pytest.mark.django_db
class TestExternalAuditorNonRoleModels:
    def test_ad_hoc_command_view(self, ad_hoc_command_factory, rando, ext_auditor, get):
        """The AdHocCommandAccess class references is_system_auditor

        this is to prove it works with other system-level view roles"""
        ad_hoc_command = ad_hoc_command_factory()
        url = reverse('api:ad_hoc_command_list')
        r = get(url, user=rando, expect=200)
        assert r.data['count'] == 0
        r = get(url, user=ext_auditor, expect=200)
        assert r.data['count'] == 1
        assert r.data['results'][0]['id'] == ad_hoc_command.id

        event = AdHocCommandEvent.objects.create(ad_hoc_command=ad_hoc_command)
        url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': ad_hoc_command.id})
        r = get(url, user=rando, expect=403)
        r = get(url, user=ext_auditor, expect=200)
        assert r.data['count'] == 1

        url = reverse('api:ad_hoc_command_event_detail', kwargs={'pk': event.id})
        r = get(url, user=rando, expect=403)
        r = get(url, user=ext_auditor, expect=200)
        assert r.data['id'] == event.id
awx/main/tests/functional/dab_rbac/test_managed_roles.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import pytest

from ansible_base.rbac.models import RoleDefinition, DABPermission


@pytest.mark.django_db
def test_roles_to_not_create(setup_managed_roles):
    assert RoleDefinition.objects.filter(name='Organization Admin').count() == 1

    SHOULD_NOT_EXIST = ('Organization Organization Admin', 'Organization Team Admin', 'Organization InstanceGroup Admin')

    bad_rds = RoleDefinition.objects.filter(name__in=SHOULD_NOT_EXIST)
    if bad_rds.exists():
        bad_names = list(bad_rds.values_list('name', flat=True))
        raise Exception(f'Found RoleDefinitions that should not exist: {bad_names}')


@pytest.mark.django_db
def test_project_update_role(setup_managed_roles):
    """Role to allow updating a project on the object-level should exist"""
    assert RoleDefinition.objects.filter(name='Project Update').count() == 1


@pytest.mark.django_db
def test_org_child_add_permission(setup_managed_roles):
    for model_name in ('Project', 'NotificationTemplate', 'WorkflowJobTemplate', 'Inventory'):
        rd = RoleDefinition.objects.get(name=f'Organization {model_name} Admin')
        assert 'add_' in str(rd.permissions.values_list('codename', flat=True)), f'The {rd.name} role definition expected to contain add_ permissions'

    # special case for JobTemplate, anyone can create one with use permission to project/inventory
    assert not DABPermission.objects.filter(codename='add_jobtemplate').exists()
@@ -1,4 +1,5 @@
 from unittest import mock
+import json

 import pytest
@@ -6,17 +7,29 @@ from django.contrib.contenttypes.models import ContentType

 from crum import impersonate

-from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions
+from awx.main.fields import ImplicitRoleField
+from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions, get_role_codenames, get_role_definition
 from awx.main.models import User, Organization, WorkflowJobTemplate, WorkflowJobTemplateNode, Team
 from awx.api.versioning import reverse

 from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition
+from ansible_base.rbac import permission_registry


 @pytest.mark.django_db
 @pytest.mark.parametrize(
     'role_name',
-    ['execution_environment_admin_role', 'project_admin_role', 'admin_role', 'auditor_role', 'read_role', 'execute_role', 'notification_admin_role'],
+    [
+        'execution_environment_admin_role',
+        'workflow_admin_role',
+        'project_admin_role',
+        'admin_role',
+        'auditor_role',
+        'read_role',
+        'execute_role',
+        'approval_role',
+        'notification_admin_role',
+    ],
 )
 def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
     """
@@ -26,11 +39,41 @@ def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
     """
     getattr(organization, role_name).members.add(rando)
     assignment = RoleUserAssignment.objects.get(user=rando)
+    print(assignment.role_definition.name)
     old_role = get_role_from_object_role(assignment.object_role)
     assert old_role.id == getattr(organization, role_name).id


+@pytest.mark.django_db
+@pytest.mark.parametrize('model', sorted(permission_registry.all_registered_models, key=lambda cls: cls._meta.model_name))
+def test_role_migration_matches(request, model, setup_managed_roles):
+    fixture_name = model._meta.verbose_name.replace(' ', '_')
+    obj = request.getfixturevalue(fixture_name)
+    role_ct = 0
+    for field in obj._meta.get_fields():
+        if isinstance(field, ImplicitRoleField):
+            if field.name == 'read_role':
+                continue  # intentionally left as "Compat" roles
+            role_ct += 1
+            old_role = getattr(obj, field.name)
+            old_codenames = set(get_role_codenames(old_role))
+            rd = get_role_definition(old_role)
+            new_codenames = set(rd.permissions.values_list('codename', flat=True))
+            # all the old roles should map to a non-Compat role definition
+            if 'Compat' not in rd.name:
+                model_rds = RoleDefinition.objects.filter(content_type=ContentType.objects.get_for_model(obj))
+                rd_data = {}
+                for rd in model_rds:
+                    rd_data[rd.name] = list(rd.permissions.values_list('codename', flat=True))
+                assert (
+                    'Compat' not in rd.name
+                ), f'Permissions for old vs new roles did not match.\nold {field.name}: {old_codenames}\nnew:\n{json.dumps(rd_data, indent=2)}'
+            assert new_codenames == set(old_codenames)
+
+    # In the old system these models did not have object-level roles, all others expect some model roles
+    if model._meta.model_name not in ('notificationtemplate', 'executionenvironment'):
+        assert role_ct > 0
+
+
 @pytest.mark.django_db
 def test_role_naming(setup_managed_roles):
     qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='dmin')
@@ -141,3 +184,11 @@ def test_implicit_parents_no_assignments(organization):
     with mock.patch('awx.main.models.rbac.give_or_remove_permission') as mck:
         Team.objects.create(name='random team', organization=organization)
         mck.assert_not_called()
+
+
+@pytest.mark.django_db
+def test_user_auditor_rel(organization, rando, setup_managed_roles):
+    assert rando not in organization.auditor_role
+    audit_rd = RoleDefinition.objects.get(name='Organization Audit')
+    audit_rd.give_permission(rando, organization)
+    assert list(rando.auditor_of_organizations) == [organization]
@@ -4,25 +4,19 @@ import pytest
 # CRUM
 from crum import impersonate

-# Django
-from django.contrib.contenttypes.models import ContentType
-
 # AWX
-from awx.main.models import UnifiedJobTemplate, Job, JobTemplate, WorkflowJobTemplate, WorkflowApprovalTemplate, Project, WorkflowJob, Schedule, Credential
+from awx.main.models import UnifiedJobTemplate, Job, JobTemplate, WorkflowJobTemplate, Project, WorkflowJob, Schedule, Credential
 from awx.api.versioning import reverse
 from awx.main.constants import JOB_VARIABLE_PREFIXES


 @pytest.mark.django_db
 def test_subclass_types():
-    assert set(UnifiedJobTemplate._submodels_with_roles()) == set(
-        [
-            ContentType.objects.get_for_model(JobTemplate).id,
-            ContentType.objects.get_for_model(Project).id,
-            ContentType.objects.get_for_model(WorkflowJobTemplate).id,
-            ContentType.objects.get_for_model(WorkflowApprovalTemplate).id,
-        ]
-    )
+    assert set(UnifiedJobTemplate._submodels_with_roles()) == {
+        JobTemplate,
+        Project,
+        WorkflowJobTemplate,
+    }


 @pytest.mark.django_db
@@ -46,6 +46,8 @@ def generate_fake_var(element):

 def credential_kind(source):
     """Given the inventory source kind, return expected credential kind"""
+    if source == 'openshift_virtualization':
+        return 'kubernetes_bearer_token'
     return source.replace('ec2', 'aws')

@@ -85,3 +85,17 @@ class TestMigrationSmoke:

         RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
         assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
+
+        # Regression testing for bug that comes from current vs past models mismatch
+        RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition')
+        assert not RoleDefinition.objects.filter(name='Organization Organization Admin').exists()
+        # Test special cases in managed role creation
+        assert not RoleDefinition.objects.filter(name='Organization Team Admin').exists()
+        assert not RoleDefinition.objects.filter(name='Organization InstanceGroup Admin').exists()
+
+        # Test that a removed EE model permission has been deleted
+        new_state = migrator.apply_tested_migration(
+            ('main', '0195_EE_permissions'),
+        )
+        DABPermission = new_state.apps.get_model('dab_rbac', 'DABPermission')
+        assert not DABPermission.objects.filter(codename='view_executionenvironment').exists()
awx/main/tests/functional/test_rbac_execution_environment.py (new file, 148 lines)
@@ -0,0 +1,148 @@
|
||||
import pytest

from django.contrib.contenttypes.models import ContentType

from awx.main.access import ExecutionEnvironmentAccess
from awx.main.models import ExecutionEnvironment, Organization, Team
from awx.main.models.rbac import get_role_codenames

from awx.api.versioning import reverse
from django.urls import reverse as django_reverse

from ansible_base.rbac.models import RoleDefinition


@pytest.fixture
def ee_rd():
    return RoleDefinition.objects.create_from_permissions(
        name='EE object admin',
        permissions=['change_executionenvironment', 'delete_executionenvironment'],
        content_type=ContentType.objects.get_for_model(ExecutionEnvironment),
    )


@pytest.fixture
def org_ee_rd():
    return RoleDefinition.objects.create_from_permissions(
        name='EE org admin',
        permissions=['add_executionenvironment', 'change_executionenvironment', 'delete_executionenvironment', 'view_organization'],
        content_type=ContentType.objects.get_for_model(Organization),
    )


@pytest.mark.django_db
def test_old_ee_role_maps_to_correct_permissions(organization):
    assert set(get_role_codenames(organization.execution_environment_admin_role)) == {
        'view_organization',
        'add_executionenvironment',
        'change_executionenvironment',
        'delete_executionenvironment',
    }


@pytest.fixture
def org_ee(organization):
    return ExecutionEnvironment.objects.create(name='some user ee', organization=organization)


@pytest.fixture
def check_user_capabilities(get, setup_managed_roles):
    def _rf(user, obj, expected):
        url = reverse('api:execution_environment_list')
        r = get(url, user=user, expect=200)
        for item in r.data['results']:
            if item['id'] == obj.pk:
                assert expected == item['summary_fields']['user_capabilities']
                break
        else:
            raise RuntimeError(f'Could not find expected object ({obj}) in EE list result: {r.data}')

    return _rf


# ___ begin tests ___


@pytest.mark.django_db
def test_any_user_can_view_global_ee(control_plane_execution_environment, rando):
    assert ExecutionEnvironmentAccess(rando).can_read(control_plane_execution_environment)


@pytest.mark.django_db
def test_managed_ee_not_assignable(control_plane_execution_environment, ee_rd, rando, admin_user, post):
    url = django_reverse('roleuserassignment-list')
    r = post(url, {'role_definition': ee_rd.pk, 'user': rando.id, 'object_id': control_plane_execution_environment.pk}, user=admin_user, expect=400)
    assert 'Can not assign object roles to managed Execution Environment' in str(r.data)


@pytest.mark.django_db
def test_org_member_required_for_assignment(org_ee, ee_rd, rando, admin_user, post):
    url = django_reverse('roleuserassignment-list')
    r = post(url, {'role_definition': ee_rd.pk, 'user': rando.id, 'object_id': org_ee.pk}, user=admin_user, expect=400)
    assert 'User must have view permission to Execution Environment organization' in str(r.data)


@pytest.mark.django_db
def test_team_can_have_permission(org_ee, ee_rd, rando, admin_user, post):
    org2 = Organization.objects.create(name='a different team')
    team = Team.objects.create(name='a team', organization=org2)
    team.member_role.members.add(rando)
    assert org_ee not in ExecutionEnvironmentAccess(rando).get_queryset()  # user can not view the EE

    url = django_reverse('roleteamassignment-list')

    # can give object roles to the team now
    post(url, {'role_definition': ee_rd.pk, 'team': team.id, 'object_id': org_ee.pk}, user=admin_user, expect=201)
    assert rando.has_obj_perm(org_ee, 'change')
    assert org_ee in ExecutionEnvironmentAccess(rando).get_queryset()  # user can view the EE now


@pytest.mark.django_db
def test_give_object_permission_to_ee(org_ee, ee_rd, org_member, check_user_capabilities):
    access = ExecutionEnvironmentAccess(org_member)
    assert access.can_read(org_ee)  # by virtue of being an org member
    assert not access.can_change(org_ee, {'name': 'new'})
    check_user_capabilities(org_member, org_ee, {'edit': False, 'delete': False, 'copy': False})

    ee_rd.give_permission(org_member, org_ee)
    assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization.id})

    check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': False})


@pytest.mark.django_db
def test_need_related_organization_access(org_ee, ee_rd, org_member):
    org2 = Organization.objects.create(name='another organization')
    ee_rd.give_permission(org_member, org_ee)
    org2.member_role.members.add(org_member)
    access = ExecutionEnvironmentAccess(org_member)
    assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization})
    assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization.id})
    assert not access.can_change(org_ee, {'name': 'new', 'organization': org2.id})
    assert not access.can_change(org_ee, {'name': 'new', 'organization': org2})

    # User can make the change if they have relevant permission to the new organization
    org_ee.organization.execution_environment_admin_role.members.add(org_member)
    org2.execution_environment_admin_role.members.add(org_member)
    assert access.can_change(org_ee, {'name': 'new', 'organization': org2.id})
    assert access.can_change(org_ee, {'name': 'new', 'organization': org2})


@pytest.mark.django_db
@pytest.mark.parametrize('style', ['new', 'old'])
def test_give_org_permission_to_ee(org_ee, organization, org_member, check_user_capabilities, style, org_ee_rd):
    access = ExecutionEnvironmentAccess(org_member)
    assert not access.can_change(org_ee, {'name': 'new'})
    check_user_capabilities(org_member, org_ee, {'edit': False, 'delete': False, 'copy': False})

    if style == 'new':
        org_ee_rd.give_permission(org_member, organization)
        assert org_member.has_obj_perm(org_ee.organization, 'add_executionenvironment')  # sanity
    else:
        organization.execution_environment_admin_role.members.add(org_member)

    assert access.can_change(org_ee, {'name': 'new', 'organization': organization.id})
    check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': True})

    # Extra check, user can not remove the EE from the organization
    assert not access.can_change(org_ee, {'name': 'new', 'organization': None})
@@ -182,8 +182,14 @@ def test_job_template_creator_access(project, organization, rando, post, setup_m

@pytest.mark.django_db
@pytest.mark.job_permissions
@pytest.mark.parametrize('lacking', ['project', 'inventory'])
def test_job_template_insufficient_creator_permissions(lacking, project, inventory, organization, rando, post):
@pytest.mark.parametrize(
    'lacking,reason',
    [
        ('project', 'You do not have use permission on Project'),
        ('inventory', 'You do not have use permission on Inventory'),
    ],
)
def test_job_template_insufficient_creator_permissions(lacking, reason, project, inventory, organization, rando, post):
    if lacking != 'project':
        project.use_role.members.add(rando)
    else:
@@ -192,12 +198,13 @@ def test_job_template_insufficient_creator_permissions(lacking, project, invento
        inventory.use_role.members.add(rando)
    else:
        inventory.read_role.members.add(rando)
    post(
    response = post(
        url=reverse('api:job_template_list'),
        data=dict(name='newly-created-jt', inventory=inventory.id, project=project.pk, playbook='helloworld.yml'),
        user=rando,
        expect=403,
    )
    assert reason in response.data[lacking]


@pytest.mark.django_db
@@ -99,7 +99,9 @@ def test_notification_template_access_org_user(notification_template, user):
@pytest.mark.django_db
def test_notificaiton_template_orphan_access_org_admin(notification_template, organization, org_admin):
    notification_template.organization = None
    notification_template.save(update_fields=['organization'])
    access = NotificationTemplateAccess(org_admin)
    assert not org_admin.has_obj_perm(notification_template, 'change')
    assert not access.can_change(notification_template, {'organization': organization.id})
@@ -48,3 +48,17 @@ def test_org_resource_role(ext_auth, organization, rando, org_admin):
    assert access.can_attach(organization, rando, 'member_role.members') == ext_auth
    organization.member_role.members.add(rando)
    assert access.can_unattach(organization, rando, 'member_role.members') == ext_auth


@pytest.mark.django_db
def test_delete_org_while_workflow_active(workflow_job_template):
    '''
    Delete org while workflow job is active (i.e. changing status)
    '''
    assert workflow_job_template.organization  # sanity check
    wj = workflow_job_template.create_unified_job()  # status should be new
    workflow_job_template.organization.delete()
    wj.refresh_from_db()
    assert wj.status != 'pending'  # sanity check
    wj.status = 'pending'  # status needs to change in order to trigger workflow_job_template.save()
    wj.save(update_fields=['status'])
@@ -35,6 +35,13 @@ class TestWorkflowJobTemplateAccess:
        assert org_member in wfjt.execute_role
        assert org_member in wfjt.read_role

    def test_non_super_admin_no_add_without_org(self, wfjt, organization, rando):
        organization.member_role.members.add(rando)
        wfjt.admin_role.members.add(rando)
        access = WorkflowJobTemplateAccess(rando, save_messages=True)
        assert not access.can_add({'name': 'without org'})
        assert 'An organization is required to create a workflow job template for normal user' in access.messages['organization']


@pytest.mark.django_db
class TestWorkflowJobTemplateNodeAccess:
@@ -8,9 +8,22 @@ from django.db import connection


@contextmanager
def advisory_lock(*args, **kwargs):
def advisory_lock(*args, lock_session_timeout_milliseconds=0, **kwargs):
    if connection.vendor == 'postgresql':
        cur = None
        idle_in_transaction_session_timeout = None
        idle_session_timeout = None
        if lock_session_timeout_milliseconds > 0:
            with connection.cursor() as cur:
                idle_in_transaction_session_timeout = cur.execute('SHOW idle_in_transaction_session_timeout').fetchone()[0]
                idle_session_timeout = cur.execute('SHOW idle_session_timeout').fetchone()[0]
                cur.execute(f"SET idle_in_transaction_session_timeout = '{lock_session_timeout_milliseconds}'")
                cur.execute(f"SET idle_session_timeout = '{lock_session_timeout_milliseconds}'")
        with django_pglocks_advisory_lock(*args, **kwargs) as internal_lock:
            yield internal_lock
        if lock_session_timeout_milliseconds > 0:
            with connection.cursor() as cur:
                cur.execute(f"SET idle_in_transaction_session_timeout = '{idle_in_transaction_session_timeout}'")
                cur.execute(f"SET idle_session_timeout = '{idle_session_timeout}'")
    else:
        yield True
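The new lock_session_timeout_milliseconds keyword lets a caller bound how long an idle session may sit on the lock before PostgreSQL reaps it; the settings hunk further down adds TASK_MANAGER_LOCK_TIMEOUT for exactly this purpose. A minimal caller sketch, assuming the import path; the lock name and work function are illustrative:

    # Sketch: hold an advisory lock, but let PostgreSQL reap the session if it
    # idles longer than the timeout. Import path is assumed, not from the diff.
    from awx.main.utils.pglock import advisory_lock

    def do_work():
        pass  # placeholder for the protected section

    def run_exclusive_job():
        # 360000 ms mirrors TASK_MANAGER_LOCK_TIMEOUT (300s timeout + 60s grace)
        with advisory_lock('my_exclusive_job', wait=False, lock_session_timeout_milliseconds=360000) as held:
            if held:
                do_work()
            # with wait=False, held is False when another session owns the lock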
48 awx/main/utils/proxy.py (new file)
@@ -0,0 +1,48 @@
# Copyright (c) 2024 Ansible, Inc.
# All Rights Reserved.


# DRF
from rest_framework.request import Request


"""
Note that these methods operate on request.environ. This data is from uwsgi.
It is the source data from which request.headers (read-only) is constructed.
"""


def is_proxy_in_headers(request: Request, proxy_list: list[str], headers: list[str]) -> bool:
    """
    Determine if the request went through at least one proxy in the list.
    Example:
        request.environ = {
            "HTTP_X_FOO": "8.8.8.8, 192.168.2.1",
            "REMOTE_ADDR": "192.168.2.1",
            "REMOTE_HOST": "foobar"
        }
        proxy_list = ["192.168.2.1"]
        headers = ["HTTP_X_FOO", "REMOTE_ADDR", "REMOTE_HOST"]

    The above would return True since 192.168.2.1 is a value for the header HTTP_X_FOO

    request: The DRF/Django request. request.environ dict will be used for searching for proxies
    proxy_list: A list of known and trusted proxies; entries may be IPs or hostnames
    headers: A list of keys for which to consider values that may contain a proxy
    """

    remote_hosts = set()

    for header in headers:
        for value in request.environ.get(header, '').split(','):
            value = value.strip()
            if value:
                remote_hosts.add(value)

    return bool(remote_hosts.intersection(set(proxy_list)))


def delete_headers_starting_with_http(request: Request, headers: list[str]):
    for header in headers:
        if header.startswith('HTTP_'):
            request.environ.pop(header, None)
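A sketch of how these helpers might be combined: trust forwarded headers only when the request demonstrably passed through a known proxy, and strip the spoofable HTTP_* keys otherwise. The function name, proxy list, and header keys here are illustrative, not from the diff:

    # Hypothetical hardening step built on the two helpers above.
    from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http

    TRUSTED_PROXIES = ['192.168.2.1']                    # illustrative value
    SPOOFABLE = ['HTTP_X_FORWARDED_FOR', 'HTTP_X_FOO']   # illustrative header keys

    def harden_remote_headers(request):
        # REMOTE_ADDR is set by the WSGI server itself, so include it in the search
        if not is_proxy_in_headers(request, TRUSTED_PROXIES, SPOOFABLE + ['REMOTE_ADDR']):
            # No trusted proxy in the chain: drop client-supplied HTTP_* headers
            delete_headers_starting_with_http(request, SPOOFABLE)
        return request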
@@ -47,7 +47,6 @@ class WebsocketRelayConnection:
        verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT,
    ):
        self.name = name
        self.event_loop = asyncio.get_event_loop()
        self.stats = stats
        self.remote_host = remote_host
        self.remote_port = remote_port
@@ -110,7 +109,10 @@ class WebsocketRelayConnection:
            self.stats.record_connection_lost()

    def start(self):
        self.async_task = self.event_loop.create_task(self.connect())
        self.async_task = asyncio.get_running_loop().create_task(
            self.connect(),
            name=f"WebsocketRelayConnection.connect.{self.name}",
        )
        return self.async_task

    def cancel(self):
@@ -121,7 +123,10 @@ class WebsocketRelayConnection:
        # metrics messages
        # the "metrics" group is not subscribed to in the typical fashion, so we
        # just explicitly create it
        producer = self.event_loop.create_task(self.run_producer("metrics", websocket, "metrics"))
        producer = asyncio.get_running_loop().create_task(
            self.run_producer("metrics", websocket, "metrics"),
            name="WebsocketRelayConnection.run_producer.metrics",
        )
        self.producers["metrics"] = {"task": producer, "subscriptions": {"metrics"}}
        async for msg in websocket:
            self.stats.record_message_received()
@@ -143,7 +148,10 @@ class WebsocketRelayConnection:
        name = f"{self.remote_host}-{group}"
        origin_channel = payload['origin_channel']
        if not self.producers.get(name):
            producer = self.event_loop.create_task(self.run_producer(name, websocket, group))
            producer = asyncio.get_running_loop().create_task(
                self.run_producer(name, websocket, group),
                name=f"WebsocketRelayConnection.run_producer.{name}",
            )
            self.producers[name] = {"task": producer, "subscriptions": {origin_channel}}
            logger.debug(f"Producer {name} started.")
        else:
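The recurring pattern in these hunks is to create tasks on the currently running loop rather than a loop captured at construction time, and to give each task a stable name so asyncio.all_tasks() output is readable when diagnosing a stuck relay. A standalone illustration (not AWX code):

    import asyncio

    async def worker():
        await asyncio.sleep(60)

    async def main():
        # Named tasks show up identifiably in introspection output
        asyncio.get_running_loop().create_task(worker(), name="WebsocketRelayConnection.connect.example")
        for task in asyncio.all_tasks():
            print(task.get_name())  # prints the given name instead of "Task-2"

    asyncio.run(main())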
@@ -297,16 +305,15 @@ class WebSocketRelayManager(object):
                pass

    async def run(self):
        event_loop = asyncio.get_running_loop()

        self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
        self.stats_mgr = RelayWebsocketStatsManager(self.local_hostname)
        self.stats_mgr.start()

        database_conf = deepcopy(settings.DATABASES['default'])
        database_conf['OPTIONS'] = deepcopy(database_conf.get('OPTIONS', {}))

        for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
            database_conf[k] = v
            if k != 'OPTIONS':
                database_conf[k] = v
        for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
            database_conf['OPTIONS'][k] = v
@@ -322,7 +329,10 @@
        )

        await async_conn.set_autocommit(True)
        on_ws_heartbeat_task = event_loop.create_task(self.on_ws_heartbeat(async_conn))
        on_ws_heartbeat_task = asyncio.get_running_loop().create_task(
            self.on_ws_heartbeat(async_conn),
            name="WebSocketRelayManager.on_ws_heartbeat",
        )

        # Establishes a websocket connection to /websocket/relay on all API servers
        while True:
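The reworked loop fixes a shallow-merge bug: a LISTENER_DATABASES entry that carried its own OPTIONS used to replace the entire OPTIONS dict from DATABASES, while now non-OPTIONS keys are copied and OPTIONS is merged key by key. A standalone sketch of the difference, with illustrative values:

    from copy import deepcopy

    # Illustrative settings shapes, not AWX's actual values
    DATABASES = {'default': {'NAME': 'awx', 'OPTIONS': {'sslmode': 'prefer', 'connect_timeout': 5}}}
    LISTENER_DATABASES = {'default': {'OPTIONS': {'keepalives': 1}}}

    conf = deepcopy(DATABASES['default'])
    conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {}))
    for k, v in LISTENER_DATABASES.get('default', {}).items():
        if k != 'OPTIONS':
            conf[k] = v
    for k, v in LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
        conf['OPTIONS'][k] = v

    # The merge keeps sslmode/connect_timeout and adds keepalives; the old code
    # would have replaced OPTIONS wholesale and lost the original keys.
    assert conf['OPTIONS'] == {'sslmode': 'prefer', 'connect_timeout': 5, 'keepalives': 1}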
@@ -262,6 +262,7 @@ START_TASK_LIMIT = 100
# We have the grace period so the task manager can bail out before the timeout.
TASK_MANAGER_TIMEOUT = 300
TASK_MANAGER_TIMEOUT_GRACE_PERIOD = 60
TASK_MANAGER_LOCK_TIMEOUT = TASK_MANAGER_TIMEOUT + TASK_MANAGER_TIMEOUT_GRACE_PERIOD

# Number of seconds _in addition to_ the task manager timeout a job can stay
# in waiting without being reaped
@@ -492,6 +493,7 @@ CELERYBEAT_SCHEDULE = {
    'cleanup_images': {'task': 'awx.main.tasks.system.cleanup_images_and_files', 'schedule': timedelta(hours=3)},
    'cleanup_host_metrics': {'task': 'awx.main.tasks.host_metrics.cleanup_host_metrics', 'schedule': timedelta(hours=3, minutes=30)},
    'host_metric_summary_monthly': {'task': 'awx.main.tasks.host_metrics.host_metric_summary_monthly', 'schedule': timedelta(hours=4)},
    'periodic_resource_sync': {'task': 'awx.main.tasks.system.periodic_resource_sync', 'schedule': timedelta(minutes=15)},
}

# Django Caching Configuration
@@ -656,6 +658,10 @@ AWX_ANSIBLE_CALLBACK_PLUGINS = ""
# Automatically remove nodes that have missed their heartbeats after some time
AWX_AUTO_DEPROVISION_INSTANCES = False

# If False, do not allow creation of resources that are shared with the platform ingress
# e.g. organizations, teams, and users
ALLOW_LOCAL_RESOURCE_MANAGEMENT = True

# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
# Note: This setting may be overridden by database settings.
PENDO_TRACKING_STATE = "off"
@@ -778,6 +784,11 @@ INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True

# ------------------------
# OpenShift Virtualization
# ------------------------
OPENSHIFT_VIRTUALIZATION_EXCLUDE_EMPTY_GROUPS = True

# ---------------------
# ----- Custom -----
# ---------------------
@@ -817,7 +828,7 @@ MANAGE_ORGANIZATION_AUTH = True
DISABLE_LOCAL_AUTH = False

# Note: This setting may be overridden by database settings.
TOWER_URL_BASE = "https://towerhost"
TOWER_URL_BASE = "https://platformhost"

INSIGHTS_URL_BASE = "https://example.org"
INSIGHTS_AGENT_MIME = 'application/example'
@@ -998,6 +1009,7 @@ AWX_RUNNER_KEEPALIVE_SECONDS = 0

# Delete completed work units in receptor
RECEPTOR_RELEASE_WORK = True
RECPETOR_KEEP_WORK_ON_ERROR = False

# K8S only. Use receptor_log_level on AWX spec to set this properly
RECEPTOR_LOG_LEVEL = 'info'
3004 awx/sso/conf.py (file diff suppressed because it is too large)
@@ -64,7 +64,7 @@
        <div class="col-sm-6">
        </div>
        <div class="col-sm-6 footer-copyright">
            Copyright © 2021 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
            Copyright © 2024 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
        </div>
    </div>
</div>
@@ -59,7 +59,7 @@ function ActivityStream() {
    {
      page: 1,
      page_size: 20,
      order_by: '-timestamp',
      order_by: '-id',
    },
    ['id', 'page', 'page_size']
  );
@@ -89,7 +89,7 @@
    "LC_ALL": "en_US.UTF-8",
    "MFLAGS": "-w",
    "OLDPWD": "/awx_devel",
    "AWX_HOST": "https://towerhost",
    "AWX_HOST": "https://platformhost",
    "HOSTNAME": "awx",
    "LANGUAGE": "en_US:en",
    "SDB_HOST": "0.0.0.0",
@@ -56,6 +56,10 @@ describe('<InventorySourceAdd />', () => {
        ['satellite6', 'Red Hat Satellite 6'],
        ['openstack', 'OpenStack'],
        ['rhv', 'Red Hat Virtualization'],
        [
          'openshift_virtualization',
          'Red Hat OpenShift Virtualization',
        ],
        ['controller', 'Red Hat Ansible Automation Platform'],
      ],
    },
@@ -23,6 +23,8 @@ const ansibleDocUrls = {
    'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
  terraform:
    'https://github.com/ansible-collections/cloud.terraform/blob/main/docs/cloud.terraform.terraform_state_inventory.rst',
  openshift_virtualization:
    'https://kubevirt.io/kubevirt.core/latest/plugins/kubevirt.html',
};

const getInventoryHelpTextStrings = () => ({
@@ -121,7 +123,7 @@ const getInventoryHelpTextStrings = () => ({
        <br />
        {value && (
          <div>
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            and also go to `}
            <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
            {t`and click on Update Revision on Launch.`}
@@ -140,7 +142,7 @@
        <br />
        {value && (
          <div>
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            and also go to `}
            <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
            {t`and click on Update Revision on Launch`}
@@ -26,6 +26,7 @@ import {
  TerraformSubForm,
  VMwareSubForm,
  VirtualizationSubForm,
  OpenShiftVirtualizationSubForm,
} from './InventorySourceSubForms';

const buildSourceChoiceOptions = (options) => {
@@ -231,6 +232,15 @@ const InventorySourceFormFields = ({
            sourceOptions={sourceOptions}
          />
        ),
        openshift_virtualization: (
          <OpenShiftVirtualizationSubForm
            autoPopulateCredential={
              !source?.id ||
              source?.source !== 'openshift_virtualization'
            }
            sourceOptions={sourceOptions}
          />
        ),
      }[sourceField.value]
    }
  </FormColumnLayout>
@@ -0,0 +1,64 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';

import { t } from '@lingui/macro';
import { useConfig } from 'contexts/Config';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
  OptionsField,
  VerbosityField,
  EnabledVarField,
  EnabledValueField,
  HostFilterField,
  SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const OpenShiftVirtualizationSubForm = ({ autoPopulateCredential }) => {
  const helpText = getHelpText();
  const { setFieldValue, setFieldTouched } = useFormikContext();
  const [credentialField, credentialMeta, credentialHelpers] =
    useField('credential');
  const config = useConfig();

  const handleCredentialUpdate = useCallback(
    (value) => {
      setFieldValue('credential', value);
      setFieldTouched('credential', true, false);
    },
    [setFieldValue, setFieldTouched]
  );

  const docsBaseUrl = getDocsBaseUrl(config);
  return (
    <>
      <CredentialLookup
        credentialTypeNamespace="kubernetes_bearer_token"
        label={t`Credential`}
        helperTextInvalid={credentialMeta.error}
        isValid={!credentialMeta.touched || !credentialMeta.error}
        onBlur={() => credentialHelpers.setTouched()}
        onChange={handleCredentialUpdate}
        value={credentialField.value}
        required
        autoPopulate={autoPopulateCredential}
        validate={required(t`Select a value for this field`)}
      />
      <VerbosityField />
      <HostFilterField />
      <EnabledVarField />
      <EnabledValueField />
      <OptionsField />
      <SourceVarsField
        popoverContent={helpText.sourceVars(
          docsBaseUrl,
          'openshift_virtualization'
        )}
      />
    </>
  );
};

export default OpenShiftVirtualizationSubForm;
@@ -0,0 +1,65 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import VirtualizationSubForm from './VirtualizationSubForm';

jest.mock('../../../../api');

const initialValues = {
  credential: null,
  overwrite: false,
  overwrite_vars: false,
  source_path: '',
  source_project: null,
  source_script: null,
  source_vars: '---\n',
  update_cache_timeout: 0,
  update_on_launch: true,
  verbosity: 1,
};

describe('<VirtualizationSubForm />', () => {
  let wrapper;

  beforeEach(async () => {
    CredentialsAPI.read.mockResolvedValue({
      data: { count: 0, results: [] },
    });

    await act(async () => {
      wrapper = mountWithContexts(
        <Formik initialValues={initialValues}>
          <VirtualizationSubForm />
        </Formik>
      );
    });
  });

  afterAll(() => {
    jest.clearAllMocks();
  });

  test('should render subform fields', () => {
    expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
    expect(
      wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
    ).toHaveLength(1);
    expect(
      wrapper.find('VariablesField[label="Source variables"]')
    ).toHaveLength(1);
  });

  test('should make expected api calls', () => {
    expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
    expect(CredentialsAPI.read).toHaveBeenCalledWith({
      credential_type__namespace: 'rhv',
      order_by: 'name',
      page: 1,
      page_size: 5,
    });
  });
});
@@ -9,3 +9,4 @@ export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm';
export { default as OpenShiftVirtualizationSubForm } from './OpenShiftVirtualizationSubForm';
@@ -89,7 +89,7 @@
    "LC_ALL": "en_US.UTF-8",
    "MFLAGS": "-w",
    "OLDPWD": "/awx_devel",
    "AWX_HOST": "https://towerhost",
    "AWX_HOST": "https://platformhost",
    "HOSTNAME": "awx",
    "LANGUAGE": "en_US:en",
    "SDB_HOST": "0.0.0.0",
@@ -164,7 +164,7 @@
    "ANSIBLE_RETRY_FILES_ENABLED": "False",
    "MAX_EVENT_RES": "700000",
    "ANSIBLE_CALLBACK_PLUGINS": "/awx_devel/awx/plugins/callback",
    "AWX_HOST": "https://towerhost",
    "AWX_HOST": "https://platformhost",
    "ANSIBLE_SSH_CONTROL_PATH_DIR": "/tmp/awx_2_a4b1afiw/cp",
    "ANSIBLE_STDOUT_CALLBACK": "awx_display"
},
@@ -120,7 +120,7 @@ function NotificationTemplatesList() {
  toolbarSearchColumns={[
    {
      name: t`Name`,
      key: 'name',
      key: 'name__icontains',
      isDefault: true,
    },
    {
@@ -16,7 +16,7 @@ describe('<AzureAD />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL:
          'https://towerhost/sso/complete/azuread-oauth2/',
          'https://platformhost/sso/complete/azuread-oauth2/',
        SOCIAL_AUTH_AZUREAD_OAUTH2_KEY: 'mock key',
        SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: '$encrypted$',
        SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {},
@@ -22,7 +22,7 @@ describe('<AzureADDetail />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL:
          'https://towerhost/sso/complete/azuread-oauth2/',
          'https://platformhost/sso/complete/azuread-oauth2/',
        SOCIAL_AUTH_AZUREAD_OAUTH2_KEY: 'mock key',
        SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: '$encrypted$',
        SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {},
@@ -62,7 +62,7 @@ describe('<AzureADDetail />', () => {
    assertDetail(
      wrapper,
      'Azure AD OAuth2 Callback URL',
      'https://towerhost/sso/complete/azuread-oauth2/'
      'https://platformhost/sso/complete/azuread-oauth2/'
    );
    assertDetail(wrapper, 'Azure AD OAuth2 Key', 'mock key');
    assertDetail(wrapper, 'Azure AD OAuth2 Secret', 'Encrypted');
@@ -22,7 +22,7 @@ describe('<AzureADEdit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL:
          'https://towerhost/sso/complete/azuread-oauth2/',
          'https://platformhost/sso/complete/azuread-oauth2/',
        SOCIAL_AUTH_AZUREAD_OAUTH2_KEY: 'mock key',
        SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: '$encrypted$',
        SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {},
@@ -19,7 +19,7 @@ describe('<GitHub />', () => {
    SettingsAPI.readCategory.mockResolvedValueOnce({
      data: {
        SOCIAL_AUTH_GITHUB_CALLBACK_URL:
          'https://towerhost/sso/complete/github/',
          'https://platformhost/sso/complete/github/',
        SOCIAL_AUTH_GITHUB_KEY: 'mock github key',
        SOCIAL_AUTH_GITHUB_SECRET: '$encrypted$',
        SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP: null,
@@ -29,7 +29,7 @@ describe('<GitHub />', () => {
    SettingsAPI.readCategory.mockResolvedValueOnce({
      data: {
        SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL:
          'https://towerhost/sso/complete/github-org/',
          'https://platformhost/sso/complete/github-org/',
        SOCIAL_AUTH_GITHUB_ORG_KEY: '',
        SOCIAL_AUTH_GITHUB_ORG_SECRET: '$encrypted$',
        SOCIAL_AUTH_GITHUB_ORG_NAME: '',
@@ -40,7 +40,7 @@ describe('<GitHub />', () => {
    SettingsAPI.readCategory.mockResolvedValueOnce({
      data: {
        SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL:
          'https://towerhost/sso/complete/github-team/',
          'https://platformhost/sso/complete/github-team/',
        SOCIAL_AUTH_GITHUB_TEAM_KEY: 'OAuth2 key (Client ID)',
        SOCIAL_AUTH_GITHUB_TEAM_SECRET: '$encrypted$',
        SOCIAL_AUTH_GITHUB_TEAM_ID: 'team_id',
@@ -51,7 +51,7 @@ describe('<GitHub />', () => {
    SettingsAPI.readCategory.mockResolvedValueOnce({
      data: {
        SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL:
          'https://towerhost/sso/complete/github-enterprise/',
          'https://platformhost/sso/complete/github-enterprise/',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_URL: 'https://localhost/url',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL: 'https://localhost/apiurl',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY: 'ent_key',
@@ -63,7 +63,7 @@ describe('<GitHub />', () => {
    SettingsAPI.readCategory.mockResolvedValueOnce({
      data: {
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL:
          'https://towerhost/sso/complete/github-enterprise-org/',
          'https://platformhost/sso/complete/github-enterprise-org/',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL: 'https://localhost/url',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL: 'https://localhost/apiurl',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY: 'ent_org_key',
@@ -76,7 +76,7 @@ describe('<GitHub />', () => {
    SettingsAPI.readCategory.mockResolvedValueOnce({
      data: {
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL:
          'https://towerhost/sso/complete/github-enterprise-team/',
          'https://platformhost/sso/complete/github-enterprise-team/',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL: 'https://localhost/url',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL: 'https://localhost/apiurl',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY: 'ent_team_key',
@@ -22,7 +22,8 @@ jest.mock('../../../../api');

const mockDefault = {
  data: {
    SOCIAL_AUTH_GITHUB_CALLBACK_URL: 'https://towerhost/sso/complete/github/',
    SOCIAL_AUTH_GITHUB_CALLBACK_URL:
      'https://platformhost/sso/complete/github/',
    SOCIAL_AUTH_GITHUB_KEY: 'mock github key',
    SOCIAL_AUTH_GITHUB_SECRET: '$encrypted$',
    SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP: null,
@@ -32,7 +33,7 @@ const mockDefault = {
const mockOrg = {
  data: {
    SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL:
      'https://towerhost/sso/complete/github-org/',
      'https://platformhost/sso/complete/github-org/',
    SOCIAL_AUTH_GITHUB_ORG_KEY: '',
    SOCIAL_AUTH_GITHUB_ORG_SECRET: '$encrypted$',
    SOCIAL_AUTH_GITHUB_ORG_NAME: '',
@@ -43,7 +44,7 @@ const mockOrg = {
const mockTeam = {
  data: {
    SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL:
      'https://towerhost/sso/complete/github-team/',
      'https://platformhost/sso/complete/github-team/',
    SOCIAL_AUTH_GITHUB_TEAM_KEY: 'OAuth2 key (Client ID)',
    SOCIAL_AUTH_GITHUB_TEAM_SECRET: '$encrypted$',
    SOCIAL_AUTH_GITHUB_TEAM_ID: 'team_id',
@@ -54,7 +55,7 @@ const mockTeam = {
const mockEnterprise = {
  data: {
    SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL:
      'https://towerhost/sso/complete/github-enterprise/',
      'https://platformhost/sso/complete/github-enterprise/',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_URL: 'https://localhost/enterpriseurl',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL: 'https://localhost/enterpriseapi',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY: 'foobar',
@@ -66,7 +67,7 @@ const mockEnterprise = {
const mockEnterpriseOrg = {
  data: {
    SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL:
      'https://towerhost/sso/complete/github-enterprise-org/',
      'https://platformhost/sso/complete/github-enterprise-org/',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL: 'https://localhost/orgurl',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL: 'https://localhost/orgapi',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY: 'foobar',
@@ -79,7 +80,7 @@ const mockEnterpriseOrg = {
const mockEnterpriseTeam = {
  data: {
    SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL:
      'https://towerhost/sso/complete/github-enterprise-team/',
      'https://platformhost/sso/complete/github-enterprise-team/',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL: 'https://localhost/teamurl',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL: 'https://localhost/teamapi',
    SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY: 'foobar',
@@ -143,7 +144,7 @@ describe('<GitHubDetail />', () => {
    assertDetail(
      wrapper,
      'GitHub OAuth2 Callback URL',
      'https://towerhost/sso/complete/github/'
      'https://platformhost/sso/complete/github/'
    );
    assertDetail(wrapper, 'GitHub OAuth2 Key', 'mock github key');
    assertDetail(wrapper, 'GitHub OAuth2 Secret', 'Encrypted');
@@ -218,7 +219,7 @@ describe('<GitHubDetail />', () => {
    assertDetail(
      wrapper,
      'GitHub Organization OAuth2 Callback URL',
      'https://towerhost/sso/complete/github-org/'
      'https://platformhost/sso/complete/github-org/'
    );
    assertDetail(wrapper, 'GitHub Organization OAuth2 Key', 'Not configured');
    assertDetail(wrapper, 'GitHub Organization OAuth2 Secret', 'Encrypted');
@@ -269,7 +270,7 @@ describe('<GitHubDetail />', () => {
    assertDetail(
      wrapper,
      'GitHub Team OAuth2 Callback URL',
      'https://towerhost/sso/complete/github-team/'
      'https://platformhost/sso/complete/github-team/'
    );
    assertDetail(wrapper, 'GitHub Team OAuth2 Key', 'OAuth2 key (Client ID)');
    assertDetail(wrapper, 'GitHub Team OAuth2 Secret', 'Encrypted');
@@ -316,7 +317,7 @@ describe('<GitHubDetail />', () => {
    assertDetail(
      wrapper,
      'GitHub Enterprise OAuth2 Callback URL',
      'https://towerhost/sso/complete/github-enterprise/'
      'https://platformhost/sso/complete/github-enterprise/'
    );
    assertDetail(
      wrapper,
@@ -343,7 +344,7 @@ describe('<GitHubDetail />', () => {
    });
  });

  describe('Enterprise Org', () => {
  describe('Enterprise Organization', () => {
    let wrapper;

    beforeAll(async () => {
@@ -376,7 +377,7 @@ describe('<GitHubDetail />', () => {
    assertDetail(
      wrapper,
      'GitHub Enterprise Organization OAuth2 Callback URL',
      'https://towerhost/sso/complete/github-enterprise-org/'
      'https://platformhost/sso/complete/github-enterprise-org/'
    );
    assertDetail(
      wrapper,
@@ -445,7 +446,7 @@ describe('<GitHubDetail />', () => {
    assertDetail(
      wrapper,
      'GitHub Enterprise Team OAuth2 Callback URL',
      'https://towerhost/sso/complete/github-enterprise-team/'
      'https://platformhost/sso/complete/github-enterprise-team/'
    );
    assertDetail(
      wrapper,
@@ -476,23 +477,4 @@ describe('<GitHubDetail />', () => {
      );
    });
  });

  describe('Redirect', () => {
    test('should render redirect when user navigates to erroneous category', async () => {
      let wrapper;
      useRouteMatch.mockImplementation(() => ({
        url: '/settings/github/foo/details',
        path: '/settings/github/:category/details',
        params: { category: 'foo' },
      }));
      await act(async () => {
        wrapper = mountWithContexts(
          <SettingsProvider value={mockAllOptions.actions}>
            <GitHubDetail />
          </SettingsProvider>
        );
      });
      await waitForElement(wrapper, 'Redirect');
    });
  });
});
@@ -22,7 +22,7 @@ describe('<GitHubEnterpriseEdit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL:
          'https://towerhost/sso/complete/github-enterprise/',
          'https://platformhost/sso/complete/github-enterprise/',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_URL: '',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL: '',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY: '',
@@ -22,7 +22,7 @@ describe('<GitHubEnterpriseOrgEdit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL:
          'https://towerhost/sso/complete/github-enterprise-org/',
          'https://platformhost/sso/complete/github-enterprise-org/',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL: '',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL: '',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY: '',
@@ -22,7 +22,7 @@ describe('<GitHubEnterpriseTeamEdit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL:
          'https://towerhost/sso/complete/github-enterprise-team/',
          'https://platformhost/sso/complete/github-enterprise-team/',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL: '',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL: '',
        SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY: '',
@@ -22,7 +22,7 @@ describe('<GitHubOrgEdit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL:
          'https://towerhost/sso/complete/github-org/',
          'https://platformhost/sso/complete/github-org/',
        SOCIAL_AUTH_GITHUB_ORG_KEY: '',
        SOCIAL_AUTH_GITHUB_ORG_SECRET: '$encrypted$',
        SOCIAL_AUTH_GITHUB_ORG_NAME: '',
@@ -22,7 +22,7 @@ describe('<GitHubTeamEdit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL:
          'https://towerhost/sso/complete/github-team/',
          'https://platformhost/sso/complete/github-team/',
        SOCIAL_AUTH_GITHUB_TEAM_KEY: 'OAuth2 key (Client ID)',
        SOCIAL_AUTH_GITHUB_TEAM_SECRET: '$encrypted$',
        SOCIAL_AUTH_GITHUB_TEAM_ID: 'team_id',
@@ -16,7 +16,7 @@ describe('<GoogleOAuth2 />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL:
          'https://towerhost/sso/complete/google-oauth2/',
          'https://platformhost/sso/complete/google-oauth2/',
        SOCIAL_AUTH_GOOGLE_OAUTH2_KEY: 'mock key',
        SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: '$encrypted$',
        SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: [
@@ -22,7 +22,7 @@ describe('<GoogleOAuth2Detail />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL:
          'https://towerhost/sso/complete/google-oauth2/',
          'https://platformhost/sso/complete/google-oauth2/',
        SOCIAL_AUTH_GOOGLE_OAUTH2_KEY: 'mock key',
        SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: '$encrypted$',
        SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: [
@@ -68,7 +68,7 @@ describe('<GoogleOAuth2Detail />', () => {
    assertDetail(
      wrapper,
      'Google OAuth2 Callback URL',
      'https://towerhost/sso/complete/google-oauth2/'
      'https://platformhost/sso/complete/google-oauth2/'
    );
    assertDetail(wrapper, 'Google OAuth2 Key', 'mock key');
    assertDetail(wrapper, 'Google OAuth2 Secret', 'Encrypted');
@@ -22,7 +22,7 @@ describe('<GoogleOAuth2Edit />', () => {
    SettingsAPI.readCategory.mockResolvedValue({
      data: {
        SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL:
          'https://towerhost/sso/complete/google-oauth2/',
          'https://platformhost/sso/complete/google-oauth2/',
        SOCIAL_AUTH_GOOGLE_OAUTH2_KEY: 'mock key',
        SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: '$encrypted$',
        SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: [
@@ -26,7 +26,7 @@ describe('<MiscSystemDetail />', () => {
      ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC: false,
      ORG_ADMINS_CAN_SEE_ALL_USERS: true,
      MANAGE_ORGANIZATION_AUTH: true,
      TOWER_URL_BASE: 'https://towerhost',
      TOWER_URL_BASE: 'https://platformhost',
      REMOTE_HOST_HEADERS: [],
      PROXY_IP_ALLOWED_LIST: [],
      CSRF_TRUSTED_ORIGINS: [],
@@ -94,7 +94,7 @@ describe('<MiscSystemDetail />', () => {
      'Automation Analytics upload URL',
      'https://example.com'
    );
    assertDetail(wrapper, 'Base URL of the service', 'https://towerhost');
    assertDetail(wrapper, 'Base URL of the service', 'https://platformhost');
    assertDetail(wrapper, 'Gather data for Automation Analytics', 'Off');
    assertDetail(
      wrapper,
Some files were not shown because too many files have changed in this diff.