Compare commits


1 Commit

Author: David O Neill
Commit: e0acd9b111 "Change failing PR to draft" (2024-02-12 16:15:47 +00:00)
474 changed files with 4763 additions and 14797 deletions


@@ -11,12 +11,6 @@ runs:
     shell: bash
     run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-  - name: Set lower case owner name
-    shell: bash
-    run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
-    env:
-      OWNER: '${{ github.repository_owner }}'
   - name: Log in to registry
     shell: bash
     run: |
@@ -24,11 +18,11 @@ runs:
   - name: Pre-pull latest devel image to warm cache
     shell: bash
-    run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
+    run: docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
   - name: Build image for current source checkout
     shell: bash
     run: |
-      DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+      DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
       COMPOSE_TAG=${{ github.base_ref }} \
       make docker-compose-build
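The `${OWNER,,}` expansion that separates the two sides here is plain Bash: `,,` lower-cases the whole value, which matters because ghcr.io rejects image paths containing uppercase characters (a fork owned by `MyOrg` would otherwise break the pull). A minimal sketch of the idiom, with a hypothetical owner name:

    #!/usr/bin/env bash
    # ",," lower-cases every character of the parameter (Bash 4+).
    OWNER='MyOrg'                        # hypothetical repository owner
    echo "OWNER_LC=${OWNER,,}"           # -> OWNER_LC=myorg
    # ghcr.io image references must be lowercase:
    echo "ghcr.io/${OWNER,,}/awx_devel"  # -> ghcr.io/myorg/awx_devel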


@@ -35,7 +35,7 @@ runs:
   - name: Start AWX
     shell: bash
     run: |
-      DEV_DOCKER_OWNER=${{ github.repository_owner }} \
+      DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
       COMPOSE_TAG=${{ github.base_ref }} \
       COMPOSE_UP_OPTS="-d" \
       make docker-compose
@@ -57,11 +57,21 @@ runs:
       awx-manage update_password --username=admin --password=password
       EOSH
+  - name: Build UI
+    # This must be a string comparison in composite actions:
+    # https://github.com/actions/runner/issues/2238
+    if: ${{ inputs.build-ui == 'true' }}
+    shell: bash
+    run: |
+      docker exec -i tools_awx_1 sh <<-EOSH
+        make ui-devel
+      EOSH
   - name: Get instance data
     id: data
     shell: bash
     run: |
-      AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
+      AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks._sources_awx.IPAddress}}' tools_awx_1)
       ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
       echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
       echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT


@@ -15,4 +15,5 @@
 "dependencies":
   - any: ["awx/ui/package.json"]
-  - any: ["requirements/*"]
+  - any: ["requirements/*.txt"]
+  - any: ["requirements/requirements.in"]


@@ -1,7 +1,7 @@
 ## General
 - For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
 - Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
-- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
+- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html


@@ -38,9 +38,7 @@ jobs:
       - name: ui-test-general
         command: make ui-test-general
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - name: Build awx_devel image for running checks
         uses: ./.github/actions/awx_devel_image
@@ -54,9 +52,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -70,19 +66,15 @@ jobs:
   awx-operator:
     runs-on: ubuntu-latest
     timeout-minutes: 60
-    env:
-      DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
       - name: Checkout awx
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
        with:
-          show-progress: false
          path: awx
       - name: Checkout awx-operator
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
        with:
-          show-progress: false
          repository: ansible/awx-operator
          path: awx-operator
@@ -102,11 +94,11 @@ jobs:
       - name: Build AWX image
         working-directory: awx
         run: |
-          VERSION=`make version-for-buildyml` make awx-kube-build
-        env:
-          COMPOSE_TAG: ci
-          DEV_DOCKER_TAG_BASE: local
-          HEADLESS: yes
+          ansible-playbook -v tools/ansible/build.yml \
+            -e headless=yes \
+            -e awx_image=awx \
+            -e awx_image_tag=ci \
+            -e ansible_python_interpreter=$(which python3)
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -115,19 +107,10 @@ jobs:
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)
           make kustomize
-          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
         env:
-          AWX_TEST_IMAGE: local/awx
+          AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
-          AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
-          STORE_DEBUG_OUTPUT: true
-      - name: Upload debug output
-        if: failure()
-        uses: actions/upload-artifact@v3
-        with:
-          name: awx-operator-debug-output
-          path: ${{ env.DEBUG_OUTPUT_DIR }}
   collection-sanity:
     name: awx_collection sanity
@@ -136,9 +119,7 @@ jobs:
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
       - name: Upgrade ansible-core
@@ -146,6 +127,10 @@ jobs:
       - name: Run sanity tests
         run: make test_collection_sanity
+        env:
+          # needed due to cgroupsv2. This is fixed, but a stable release
+          # with the fix has not been made yet.
+          ANSIBLE_TEST_PREFER_PODMAN: 1
   collection-integration:
     name: awx_collection integration
@@ -162,9 +147,7 @@ jobs:
       - name: r-z0-9
         regex: ^[r-z0-9]
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -210,9 +193,7 @@ jobs:
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - name: Upgrade ansible-core
         run: python3 -m pip install --upgrade ansible-core
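`ANSIBLE_TEST_PREFER_PODMAN`, which the branch sets for the sanity job, is an `ansible-test` switch: with both runtimes installed it picks Podman instead of Docker, sidestepping the cgroupsv2 issue the inline comment mentions. Reproducing the job locally would look roughly like this sketch:

    #!/usr/bin/env bash
    # Prefer Podman over Docker for ansible-test's containerized runs.
    export ANSIBLE_TEST_PREFER_PODMAN=1
    # Same entry point the workflow calls; the make target wraps ansible-test.
    make test_collection_sanity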


@@ -1,57 +0,0 @@
----
-name: django-ansible-base requirements update
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 6 * * *' # once an day @ 6 AM
-permissions:
-  pull-requests: write
-  contents: write
-jobs:
-  dab-pin-newest:
-    if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
-    runs-on: ubuntu-latest
-    steps:
-      - id: dab-release
-        name: Get current django-ansible-base release version
-        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
-        with:
-          owner: ansible
-          repo: django-ansible-base
-          excludes: prerelease, draft
-      - name: Check out respository code
-        uses: actions/checkout@v4
-      - id: dab-pinned
-        name: Get current django-ansible-base pinned version
-        run:
-          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
-      - name: Update django-ansible-base pinned version to upstream release
-        run:
-          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
-      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
-        with:
-          base: devel
-          branch: bump-django-ansible-base
-          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
-          body: |
-            ##### SUMMARY
-            Automated .github/workflows/dab-release.yml
-            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
-            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
-            ##### ISSUE TYPE
-            - Bug, Docs Fix or other nominal change
-            ##### COMPONENT NAME
-            - API
-          commit-message: |
-            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
-          add-paths:
-            requirements/requirements_git.txt


@@ -2,54 +2,29 @@
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-  DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
 on:
-  workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
       - feature_*
 jobs:
-  push-development-images:
-    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
+  push:
     runs-on: ubuntu-latest
-    timeout-minutes: 120
+    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
-    strategy:
-      fail-fast: false
-      matrix:
-        build-targets:
-          - image-name: awx_devel
-            make-target: docker-compose-buildx
-          - image-name: awx_kube_devel
-            make-target: awx-kube-dev-buildx
-          - image-name: awx
-            make-target: awx-kube-buildx
     steps:
-      - name: Skipping build of awx image for non-awx repository
-        run: |
-          echo "Skipping build of awx image for non-awx repository"
-          exit 0
-        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Set GITHUB_ENV variables
-        run: |
-          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
-          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
-          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - uses: actions/checkout@v3
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Set lower case owner name
+        run: |
+          echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
         env:
           OWNER: '${{ github.repository_owner }}'
@@ -62,17 +37,23 @@ jobs:
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-      - name: Setup node and npm for the new UI build
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-        if: matrix.build-targets.image-name == 'awx'
-      - name: Prebuild new UI for awx image (to speed up build process)
+      - name: Pre-pull image to warm build cache
         run: |
-          make ui-next
-        if: matrix.build-targets.image-name == 'awx'
+          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
-      - name: Build and push AWX devel images
+      - name: Build images
         run: |
-          make ${{ matrix.build-targets.make-target }}
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
+      - name: Push development images
+        run: |
+          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
+      - name: Push AWX k8s image, only for upstream and feature branches
+        run: docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
+        if: endsWith(github.repository, '/awx')
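Both versions tag images with `${GITHUB_REF##*/}`, another Bash parameter expansion: `##*/` strips the longest prefix ending in `/`, so `refs/heads/devel` becomes `devel`. Note it keeps only the last path segment, one reason the long-lived branches here (`release_*`, `feature_*`) avoid slashes in their names. A quick sketch with an illustrative ref value:

    #!/usr/bin/env bash
    GITHUB_REF='refs/heads/devel'          # illustrative; Actions sets this
    # "##*/" deletes the longest prefix matching "*/".
    echo "COMPOSE_TAG=${GITHUB_REF##*/}"   # -> COMPOSE_TAG=devel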


@@ -8,9 +8,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - name: install tox
         run: pip install tox

.github/workflows/e2e_test.yml (new file, 75 lines)

@@ -0,0 +1,75 @@
+---
+name: E2E Tests
+env:
+  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+on:
+  pull_request_target:
+    types: [labeled]
+jobs:
+  e2e-test:
+    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
+    runs-on: ubuntu-latest
+    timeout-minutes: 40
+    permissions:
+      packages: write
+      contents: read
+    strategy:
+      matrix:
+        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
+    steps:
+      - uses: actions/checkout@v3
+      - uses: ./.github/actions/run_awx_devel
+        id: awx
+        with:
+          build-ui: true
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Pull awx_cypress_base image
+        run: |
+          docker pull quay.io/awx/awx_cypress_base:latest
+      - name: Checkout test project
+        uses: actions/checkout@v3
+        with:
+          repository: ${{ github.repository_owner }}/tower-qa
+          ssh-key: ${{ secrets.QA_REPO_KEY }}
+          path: tower-qa
+          ref: devel
+      - name: Build cypress
+        run: |
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          docker build -t awx-pf-tests .
+      - name: Run E2E tests
+        env:
+          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+        run: |
+          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
+          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
+          export COMMIT_INFO_SHA=$GITHUB_SHA
+          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          AWX_IP=${{ steps.awx.outputs.ip }}
+          printenv > .env
+          echo "Executing tests:"
+          docker run \
+            --network '_sources_default' \
+            --ipc=host \
+            --env-file=.env \
+            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
+            -e CYPRESS_AWX_E2E_USERNAME=admin \
+            -e CYPRESS_AWX_E2E_PASSWORD='password' \
+            -e COMMAND="npm run cypress-concurrently-gha" \
+            -v /dev/shm:/dev/shm \
+            -v $PWD:/e2e \
+            -w /e2e \
+            awx-pf-tests run --project .
+      - uses: ./.github/actions/upload_awx_devel_logs
+        if: always()
+        with:
+          log-filename: e2e-${{ matrix.job }}.log
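The `printenv > .env` / `--env-file=.env` pair is a compact way to forward the runner's entire environment into the test container: `--env-file` consumes `KEY=value` lines, which is exactly what `printenv` emits, and explicit `-e` flags still override. A trimmed sketch of the same hand-off:

    #!/usr/bin/env bash
    # Snapshot the current environment as KEY=value lines...
    printenv > .env
    # ...and replay it inside a container; -e still wins for overrides.
    docker run --rm --env-file=.env -e EXTRA_FLAG=1 alpine:3 env | sort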


@@ -2,10 +2,12 @@
 name: Feature branch deletion cleanup
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-on: delete
+on:
+  delete:
+    branches:
+      - feature_**
 jobs:
-  branch_delete:
-    if: ${{ github.event.ref_type == 'branch' && startsWith(github.event.ref, 'feature_') }}
+  push:
     runs-on: ubuntu-latest
     timeout-minutes: 20
     permissions:
@@ -20,4 +22,6 @@ jobs:
       run: |
         ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
         ansible localhost -c local -m aws_s3 \
-          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
+          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"


@@ -30,10 +30,7 @@ jobs:
     timeout-minutes: 20
     name: Label Issue - Community
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests


@@ -24,15 +24,44 @@ jobs:
       repo-token: "${{ secrets.GITHUB_TOKEN }}"
       configuration-path: .github/pr_labeler.yml
+  convert-to-draft:
+    runs-on: ubuntu-latest
+    name: Change failing PRS to draft
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+      - name: Set up Node.js
+        uses: actions/setup-node@v3
+        with:
+          node-version: 14
+      - name: Install dependencies
+        run: npm install -g github
+      - name: Check CI status
+        id: check-ci
+        run: |
+          status=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
+            -H "Accept: application/vnd.github.v3+json" \
+            https://api.github.com/repos/${{ github.repository }}/commits/${{ github.sha }}/check-suites | \
+            jq -r '.check_suites[0].conclusion')
+          echo "CI Status: $status"
+          echo "::set-output name=ci_status::$status"
+      - name: Convert to Draft on CI Failure
+        if: steps.check-ci.outputs.ci_status == 'failure'
+        run: gh pr edit ${{ github.event.number }} --draft
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   community:
     runs-on: ubuntu-latest
     timeout-minutes: 20
     name: Label PR - Community
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
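The new job's `echo "::set-output name=ci_status::$status"` uses the workflow-command form that GitHub has deprecated in favor of the `$GITHUB_OUTPUT` file; both publish a step output, and the file-based form (already used elsewhere in this diff) is the drop-in replacement. A sketch of the two side by side:

    #!/usr/bin/env bash
    # Deprecated workflow-command form (still parsed, but warned about):
    echo "::set-output name=ci_status::failure"
    # Current form: append key=value to the file GITHUB_OUTPUT points at.
    echo "ci_status=failure" >> "$GITHUB_OUTPUT"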


@@ -7,11 +7,7 @@ env:
 on:
   release:
     types: [published]
-  workflow_dispatch:
-    inputs:
-      tag_name:
-        description: 'Name for the tag of the release.'
-        required: true
 permissions:
   contents: read # to fetch code (actions/checkout)
@@ -21,20 +17,8 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 90
     steps:
-      - name: Set GitHub Env vars for workflow_dispatch event
-        if: ${{ github.event_name == 'workflow_dispatch' }}
-        run: |
-          echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
-      - name: Set GitHub Env vars if release event
-        if: ${{ github.event_name == 'release' }}
-        run: |
-          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
       - name: Checkout awx
-        uses: actions/checkout@v4
-        with:
-          show-progress: false
+        uses: actions/checkout@v3
       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -59,21 +43,16 @@ jobs:
       - name: Build collection and publish to galaxy
         env:
           COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
-          COLLECTION_VERSION: ${{ env.TAG_NAME }}
+          COLLECTION_VERSION: ${{ github.event.release.tag_name }}
           COLLECTION_TEMPLATE_VERSION: true
         run: |
-          sudo apt-get install jq
           make build_collection
-          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
-          if [[ "$count" == "1" ]]; then
-            echo "Galaxy release already done";
-          elif [[ "$count" == "0" ]]; then
+          if [ "$(curl -L --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+            echo "Galaxy release already done"; \
+          else \
             ansible-galaxy collection publish \
               --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
-          else
-            echo "Unexpected count from galaxy search: $count";
-            exit 1;
+              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
           fi
       - name: Set official pypi info
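The devel side asks Galaxy's v3 API how many matching versions exist and branches on the count, which is stricter than the branch's trick of probing the download URL for a 302 redirect. A standalone sketch of that check, with illustrative namespace and version values:

    #!/usr/bin/env bash
    NAMESPACE=awx VERSION=23.7.0   # illustrative values
    count=$(curl -s "https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/?namespace=${NAMESPACE}&name=awx&version=${VERSION}" | jq .meta.count)
    if [[ "$count" == "0" ]]; then
      echo "not on Galaxy yet; safe to publish"
    fi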
@@ -85,8 +64,6 @@ jobs:
         if: ${{ github.repository_owner != 'ansible' }}
       - name: Build awxkit and upload to pypi
-        env:
-          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
         run: |
           git reset --hard
           cd awxkit && python3 setup.py sdist bdist_wheel
@@ -106,15 +83,11 @@ jobs:
       - name: Re-tag and promote awx image
         run: |
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository }}:latest
-      - name: Re-tag and promote awx-ee image
-        run: |
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
+          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
+          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository }}:latest
+          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
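`docker buildx imagetools create` (the devel side) copies a manifest registry-to-registry, so a multi-arch image keeps every platform; the branch's `pull`/`tag`/`push` sequence only moves the single-platform image the runner happened to pull. A minimal sketch of a promote done the manifest way, with illustrative image names:

    #!/usr/bin/env bash
    # Re-tag a (possibly multi-arch) image across registries without pulling it.
    docker buildx imagetools create \
      ghcr.io/myorg/awx:23.7.0 \
      --tag quay.io/myorg/awx:23.7.0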


@@ -45,27 +45,11 @@ jobs:
           exit 0
       - name: Checkout awx
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
-          show-progress: false
           path: awx
-      - name: Checkout awx-operator
-        uses: actions/checkout@v4
-        with:
-          show-progress: false
-          repository: ${{ github.repository_owner }}/awx-operator
-          path: awx-operator
-      - name: Checkout awx-logos
-        uses: actions/checkout@v4
-        with:
-          show-progress: false
-          repository: ansible/awx-logos
-          path: awx-logos
       - name: Get python version from Makefile
-        working-directory: awx
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
       - name: Install python ${{ env.py_version }}
@@ -73,72 +57,56 @@ jobs:
         with:
           python-version: ${{ env.py_version }}
+      - name: Checkout awx-logos
+        uses: actions/checkout@v3
+        with:
+          repository: ansible/awx-logos
+          path: awx-logos
+      - name: Checkout awx-operator
+        uses: actions/checkout@v3
+        with:
+          repository: ${{ github.repository_owner }}/awx-operator
+          path: awx-operator
       - name: Install playbook dependencies
         run: |
           python3 -m pip install docker
-      - name: Log into registry ghcr.io
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Copy logos for inclusion in sdist for official build
-        working-directory: awx
-        run: |
-          cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
-      - name: Setup node and npm for new UI build
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-      - name: Prebuild new UI for awx image (to speed up build process)
-        working-directory: awx
-        run: make ui-next
-      - name: Set build env variables
-        run: |
-          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
-          echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
-          echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
-        env:
-          OWNER: ${{ github.repository_owner }}
       - name: Build and stage AWX
         working-directory: awx
-        env:
-          DOCKER_BUILDX_PUSH: true
-          HEADLESS: false
-          PLATFORMS: linux/amd64,linux/arm64
         run: |
-          make awx-kube-buildx
+          ansible-playbook -v tools/ansible/build.yml \
+            -e registry=ghcr.io \
+            -e registry_username=${{ github.actor }} \
+            -e registry_password=${{ secrets.GITHUB_TOKEN }} \
+            -e awx_image=${{ github.repository }} \
+            -e awx_version=${{ github.event.inputs.version }} \
+            -e ansible_python_interpreter=$(which python3) \
+            -e push=yes \
+            -e awx_official=yes
+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
       - name: tag awx-ee:latest with version input
         run: |
-          docker buildx imagetools create \
-            quay.io/ansible/awx-ee:latest \
-            --tag ${AWX_EE_TEST_IMAGE}
+          docker pull quay.io/ansible/awx-ee:latest
+          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
-      - name: Stage awx-operator image
+      - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
-          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
+          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \
             --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
-          IMG=${AWX_OPERATOR_TEST_IMAGE} \
-          make docker-buildx
+          IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \
+          VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push
-      - name: Pulling images for test deployment with awx-operator
-        # awx operator molecue test expect to kind load image and buildx exports image to registry and not local
-        run: |
-          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
-          docker pull -q ${AWX_EE_TEST_IMAGE}
-          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -148,6 +116,10 @@ jobs:
           sudo rm -f $(which kustomize)
           make kustomize
           KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
+        env:
+          AWX_TEST_IMAGE: ${{ github.repository }}
+          AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
       - name: Create draft release for AWX
         working-directory: awx
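Both login steps pipe the secret to `--password-stdin` rather than passing it as an argument, which keeps the token out of process listings and shell history. The idiom, with illustrative names:

    #!/usr/bin/env bash
    # Read the registry password from stdin instead of argv.
    echo "$REGISTRY_TOKEN" | docker login ghcr.io -u some-user --password-stdin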


@@ -13,9 +13,7 @@ jobs:
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v4
-        with:
-          show-progress: false
+        uses: actions/checkout@v3
       - name: Update PR Body
         env:


@@ -18,9 +18,7 @@ jobs:
     packages: write
     contents: read
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
+      - uses: actions/checkout@v3
       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -36,7 +34,7 @@ jobs:
       - name: Pre-pull image to warm build cache
         run: |
-          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
       - name: Build image
         run: |
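The trailing `|| :` on the pull is the shell's cheapest "allow failure" idiom: `:` is the POSIX no-op builtin and always succeeds, so a missing cache image just means a cold build instead of a failed step. Sketch:

    #!/usr/bin/env bash
    set -e  # a failed pull would normally abort the script...
    # ...but "|| :" swallows the failure; ":" always exits 0.
    docker pull ghcr.io/example/awx_devel:no-such-tag || :
    echo "still running"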

.gitignore (8 changes)

@@ -46,11 +46,6 @@ tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
 tools/docker-compose/keycloak.awx.realm.json
-!tools/docker-compose/editable_dependencies
-tools/docker-compose/editable_dependencies/*
-!tools/docker-compose/editable_dependencies/README.md
-!tools/docker-compose/editable_dependencies/install.sh
 # Tower setup playbook testing
 setup/test/roles/postgresql
 **/provision_docker
@@ -174,6 +169,3 @@ awx/ui_next/build
 # Docs build stuff
 docs/docsite/build/
 _readthedocs/
-# Pyenv
-.python-version

.vscode/launch.json (deleted, 113 lines)

@@ -1,113 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "run_ws_heartbeat",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_ws_heartbeat"],
"django": true,
"preLaunchTask": "stop awx-ws-heartbeat",
"postDebugTask": "start awx-ws-heartbeat"
},
{
"name": "run_cache_clear",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_cache_clear"],
"django": true,
"preLaunchTask": "stop awx-cache-clear",
"postDebugTask": "start awx-cache-clear"
},
{
"name": "run_callback_receiver",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_callback_receiver"],
"django": true,
"preLaunchTask": "stop awx-receiver",
"postDebugTask": "start awx-receiver"
},
{
"name": "run_dispatcher",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_dispatcher"],
"django": true,
"preLaunchTask": "stop awx-dispatcher",
"postDebugTask": "start awx-dispatcher"
},
{
"name": "run_rsyslog_configurer",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_rsyslog_configurer"],
"django": true,
"preLaunchTask": "stop awx-rsyslog-configurer",
"postDebugTask": "start awx-rsyslog-configurer"
},
{
"name": "run_cache_clear",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_cache_clear"],
"django": true,
"preLaunchTask": "stop awx-cache-clear",
"postDebugTask": "start awx-cache-clear"
},
{
"name": "run_wsrelay",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_wsrelay"],
"django": true,
"preLaunchTask": "stop awx-wsrelay",
"postDebugTask": "start awx-wsrelay"
},
{
"name": "daphne",
"type": "debugpy",
"request": "launch",
"program": "/var/lib/awx/venv/awx/bin/daphne",
"args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
"django": true,
"preLaunchTask": "stop awx-daphne",
"postDebugTask": "start awx-daphne"
},
{
"name": "runserver(uwsgi alternative)",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["runserver", "127.0.0.1:8052"],
"django": true,
"preLaunchTask": "stop awx-uwsgi",
"postDebugTask": "start awx-uwsgi"
},
{
"name": "runserver_plus(uwsgi alternative)",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["runserver_plus", "127.0.0.1:8052"],
"django": true,
"preLaunchTask": "stop awx-uwsgi and install Werkzeug",
"postDebugTask": "start awx-uwsgi"
},
{
"name": "shell_plus",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["shell_plus"],
"django": true,
},
]
}

.vscode/tasks.json (deleted, 100 lines)

@@ -1,100 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "start awx-cache-clear",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-cache-clear"
},
{
"label": "stop awx-cache-clear",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-cache-clear"
},
{
"label": "start awx-daphne",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-daphne"
},
{
"label": "stop awx-daphne",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-daphne"
},
{
"label": "start awx-dispatcher",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-dispatcher"
},
{
"label": "stop awx-dispatcher",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-dispatcher"
},
{
"label": "start awx-receiver",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-receiver"
},
{
"label": "stop awx-receiver",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-receiver"
},
{
"label": "start awx-rsyslog-configurer",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
},
{
"label": "stop awx-rsyslog-configurer",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
},
{
"label": "start awx-rsyslogd",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-rsyslogd"
},
{
"label": "stop awx-rsyslogd",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-rsyslogd"
},
{
"label": "start awx-uwsgi",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-uwsgi"
},
{
"label": "stop awx-uwsgi",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-uwsgi"
},
{
"label": "stop awx-uwsgi and install Werkzeug",
"type": "shell",
"command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
},
{
"label": "start awx-ws-heartbeat",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-ws-heartbeat"
},
{
"label": "stop awx-ws-heartbeat",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
},
{
"label": "start awx-wsrelay",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-wsrelay"
},
{
"label": "stop awx-wsrelay",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-wsrelay"
}
]
}


@@ -11,8 +11,6 @@ ignore: |
   # django template files
   awx/api/templates/instance_install_bundle/**
   .readthedocs.yaml
-  tools/loki
-  tools/otel
 extends: default


@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choose to
 #### Frontend Development
-See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
+See [the ui development documentation](awx/ui/CONTRIBUTING.md).
 #### Fork and clone the AWX repo
@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for
 **NOTES**
 > Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
 > If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for
 At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
 If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
 ## Submitting Pull Requests
@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
 When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
 ## Reporting Issues
 We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
 ## Getting Help


@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_tr
 Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
 These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
-The `state:needs_triage` label will remain on your pull request until a person has looked at it.
+The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
 You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
 The comment will look something like `CC @matburt @wwitzel3 ...`.

Makefile (134 changes)

@@ -1,8 +1,8 @@
 -include awx/ui_next/Makefile
-PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
+PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
 SHELL := bash
-DOCKER_COMPOSE ?= docker compose
+DOCKER_COMPOSE ?= docker-compose
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
@@ -10,7 +10,7 @@ KIND_BIN ?= $(shell which kind)
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
+VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py)
 # ansible-test requires semver compatable version, so we allow overrides to hack it
 COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
@@ -47,14 +47,6 @@ VAULT ?= false
 VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
-# If set to true docker-compose will also start an OpenTelemetry Collector instance
-OTEL ?= false
-# If set to true docker-compose will also start a Loki instance
-LOKI ?= false
-# If set to true docker-compose will install editable dependencies
-EDITABLE_DEPENDENCIES ?= false
-# If set to true, use tls for postgres connection
-PG_TLS ?= false
 VENV_BASE ?= /var/lib/awx/venv
@@ -63,11 +55,6 @@ DEV_DOCKER_OWNER ?= ansible
 DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
-IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
-IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
-# Common command to use for running ansible-playbook
-ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
@@ -76,7 +63,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
 NAME ?= awx
@@ -88,21 +75,6 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
 I18N_FLAG_FILE = .i18n_built
-## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
-PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
-# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
-# DOCKER_CACHE=--no-cache make docker-compose-build
-ifeq ($(DOCKER_CACHE),)
-DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
-DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
-DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
-else
-DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
-DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
-DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
-endif
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
     develop refresh adduser migrate dbchange \
     receiver test test_unit test_coverage coverage_html \
@@ -241,6 +213,8 @@ collectstatic:
     fi; \
     $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
 uwsgi: collectstatic
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
@@ -248,7 +222,7 @@ uwsgi: collectstatic
     uwsgi /etc/tower/uwsgi.ini
 awx-autoreload:
-    @/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
+    @/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
 daphne:
     @if [ "$(VENV_BASE)" ]; then \
@@ -328,7 +302,7 @@ swagger: reports
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
+    (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
 check: black
@@ -385,7 +359,7 @@ symlink_collection:
     ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
 awx_collection_build: $(shell find awx_collection -type f)
-    $(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
+    ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
        -e collection_package=$(COLLECTION_PACKAGE) \
        -e collection_namespace=$(COLLECTION_NAMESPACE) \
        -e collection_version=$(COLLECTION_VERSION) \
@@ -502,7 +476,13 @@ ui-test-general:
     $(NPM_BIN) run --prefix awx/ui pretest
     $(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
+# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
+HEADLESS ?= no
+ifeq ($(HEADLESS), yes)
 dist/$(SDIST_TAR_FILE):
+else
+dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
+endif
     $(PYTHON) -m build -s
     ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -533,10 +513,10 @@ endif
 docker-compose-sources: .git/hooks/pre-commit
     @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-        $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+        ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
     fi;
-    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
        -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
        -e awx_image_tag=$(COMPOSE_TAG) \
        -e receptor_image=$(RECEPTOR_IMAGE) \
@@ -552,26 +532,16 @@ docker-compose-sources: .git/hooks/pre-commit
        -e enable_vault=$(VAULT) \
        -e vault_tls=$(VAULT_TLS) \
        -e enable_tacacs=$(TACACS) \
-       -e enable_otel=$(OTEL) \
-       -e enable_loki=$(LOKI) \
-       -e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
-       -e pg_tls=$(PG_TLS) \
        $(EXTRA_SOURCES_ANSIBLE_OPTS)
 docker-compose: awx/projects docker-compose-sources
     ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
        -e enable_vault=$(VAULT) \
        -e vault_tls=$(VAULT_TLS) \
-       -e enable_ldap=$(LDAP); \
-    $(MAKE) docker-compose-up
-docker-compose-up:
+       -e enable_ldap=$(LDAP);
     $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
-docker-compose-down:
-    $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
 docker-compose-credential-plugins: awx/projects docker-compose-sources
     echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
     $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
@@ -603,7 +573,7 @@ docker-compose-container-group-clean:
 .PHONY: Dockerfile.dev
 ## Generate Dockerfile.dev for awx_devel image
 Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-    $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
+    ansible-playbook tools/ansible/dockerfile.yml \
        -e dockerfile_name=Dockerfile.dev \
        -e build_dev=True \
        -e receptor_image=$(RECEPTOR_IMAGE)
@@ -614,28 +584,14 @@ docker-compose-build: Dockerfile.dev
        -f Dockerfile.dev \
        -t $(DEVEL_IMAGE_NAME) \
        --build-arg BUILDKIT_INLINE_CACHE=1 \
-       $(DOCKER_DEVEL_CACHE_FLAG) .
+       --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
-.PHONY: docker-compose-buildx
-## Build awx_devel image for docker compose development environment for multiple architectures
-docker-compose-buildx: Dockerfile.dev
-    - docker buildx create --name docker-compose-buildx
-    docker buildx use docker-compose-buildx
-    - docker buildx build \
-       --push \
-       --build-arg BUILDKIT_INLINE_CACHE=1 \
-       $(DOCKER_DEVEL_CACHE_FLAG) \
-       --platform=$(PLATFORMS) \
-       --tag $(DEVEL_IMAGE_NAME) \
-       -f Dockerfile.dev .
-    - docker buildx rm docker-compose-buildx
 docker-clean:
     -$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
     -$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-    docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
+    docker volume rm -f tools_awx_db tools_vault_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
 docker-refresh: docker-clean docker-compose
@@ -657,6 +613,9 @@ clean-elk:
     docker rm tools_elasticsearch_1
     docker rm tools_kibana_1
+psql-container:
+    docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
 VERSION:
     @echo "awx: $(VERSION)"
@@ -677,7 +636,7 @@ version-for-buildyml:
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-    $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
+    ansible-playbook tools/ansible/dockerfile.yml \
        -e receptor_image=$(RECEPTOR_IMAGE) \
        -e headless=$(HEADLESS)
@@ -687,29 +646,12 @@ awx-kube-build: Dockerfile
        --build-arg VERSION=$(VERSION) \
        --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
        --build-arg HEADLESS=$(HEADLESS) \
-       $(DOCKER_KUBE_CACHE_FLAG) \
-       -t $(IMAGE_KUBE) .
-## Build multi-arch awx image for deployment on Kubernetes environment.
-awx-kube-buildx: Dockerfile
-    - docker buildx create --name awx-kube-buildx
-    docker buildx use awx-kube-buildx
-    - docker buildx build \
-       --push \
-       --build-arg VERSION=$(VERSION) \
-       --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
-       --build-arg HEADLESS=$(HEADLESS) \
-       --platform=$(PLATFORMS) \
-       $(DOCKER_KUBE_CACHE_FLAG) \
-       --tag $(IMAGE_KUBE) \
-       -f Dockerfile .
-    - docker buildx rm awx-kube-buildx
+       -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-    $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
+    ansible-playbook tools/ansible/dockerfile.yml \
        -e dockerfile_name=Dockerfile.kube-dev \
        -e kube_dev=True \
        -e template_dest=_build_kube_dev \
@@ -719,24 +661,12 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 awx-kube-dev-build: Dockerfile.kube-dev
     DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
        --build-arg BUILDKIT_INLINE_CACHE=1 \
-       $(DOCKER_KUBE_DEV_CACHE_FLAG) \
-       -t $(IMAGE_KUBE_DEV) .
+       --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+       -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
-## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
-awx-kube-dev-buildx: Dockerfile.kube-dev
-    - docker buildx create --name awx-kube-dev-buildx
-    docker buildx use awx-kube-dev-buildx
-    - docker buildx build \
-       --push \
-       --build-arg BUILDKIT_INLINE_CACHE=1 \
-       $(DOCKER_KUBE_DEV_CACHE_FLAG) \
-       --platform=$(PLATFORMS) \
-       --tag $(IMAGE_KUBE_DEV) \
-       -f Dockerfile.kube-dev .
-    - docker buildx rm awx-kube-dev-buildx
 kind-dev-load: awx-kube-dev-build
-    $(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
+    $(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
 # Translation TASKS
 # --------------------------------------


@@ -154,12 +154,10 @@ def manage():
from django.conf import settings from django.conf import settings
from django.core.management import execute_from_command_line from django.core.management import execute_from_command_line
# enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1 # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
# In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
# The return of connection.pg_version is something like 12013
if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development': if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
if (connection.pg_version // 10000) < 12: if (connection.pg_version // 10000) < 12:
sys.stderr.write("At a minimum, postgres version 12 is required\n") sys.stderr.write("Postgres version 12 is required\n")
sys.exit(1) sys.exit(1)
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
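Aside on the version gate above: Django reports the PostgreSQL server version as a single integer that packs major and minor together, so integer division by 10000 recovers the major version. A minimal standalone sketch of that arithmetic (sample values assumed):

    # Minimal sketch of the pg_version check: major * 10000 + minor,
    # e.g. 120013 for PostgreSQL 12.13.
    def pg_major(pg_version: int) -> int:
        return pg_version // 10000

    assert pg_major(120013) == 12   # 12.x passes the >= 12 gate
    assert pg_major(100021) == 10   # 10.x would exit with code 1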


@@ -93,7 +93,6 @@ register(
default='', default='',
label=_('Login redirect override URL'), label=_('Login redirect override URL'),
help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'), help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
category=_('Authentication'), category=_('Authentication'),
category_slug='authentication', category_slug='authentication',
) )
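The field removed here, warning_text, appears to take effect only because the API metadata layer copies it into OPTIONS responses (see the Metadata hunk further down, where 'warning_text' is also dropped from text_attrs). A standalone sketch of that plumbing, with a trimmed attribute list:

    # Sketch of how optional field attributes reach the OPTIONS payload.
    # TEXT_ATTRS is a trimmed version of the metadata class's text_attrs.
    TEXT_ATTRS = ['read_only', 'label', 'help_text', 'warning_text']

    def field_metadata(field):
        info = {}
        for attr in TEXT_ATTRS:
            value = getattr(field, attr, None)
            if value is not None and value != '':
                info[attr] = value      # only present, non-empty attributes survive
        return info

    class Field:
        label = 'Login redirect override URL'
        warning_text = 'Changing the redirect URL could impact the ability to log in.'

    assert 'warning_text' in field_metadata(Field())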


@@ -30,21 +30,14 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import StaticHTMLRenderer from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation from rest_framework.negotiation import DefaultContentNegotiation
# django-ansible-base
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
from ansible_base.lib.utils.models import get_all_field_names from ansible_base.lib.utils.models import get_all_field_names
from ansible_base.lib.utils.requests import get_remote_host
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
from ansible_base.rbac.permission_registry import permission_registry
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
# AWX # AWX
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.models.rbac import give_creator_permissions
from awx.main.access import optimize_queryset from awx.main.access import optimize_queryset
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.licensing import server_product_name from awx.main.utils.licensing import server_product_name
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
from awx.main.views import ApiErrorView from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning from awx.api.versioning import URLPathVersioning
@@ -96,26 +89,20 @@ class LoggedLoginView(auth_views.LoginView):
def post(self, request, *args, **kwargs): def post(self, request, *args, **kwargs):
ret = super(LoggedLoginView, self).post(request, *args, **kwargs) ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
if request.user.is_authenticated: if request.user.is_authenticated:
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip))) logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
ret.set_cookie( ret.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
)
ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
return ret return ret
else: else:
if 'username' in self.request.POST: if 'username' in self.request.POST:
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip))) logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
ret.status_code = 401 ret.status_code = 401
return ret return ret
class LoggedLogoutView(auth_views.LogoutView): class LoggedLogoutView(auth_views.LogoutView):
success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
original_user = getattr(request, 'user', None) original_user = getattr(request, 'user', None)
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs) ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
@@ -155,23 +142,22 @@ class APIView(views.APIView):
Store the Django REST Framework Request object as an attribute on the Store the Django REST Framework Request object as an attribute on the
normal Django request, store time the request started. normal Django request, store time the request started.
""" """
remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
self.time_started = time.time() self.time_started = time.time()
if getattr(settings, 'SQL_DEBUG', False): if getattr(settings, 'SQL_DEBUG', False):
self.queries_before = len(connection.queries) self.queries_before = len(connection.queries)
if 'HTTP_X_TRUSTED_PROXY' in request.environ:
if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
remote_headers = settings.REMOTE_HOST_HEADERS
else:
logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
# they respect the allowed proxy list # they respect the allowed proxy list
if settings.PROXY_IP_ALLOWED_LIST: if all(
if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers): [
delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS) settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
]
):
for custom_header in settings.REMOTE_HOST_HEADERS:
if custom_header.startswith('HTTP_'):
request.environ.pop(custom_header, None)
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs) drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
request.drf_request = drf_request request.drf_request = drf_request
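Both sides of this hunk implement the same defense in different shapes: when a proxy allow-list is configured and the request's peer is not on it, client-supplied forwarding headers are stripped before DRF sees them. A simplified standalone sketch (the REMOTE_HOST check is omitted; header names are the usual CGI spellings):

    # Sketch of the header-stripping defense, on a plain WSGI-style environ.
    def strip_untrusted_proxy_headers(environ, proxy_allow_list, remote_host_headers):
        peer = environ.get('REMOTE_ADDR')
        if proxy_allow_list and peer not in proxy_allow_list:
            for header in remote_host_headers:
                if header.startswith('HTTP_'):
                    environ.pop(header, None)   # drop spoofable forwarding headers
        return environ

    environ = {'REMOTE_ADDR': '203.0.113.9', 'HTTP_X_FORWARDED_FOR': '10.0.0.1'}
    strip_untrusted_proxy_headers(environ, ['198.51.100.1'], ['HTTP_X_FORWARDED_FOR'])
    assert 'HTTP_X_FORWARDED_FOR' not in environ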
@@ -216,21 +202,17 @@ class APIView(views.APIView):
return response return response
if response.status_code >= 400: if response.status_code >= 400:
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
msg_data = { msg_data = {
'status_code': response.status_code, 'status_code': response.status_code,
'user_name': request.user, 'user_name': request.user,
'url_path': request.path, 'url_path': request.path,
'remote_addr': ip, 'remote_addr': request.META.get('REMOTE_ADDR', None),
} }
if type(response.data) is dict: if type(response.data) is dict:
msg_data['error'] = response.data.get('error', response.status_text) msg_data['error'] = response.data.get('error', response.status_text)
elif type(response.data) is list: elif type(response.data) is list:
if len(response.data) > 0 and isinstance(response.data[0], str): msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
msg_data['error'] = str(response.data[0])
else:
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
else: else:
msg_data['error'] = response.status_text msg_data['error'] = response.status_text
@@ -490,11 +472,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView): class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
# Base class for a list view that allows creating new objects. # Base class for a list view that allows creating new objects.
def perform_create(self, serializer): pass
super().perform_create(serializer)
if serializer.Meta.model in permission_registry.all_registered_models:
if self.request and self.request.user:
give_creator_permissions(self.request.user, serializer.instance)
class ParentMixin(object): class ParentMixin(object):
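The left side turns perform_create from a pass-through into a hook that grants the creating user direct permissions on the new object whenever its model is registered with the DAB RBAC permission registry. A stand-in sketch of the pattern (dict objects substitute for Django models; give_creator_permissions stands in for the helper named in the diff):

    # Stand-in sketch of the creator-permission hook.
    REGISTERED_MODELS = {'jobtemplate'}   # stand-in for permission_registry.all_registered_models

    def give_creator_permissions(user, obj):
        obj.setdefault('admins', []).append(user)

    def perform_create(request_user, instance, model_name):
        # after the object is saved, grant its creator direct access
        if model_name in REGISTERED_MODELS and request_user is not None:
            give_creator_permissions(request_user, instance)

    jt = {}
    perform_create('alice', jt, 'jobtemplate')
    assert jt['admins'] == ['alice']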
@@ -814,7 +792,6 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
class ResourceAccessList(ParentMixin, ListAPIView): class ResourceAccessList(ParentMixin, ListAPIView):
deprecated = True
serializer_class = ResourceAccessListElementSerializer serializer_class = ResourceAccessListElementSerializer
ordering = ('username',) ordering = ('username',)
@@ -822,15 +799,6 @@ class ResourceAccessList(ParentMixin, ListAPIView):
obj = self.get_parent_object() obj = self.get_parent_object()
content_type = ContentType.objects.get_for_model(obj) content_type = ContentType.objects.get_for_model(obj)
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct()
roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id)) roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))
ancestors = set() ancestors = set()
@@ -990,7 +958,7 @@ class CopyAPIView(GenericAPIView):
None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '') None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
) )
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all(): if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
give_creator_permissions(request.user, new_obj) new_obj.admin_role.members.add(request.user)
if sub_objs: if sub_objs:
permission_check_func = None permission_check_func = None
if hasattr(type(self), 'deep_copy_permission_check_func'): if hasattr(type(self), 'deep_copy_permission_check_func'):


@@ -36,13 +36,11 @@ class Metadata(metadata.SimpleMetadata):
field_info = OrderedDict() field_info = OrderedDict()
field_info['type'] = self.label_lookup[field] field_info['type'] = self.label_lookup[field]
field_info['required'] = getattr(field, 'required', False) field_info['required'] = getattr(field, 'required', False)
field_info['hidden'] = getattr(field, 'hidden', False)
text_attrs = [ text_attrs = [
'read_only', 'read_only',
'label', 'label',
'help_text', 'help_text',
'warning_text',
'min_length', 'min_length',
'max_length', 'max_length',
'min_value', 'min_value',
@@ -103,7 +101,7 @@ class Metadata(metadata.SimpleMetadata):
default = field.get_default() default = field.get_default()
if type(default) is UUID: if type(default) is UUID:
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx' default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost': if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
default = '{}://{}'.format(self.request.scheme, self.request.get_host()) default = '{}://{}'.format(self.request.scheme, self.request.get_host())
field_info['default'] = default field_info['default'] = default
except serializers.SkipField: except serializers.SkipField:


@@ -43,14 +43,11 @@ from rest_framework.utils.serializer_helpers import ReturnList
# Django-Polymorphic # Django-Polymorphic
from polymorphic.models import PolymorphicModel from polymorphic.models import PolymorphicModel
# django-ansible-base
from ansible_base.lib.utils.models import get_type_for_model from ansible_base.lib.utils.models import get_type_for_model
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
from ansible_base.rbac import permission_registry
# AWX # AWX
from awx.main.access import get_user_capabilities from awx.main.access import get_user_capabilities
from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE
from awx.main.models import ( from awx.main.models import (
ActivityStream, ActivityStream,
AdHocCommand, AdHocCommand,
@@ -105,7 +102,7 @@ from awx.main.models import (
CLOUD_INVENTORY_SOURCES, CLOUD_INVENTORY_SOURCES,
) )
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role from awx.main.models.rbac import role_summary_fields_generator, RoleAncestorEntry
from awx.main.fields import ImplicitRoleField from awx.main.fields import ImplicitRoleField
from awx.main.utils import ( from awx.main.utils import (
get_model_for_type, get_model_for_type,
@@ -194,7 +191,6 @@ SUMMARIZABLE_FK_FIELDS = {
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'), 'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'), 'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
'credential_type': DEFAULT_SUMMARY_FIELDS, 'credential_type': DEFAULT_SUMMARY_FIELDS,
'resource': ('ansible_id', 'resource_type'),
} }
@@ -2766,26 +2762,13 @@ class ResourceAccessListElementSerializer(UserSerializer):
team_content_type = ContentType.objects.get_for_model(Team) team_content_type = ContentType.objects.get_for_model(Team)
content_type = ContentType.objects.get_for_model(obj) content_type = ContentType.objects.get_for_model(obj)
reversed_org_map = {} def get_roles_on_resource(parent_role):
for k, v in org_role_to_permission.items(): "Returns a string list of the roles a parent_role has for current obj."
reversed_org_map[v] = k return list(
reversed_role_map = {} RoleAncestorEntry.objects.filter(ancestor=parent_role, content_type_id=content_type.id, object_id=obj.id)
for k, v in to_permissions.items(): .values_list('role_field', flat=True)
reversed_role_map[v] = k .distinct()
)
def get_roles_from_perms(perm_list):
"""given a list of permission codenames return a list of role names"""
role_names = set()
for codename in perm_list:
action = codename.split('_', 1)[0]
if action in reversed_role_map:
role_names.add(reversed_role_map[action])
elif codename in reversed_org_map:
if isinstance(obj, Organization):
role_names.add(reversed_org_map[codename])
if 'view_organization' not in role_names:
role_names.add('read_role')
return list(role_names)
def format_role_perm(role): def format_role_perm(role):
role_dict = {'id': role.id, 'name': role.name, 'description': role.description} role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
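get_roles_from_perms above translates DAB RBAC permission codenames back into legacy role names by splitting off the action prefix. A simplified sketch with a hypothetical subset of the real to_permissions mapping (the real function also handles organization-level codenames and the implied read_role):

    # Simplified sketch of the codename-to-role translation.
    REVERSED_ROLE_MAP = {'change': 'admin_role', 'execute': 'execute_role', 'view': 'read_role'}  # hypothetical subset

    def get_roles_from_perms(perm_list):
        role_names = set()
        for codename in perm_list:
            action = codename.split('_', 1)[0]   # 'execute_jobtemplate' -> 'execute'
            if action in REVERSED_ROLE_MAP:
                role_names.add(REVERSED_ROLE_MAP[action])
        return sorted(role_names)

    assert get_roles_from_perms(['execute_jobtemplate', 'view_jobtemplate']) == ['execute_role', 'read_role']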
@@ -2802,21 +2785,13 @@ class ResourceAccessListElementSerializer(UserSerializer):
else: else:
# Singleton roles should not be managed from this view, as per copy/edit rework spec # Singleton roles should not be managed from this view, as per copy/edit rework spec
role_dict['user_capabilities'] = {'unattach': False} role_dict['user_capabilities'] = {'unattach': False}
return {'role': role_dict, 'descendant_roles': get_roles_on_resource(role)}
model_name = content_type.model
if isinstance(obj, Organization):
descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name) or codename.startswith('add_')]
else:
descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name)]
return {'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)}
def format_team_role_perm(naive_team_role, permissive_role_ids): def format_team_role_perm(naive_team_role, permissive_role_ids):
ret = [] ret = []
team = naive_team_role.content_object
team_role = naive_team_role team_role = naive_team_role
if naive_team_role.role_field == 'admin_role': if naive_team_role.role_field == 'admin_role':
team_role = team.member_role team_role = naive_team_role.content_object.member_role
for role in team_role.children.filter(id__in=permissive_role_ids).all(): for role in team_role.children.filter(id__in=permissive_role_ids).all():
role_dict = { role_dict = {
'id': role.id, 'id': role.id,
@@ -2836,87 +2811,10 @@ class ResourceAccessListElementSerializer(UserSerializer):
else: else:
# Singleton roles should not be managed from this view, as per copy/edit rework spec # Singleton roles should not be managed from this view, as per copy/edit rework spec
role_dict['user_capabilities'] = {'unattach': False} role_dict['user_capabilities'] = {'unattach': False}
ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(team_role)})
descendant_perms = list(
RoleEvaluation.objects.filter(role__in=team.has_roles.all(), object_id=obj.id, content_type_id=content_type.id)
.values_list('codename', flat=True)
.distinct()
)
ret.append({'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)})
return ret
gfk_kwargs = dict(content_type_id=content_type.id, object_id=obj.id)
direct_permissive_role_ids = Role.objects.filter(**gfk_kwargs).values_list('id', flat=True)
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ret['summary_fields']['direct_access'] = []
ret['summary_fields']['indirect_access'] = []
new_roles_seen = set()
all_team_roles = set()
all_permissive_role_ids = set()
for evaluation in RoleEvaluation.objects.filter(role__in=user.has_roles.all(), **gfk_kwargs).prefetch_related('role'):
new_role = evaluation.role
if new_role.id in new_roles_seen:
continue
new_roles_seen.add(new_role.id)
old_role = get_role_from_object_role(new_role)
all_permissive_role_ids.add(old_role.id)
if int(new_role.object_id) == obj.id and new_role.content_type_id == content_type.id:
ret['summary_fields']['direct_access'].append(format_role_perm(old_role))
elif new_role.content_type_id == team_content_type.id:
all_team_roles.add(old_role)
else:
ret['summary_fields']['indirect_access'].append(format_role_perm(old_role))
# Lazy role creation gives us a big problem, where some intermediate roles are not easy to find
# like when a team has indirect permission, so here we get all roles the users teams have
# these contribute to all potential permission-granting roles of the object
user_teams_qs = permission_registry.team_model.objects.filter(member_roles__in=ObjectRole.objects.filter(users=user))
team_obj_roles = ObjectRole.objects.filter(teams__in=user_teams_qs)
for evaluation in RoleEvaluation.objects.filter(role__in=team_obj_roles, **gfk_kwargs).prefetch_related('role'):
new_role = evaluation.role
if new_role.id in new_roles_seen:
continue
new_roles_seen.add(new_role.id)
old_role = get_role_from_object_role(new_role)
all_permissive_role_ids.add(old_role.id)
# In DAB RBAC, superuser is strictly a user flag, and global roles are not in the RoleEvaluation table
if user.is_superuser:
ret['summary_fields'].setdefault('indirect_access', [])
all_role_names = [field.name for field in obj._meta.get_fields() if isinstance(field, ImplicitRoleField)]
ret['summary_fields']['indirect_access'].append(
{
"role": {
"id": None,
"name": _("System Administrator"),
"description": _("Can manage all aspects of the system"),
"user_capabilities": {"unattach": False},
},
"descendant_roles": all_role_names,
}
)
elif user.is_system_auditor:
ret['summary_fields'].setdefault('indirect_access', [])
ret['summary_fields']['indirect_access'].append(
{
"role": {
"id": None,
"name": _("System Auditor"),
"description": _("Can view all aspects of the system"),
"user_capabilities": {"unattach": False},
},
"descendant_roles": ["read_role"],
}
)
ret['summary_fields']['direct_access'].extend([y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in all_team_roles) for y in x])
return ret return ret
direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True) all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all() direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
@@ -3185,7 +3083,7 @@ class CredentialSerializerCreate(CredentialSerializer):
credential = super(CredentialSerializerCreate, self).create(validated_data) credential = super(CredentialSerializerCreate, self).create(validated_data)
if user: if user:
give_creator_permissions(user, credential) credential.admin_role.members.add(user)
if team: if team:
if not credential.organization or team.organization.id != credential.organization.id: if not credential.organization or team.organization.id != credential.organization.id:
raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")}) raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
@@ -5381,7 +5279,7 @@ class NotificationSerializer(BaseSerializer):
) )
def get_body(self, obj): def get_body(self, obj):
if obj.notification_type in ('webhook', 'pagerduty', 'awssns'): if obj.notification_type in ('webhook', 'pagerduty'):
if isinstance(obj.body, dict): if isinstance(obj.body, dict):
if 'body' in obj.body: if 'body' in obj.body:
return obj.body['body'] return obj.body['body']
@@ -5403,9 +5301,9 @@ class NotificationSerializer(BaseSerializer):
def to_representation(self, obj): def to_representation(self, obj):
ret = super(NotificationSerializer, self).to_representation(obj) ret = super(NotificationSerializer, self).to_representation(obj)
if obj.notification_type in ('webhook', 'awssns'): if obj.notification_type == 'webhook':
ret.pop('subject') ret.pop('subject')
if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'): if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
ret.pop('body') ret.pop('body')
return ret return ret
@@ -5696,7 +5594,7 @@ class InstanceSerializer(BaseSerializer):
res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk}) res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk}) res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk}) res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP] and not obj.managed: if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk}) res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor: if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
if obj.node_type == 'execution': if obj.node_type == 'execution':


@@ -2,12 +2,6 @@
- hosts: all - hosts: all
become: yes become: yes
tasks: tasks:
- name: Create the receptor group
group:
{% verbatim %}
name: "{{ receptor_group }}"
{% endverbatim %}
state: present
- name: Create the receptor user - name: Create the receptor user
user: user:
{% verbatim %} {% verbatim %}


@@ -2,21 +2,28 @@
# All Rights Reserved. # All Rights Reserved.
from django.conf import settings from django.conf import settings
from django.urls import NoReverseMatch
from rest_framework.reverse import reverse as drf_reverse from rest_framework.reverse import _reverse
from rest_framework.versioning import URLPathVersioning as BaseVersioning from rest_framework.versioning import URLPathVersioning as BaseVersioning
def is_optional_api_urlpattern_prefix_request(request): def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
if settings.OPTIONAL_API_URLPATTERN_PREFIX and request: """
if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"): Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
return True query string parameters.
return False """
scheme = getattr(request, 'versioning_scheme', None)
if scheme is not None:
try:
url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
except NoReverseMatch:
# In case the versioning scheme reversal fails, fallback to the
# default implementation
url = _reverse(viewname, args, kwargs, request, format, **extra)
else:
url = _reverse(viewname, args, kwargs, request, format, **extra)
def transform_optional_api_urlpattern_prefix_url(request, url):
if is_optional_api_urlpattern_prefix_request(request):
url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
return url return url
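The two sides reach correct URLs by different routes: the right side monkey-patches DRF's reverse to consult the request's versioning scheme, while the left side post-processes already-reversed URLs when the optional /api/<prefix> mount is in use. A standalone sketch of the left side's rewrite, assuming OPTIONAL_API_URLPATTERN_PREFIX = 'controller':

    # Sketch of the optional-prefix URL rewrite; 'controller' is an assumed value.
    OPTIONAL_API_URLPATTERN_PREFIX = 'controller'

    def is_optional_prefix_request(path):
        return bool(OPTIONAL_API_URLPATTERN_PREFIX) and path.startswith(f"/api/{OPTIONAL_API_URLPATTERN_PREFIX}")

    def transform_url(path, url):
        # only requests that arrived via the optional mount get rewritten
        if is_optional_prefix_request(path):
            url = url.replace('/api', f"/api/{OPTIONAL_API_URLPATTERN_PREFIX}")
        return url

    assert transform_url('/api/controller/v2/ping/', '/api/v2/ping/') == '/api/controller/v2/ping/'
    assert transform_url('/api/v2/ping/', '/api/v2/ping/') == '/api/v2/ping/'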


@@ -60,11 +60,6 @@ from oauth2_provider.models import get_access_token_model
import pytz import pytz
from wsgiref.util import FileWrapper from wsgiref.util import FileWrapper
# django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
# AWX # AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
from awx.main.access import get_user_queryset from awx.main.access import get_user_queryset
@@ -92,7 +87,6 @@ from awx.api.generics import (
from awx.api.views.labels import LabelSubListCreateAttachDetachView from awx.api.views.labels import LabelSubListCreateAttachDetachView
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.main import models from awx.main import models
from awx.main.models.rbac import get_role_definition
from awx.main.utils import ( from awx.main.utils import (
camelcase_to_underscore, camelcase_to_underscore,
extract_ansible_vars, extract_ansible_vars,
@@ -130,7 +124,6 @@ from awx.api.views.mixin import (
from awx.api.pagination import UnifiedJobEventPagination from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views') logger = logging.getLogger('awx.api.views')
@@ -279,24 +272,16 @@ class DashboardJobsGraphView(APIView):
success_query = user_unified_jobs.filter(status='successful') success_query = user_unified_jobs.filter(status='successful')
failed_query = user_unified_jobs.filter(status='failed') failed_query = user_unified_jobs.filter(status='failed')
canceled_query = user_unified_jobs.filter(status='canceled')
error_query = user_unified_jobs.filter(status='error')
if job_type == 'inv_sync': if job_type == 'inv_sync':
success_query = success_query.filter(instance_of=models.InventoryUpdate) success_query = success_query.filter(instance_of=models.InventoryUpdate)
failed_query = failed_query.filter(instance_of=models.InventoryUpdate) failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
error_query = error_query.filter(instance_of=models.InventoryUpdate)
elif job_type == 'playbook_run': elif job_type == 'playbook_run':
success_query = success_query.filter(instance_of=models.Job) success_query = success_query.filter(instance_of=models.Job)
failed_query = failed_query.filter(instance_of=models.Job) failed_query = failed_query.filter(instance_of=models.Job)
canceled_query = canceled_query.filter(instance_of=models.Job)
error_query = error_query.filter(instance_of=models.Job)
elif job_type == 'scm_update': elif job_type == 'scm_update':
success_query = success_query.filter(instance_of=models.ProjectUpdate) success_query = success_query.filter(instance_of=models.ProjectUpdate)
failed_query = failed_query.filter(instance_of=models.ProjectUpdate) failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
error_query = error_query.filter(instance_of=models.ProjectUpdate)
end = now() end = now()
interval = 'day' interval = 'day'
@@ -312,12 +297,10 @@ class DashboardJobsGraphView(APIView):
else: else:
return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST) return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}} dashboard_data = {"jobs": {"successful": [], "failed": []}}
succ_list = dashboard_data['jobs']['successful'] succ_list = dashboard_data['jobs']['successful']
fail_list = dashboard_data['jobs']['failed'] fail_list = dashboard_data['jobs']['failed']
canceled_list = dashboard_data['jobs']['canceled']
error_list = dashboard_data['jobs']['error']
qs_s = ( qs_s = (
success_query.filter(finished__range=(start, end)) success_query.filter(finished__range=(start, end))
@@ -335,22 +318,6 @@ class DashboardJobsGraphView(APIView):
.annotate(agg=Count('id', distinct=True)) .annotate(agg=Count('id', distinct=True))
) )
data_f = {item['d']: item['agg'] for item in qs_f} data_f = {item['d']: item['agg'] for item in qs_f}
qs_c = (
canceled_query.filter(finished__range=(start, end))
.annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
.order_by()
.values('d')
.annotate(agg=Count('id', distinct=True))
)
data_c = {item['d']: item['agg'] for item in qs_c}
qs_e = (
error_query.filter(finished__range=(start, end))
.annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
.order_by()
.values('d')
.annotate(agg=Count('id', distinct=True))
)
data_e = {item['d']: item['agg'] for item in qs_e}
start_date = start.replace(hour=0, minute=0, second=0, microsecond=0) start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
for d in itertools.count(): for d in itertools.count():
@@ -359,8 +326,6 @@ class DashboardJobsGraphView(APIView):
break break
succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)]) succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)]) fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])
return Response(dashboard_data) return Response(dashboard_data)
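Each added series follows the same ORM shape as the existing ones: truncate finished to the interval, group, count distinct ids, then read the buckets back per day. A pure-Python sketch of what Trunc plus Count produce for one series:

    # Pure-Python equivalent of Trunc('finished', 'day') + Count('id').
    from collections import Counter
    from datetime import datetime

    def bucket_by_day(finished_times):
        # map each finish time to midnight of its day, then count per bucket
        return Counter(t.replace(hour=0, minute=0, second=0, microsecond=0) for t in finished_times)

    canceled = [datetime(2024, 2, 12, 9, 30), datetime(2024, 2, 12, 17, 5), datetime(2024, 2, 13, 8, 0)]
    counts = bucket_by_day(canceled)
    assert counts[datetime(2024, 2, 12)] == 2 and counts[datetime(2024, 2, 13)] == 1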
@@ -543,7 +508,6 @@ class InstanceGroupAccessList(ResourceAccessList):
class InstanceGroupObjectRolesList(SubListAPIView): class InstanceGroupObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.InstanceGroup parent_model = models.InstanceGroup
@@ -713,81 +677,16 @@ class AuthView(APIView):
return Response(data) return Response(data)
def immutablesharedfields(cls):
'''
Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
Works by overriding these view methods:
- create
- delete
- perform_update
create and delete are overridden to raise a PermissionDenied exception.
perform_update is overridden to check if any shared fields are being modified,
and raise a PermissionDenied exception if so.
'''
# create instead of perform_create because some of our views
# override create instead of perform_create
if hasattr(cls, 'create'):
cls.original_create = cls.create
@functools.wraps(cls.create)
def create_wrapper(*args, **kwargs):
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
return cls.original_create(*args, **kwargs)
raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
cls.create = create_wrapper
if hasattr(cls, 'delete'):
cls.original_delete = cls.delete
@functools.wraps(cls.delete)
def delete_wrapper(*args, **kwargs):
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
return cls.original_delete(*args, **kwargs)
raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
cls.delete = delete_wrapper
if hasattr(cls, 'perform_update'):
cls.original_perform_update = cls.perform_update
@functools.wraps(cls.perform_update)
def update_wrapper(*args, **kwargs):
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
view, serializer = args
instance = view.get_object()
if instance:
if isinstance(instance, models.Organization):
shared_fields = OrganizationType._declared_fields.keys()
elif isinstance(instance, models.User):
shared_fields = UserType._declared_fields.keys()
elif isinstance(instance, models.Team):
shared_fields = TeamType._declared_fields.keys()
attrs = serializer.validated_data
for field in shared_fields:
if field in attrs and getattr(instance, field) != attrs[field]:
raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
return cls.original_perform_update(*args, **kwargs)
cls.perform_update = update_wrapper
return cls
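The update path of the decorator compares each shared field's incoming value against the current instance and rejects any change unless local management is enabled. A minimal standalone sketch of that guard (SHARED_FIELDS is hypothetical; the real names come from the resource registry's declared fields):

    # Minimal sketch of the shared-field guard in perform_update.
    SHARED_FIELDS = ('name', 'description')     # hypothetical field names

    class PermissionDenied(Exception):
        pass

    def guard_shared_fields(instance, validated_data, allow_local=False):
        if allow_local:
            return                              # local management enabled: no restriction
        for field in SHARED_FIELDS:
            if field in validated_data and getattr(instance, field) != validated_data[field]:
                raise PermissionDenied(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")

    class Org:
        name = 'Default'
        description = ''

    try:
        guard_shared_fields(Org(), {'name': 'Renamed'})
    except PermissionDenied as exc:
        print(exc)                              # write rejected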
@immutablesharedfields
class TeamList(ListCreateAPIView): class TeamList(ListCreateAPIView):
model = models.Team model = models.Team
serializer_class = serializers.TeamSerializer serializer_class = serializers.TeamSerializer
@immutablesharedfields
class TeamDetail(RetrieveUpdateDestroyAPIView): class TeamDetail(RetrieveUpdateDestroyAPIView):
model = models.Team model = models.Team
serializer_class = serializers.TeamSerializer serializer_class = serializers.TeamSerializer
@immutablesharedfields
class TeamUsersList(BaseUsersList): class TeamUsersList(BaseUsersList):
model = models.User model = models.User
serializer_class = serializers.UserSerializer serializer_class = serializers.UserSerializer
@@ -797,7 +696,6 @@ class TeamUsersList(BaseUsersList):
class TeamRolesList(SubListAttachDetachAPIView): class TeamRolesList(SubListAttachDetachAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializerWithParentAccess serializer_class = serializers.RoleSerializerWithParentAccess
metadata_class = RoleMetadata metadata_class = RoleMetadata
@@ -837,12 +735,10 @@ class TeamRolesList(SubListAttachDetachAPIView):
class TeamObjectRolesList(SubListAPIView): class TeamObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.Team parent_model = models.Team
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -860,15 +756,8 @@ class TeamProjectsList(SubListAPIView):
self.check_parent_access(team) self.check_parent_access(team)
model_ct = ContentType.objects.get_for_model(self.model) model_ct = ContentType.objects.get_for_model(self.model)
parent_ct = ContentType.objects.get_for_model(self.parent_model) parent_ct = ContentType.objects.get_for_model(self.parent_model)
proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
rd = get_role_definition(team.member_role) return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
if role is None:
# Team has no permissions, therefore team has no projects
return self.model.objects.none()
else:
project_qs = self.model.accessible_objects(self.request.user, 'read_role')
return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
class TeamActivityStreamList(SubListAPIView): class TeamActivityStreamList(SubListAPIView):
@@ -883,23 +772,10 @@ class TeamActivityStreamList(SubListAPIView):
self.check_parent_access(parent) self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model) qs = self.request.user.get_queryset(self.model)
return qs.filter( return qs.filter(
Q(team=parent) Q(team=parent)
| Q( | Q(project__in=models.Project.accessible_objects(parent.member_role, 'read_role'))
project__in=RoleEvaluation.objects.filter( | Q(credential__in=models.Credential.accessible_objects(parent.member_role, 'read_role'))
role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Project).id, codename='view_project'
)
.values_list('object_id')
.distinct()
)
| Q(
credential__in=RoleEvaluation.objects.filter(
role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Credential).id, codename='view_credential'
)
.values_list('object_id')
.distinct()
)
) )
@@ -1151,12 +1027,10 @@ class ProjectAccessList(ResourceAccessList):
class ProjectObjectRolesList(SubListAPIView): class ProjectObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.Project parent_model = models.Project
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -1169,7 +1043,6 @@ class ProjectCopy(CopyAPIView):
copy_return_serializer_class = serializers.ProjectSerializer copy_return_serializer_class = serializers.ProjectSerializer
@immutablesharedfields
class UserList(ListCreateAPIView): class UserList(ListCreateAPIView):
model = models.User model = models.User
serializer_class = serializers.UserSerializer serializer_class = serializers.UserSerializer
@@ -1315,7 +1188,6 @@ class UserTeamsList(SubListAPIView):
class UserRolesList(SubListAttachDetachAPIView): class UserRolesList(SubListAttachDetachAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializerWithParentAccess serializer_class = serializers.RoleSerializerWithParentAccess
metadata_class = RoleMetadata metadata_class = RoleMetadata
@@ -1340,16 +1212,7 @@ class UserRolesList(SubListAttachDetachAPIView):
user = get_object_or_400(models.User, pk=self.kwargs['pk']) user = get_object_or_400(models.User, pk=self.kwargs['pk'])
role = get_object_or_400(models.Role, pk=sub_id) role = get_object_or_400(models.Role, pk=sub_id)
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential) # dict of {model: content_type} credential_content_type = ContentType.objects.get_for_model(models.Credential)
# Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
for model in [models.Organization, models.Team]:
ct = content_types[model]
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
return Response(data, status=status.HTTP_403_FORBIDDEN)
credential_content_type = content_types[models.Credential]
if role.content_type == credential_content_type: if role.content_type == credential_content_type:
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role: if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization")) data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1421,7 +1284,6 @@ class UserActivityStreamList(SubListAPIView):
return qs.filter(Q(actor=parent) | Q(user__in=[parent])) return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
@immutablesharedfields
class UserDetail(RetrieveUpdateDestroyAPIView): class UserDetail(RetrieveUpdateDestroyAPIView):
model = models.User model = models.User
serializer_class = serializers.UserSerializer serializer_class = serializers.UserSerializer
@@ -1600,12 +1462,10 @@ class CredentialAccessList(ResourceAccessList):
class CredentialObjectRolesList(SubListAPIView): class CredentialObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.Credential parent_model = models.Credential
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -2392,13 +2252,12 @@ class JobTemplateList(ListCreateAPIView):
serializer_class = serializers.JobTemplateSerializer serializer_class = serializers.JobTemplateSerializer
always_allow_superuser = False always_allow_superuser = False
def check_permissions(self, request): def post(self, request, *args, **kwargs):
if request.method == 'POST': ret = super(JobTemplateList, self).post(request, *args, **kwargs)
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data) if ret.status_code == 201:
if not can_access: job_template = models.JobTemplate.objects.get(id=ret.data['id'])
self.permission_denied(request, message=messages) job_template.admin_role.members.add(request.user)
return ret
super(JobTemplateList, self).check_permissions(request)
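The left side moves the creator-grant out of post() and instead rejects unauthorized POSTs up front via can_access_with_errors, so denials can carry field-level messages. A stand-in sketch of that flow (the project rule inside FakeUser is purely hypothetical):

    # Stand-in sketch of the check_permissions override.
    class FakeUser:
        def can_access_with_errors(self, model, action, data):
            if action == 'add' and not data.get('project'):    # hypothetical access rule
                return False, {'project': 'a project is required'}
            return True, None

    def check_permissions(user, method, data):
        if method == 'POST':
            can_access, messages = user.can_access_with_errors('JobTemplate', 'add', data)
            if not can_access:
                raise PermissionError(messages)                # stands in for self.permission_denied

    check_permissions(FakeUser(), 'POST', {'project': 42})     # allowed
    try:
        check_permissions(FakeUser(), 'POST', {})              # denied
    except PermissionError as exc:
        print(exc)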
class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView): class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
@@ -2779,7 +2638,12 @@ class JobTemplateCallback(GenericAPIView):
host for the current request. host for the current request.
""" """
# Find the list of remote host names/IPs to check. # Find the list of remote host names/IPs to check.
remote_hosts = set(get_remote_hosts(self.request)) remote_hosts = set()
for header in settings.REMOTE_HOST_HEADERS:
for value in self.request.META.get(header, '').split(','):
value = value.strip()
if value:
remote_hosts.add(value)
# Add the reverse lookup of IP addresses. # Add the reverse lookup of IP addresses.
for rh in list(remote_hosts): for rh in list(remote_hosts):
try: try:
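The right side rebuilds the candidate host set inline: each configured header may carry a comma-separated chain of addresses (as X-Forwarded-For does), so every segment is stripped and collected. A standalone sketch with assumed header names:

    # Sketch of the comma-separated header parsing; the header list is assumed.
    REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR']

    def remote_hosts_from(meta):
        hosts = set()
        for header in REMOTE_HOST_HEADERS:
            for value in meta.get(header, '').split(','):
                value = value.strip()
                if value:                       # skip empty segments
                    hosts.add(value)
        return hosts

    meta = {'HTTP_X_FORWARDED_FOR': '10.0.0.1, 198.51.100.7', 'REMOTE_ADDR': '198.51.100.7'}
    assert remote_hosts_from(meta) == {'10.0.0.1', '198.51.100.7'}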
@@ -2940,12 +2804,10 @@ class JobTemplateAccessList(ResourceAccessList):
class JobTemplateObjectRolesList(SubListAPIView): class JobTemplateObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.JobTemplate parent_model = models.JobTemplate
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -3119,14 +2981,6 @@ class WorkflowJobTemplateList(ListCreateAPIView):
serializer_class = serializers.WorkflowJobTemplateSerializer serializer_class = serializers.WorkflowJobTemplateSerializer
always_allow_superuser = False always_allow_superuser = False
def check_permissions(self, request):
if request.method == 'POST':
can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
if not can_access:
self.permission_denied(request, message=messages)
super(WorkflowJobTemplateList, self).check_permissions(request)
class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView): class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = models.WorkflowJobTemplate model = models.WorkflowJobTemplate
@@ -3336,12 +3190,10 @@ class WorkflowJobTemplateAccessList(ResourceAccessList):
class WorkflowJobTemplateObjectRolesList(SubListAPIView): class WorkflowJobTemplateObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.WorkflowJobTemplate parent_model = models.WorkflowJobTemplate
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -4350,7 +4202,6 @@ class ActivityStreamDetail(RetrieveAPIView):
class RoleList(ListAPIView): class RoleList(ListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
permission_classes = (IsAuthenticated,) permission_classes = (IsAuthenticated,)
@@ -4358,13 +4209,11 @@ class RoleList(ListAPIView):
class RoleDetail(RetrieveAPIView): class RoleDetail(RetrieveAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
class RoleUsersList(SubListAttachDetachAPIView): class RoleUsersList(SubListAttachDetachAPIView):
deprecated = True
model = models.User model = models.User
serializer_class = serializers.UserSerializer serializer_class = serializers.UserSerializer
parent_model = models.Role parent_model = models.Role
@@ -4385,15 +4234,7 @@ class RoleUsersList(SubListAttachDetachAPIView):
user = get_object_or_400(models.User, pk=sub_id) user = get_object_or_400(models.User, pk=sub_id)
role = self.get_parent_object() role = self.get_parent_object()
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential) # dict of {model: content_type} credential_content_type = ContentType.objects.get_for_model(models.Credential)
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
for model in [models.Organization, models.Team]:
ct = content_types[model]
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
return Response(data, status=status.HTTP_403_FORBIDDEN)
credential_content_type = content_types[models.Credential]
if role.content_type == credential_content_type: if role.content_type == credential_content_type:
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role: if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization")) data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -4407,7 +4248,6 @@ class RoleUsersList(SubListAttachDetachAPIView):
class RoleTeamsList(SubListAttachDetachAPIView): class RoleTeamsList(SubListAttachDetachAPIView):
deprecated = True
model = models.Team model = models.Team
serializer_class = serializers.TeamSerializer serializer_class = serializers.TeamSerializer
parent_model = models.Role parent_model = models.Role
@@ -4452,12 +4292,10 @@ class RoleTeamsList(SubListAttachDetachAPIView):
team.member_role.children.remove(role) team.member_role.children.remove(role)
else: else:
team.member_role.children.add(role) team.member_role.children.add(role)
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
class RoleParentsList(SubListAPIView): class RoleParentsList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.Role parent_model = models.Role
@@ -4471,7 +4309,6 @@ class RoleParentsList(SubListAPIView):
class RoleChildrenList(SubListAPIView): class RoleChildrenList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.Role parent_model = models.Role


@@ -48,23 +48,23 @@ class AnalyticsRootView(APIView):
def get(self, request, format=None): def get(self, request, format=None):
data = OrderedDict() data = OrderedDict()
data['authorized'] = reverse('api:analytics_authorized', request=request) data['authorized'] = reverse('api:analytics_authorized')
data['reports'] = reverse('api:analytics_reports_list', request=request) data['reports'] = reverse('api:analytics_reports_list')
data['report_options'] = reverse('api:analytics_report_options_list', request=request) data['report_options'] = reverse('api:analytics_report_options_list')
data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request) data['adoption_rate'] = reverse('api:analytics_adoption_rate')
data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request) data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
data['event_explorer'] = reverse('api:analytics_event_explorer', request=request) data['event_explorer'] = reverse('api:analytics_event_explorer')
data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request) data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
data['host_explorer'] = reverse('api:analytics_host_explorer', request=request) data['host_explorer'] = reverse('api:analytics_host_explorer')
data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request) data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
data['job_explorer'] = reverse('api:analytics_job_explorer', request=request) data['job_explorer'] = reverse('api:analytics_job_explorer')
data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request) data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request) data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request) data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request) data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request) data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request) data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request) data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
return Response(data) return Response(data)
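The left side passes request= to every reverse() call, which is what upgrades the relative path into an absolute URL built from the request's scheme and host. A stand-in sketch of the difference (the route table is hypothetical; DRF's real reverse does the equivalent join):

    # Stand-in for rest_framework.reverse.reverse to show the request= effect.
    def reverse(viewname, request=None):
        path = {'api:analytics_reports_list': '/api/v2/analytics/reports/'}[viewname]  # hypothetical route table
        if request is not None:
            return f"{request['scheme']}://{request['host']}{path}"
        return path

    req = {'scheme': 'https', 'host': 'awx.example.com'}
    assert reverse('api:analytics_reports_list') == '/api/v2/analytics/reports/'
    assert reverse('api:analytics_reports_list', request=req) == 'https://awx.example.com/api/v2/analytics/reports/'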


@@ -152,7 +152,6 @@ class InventoryObjectRolesList(SubListAPIView):
serializer_class = RoleSerializer serializer_class = RoleSerializer
parent_model = Inventory parent_model = Inventory
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()


@@ -53,18 +53,15 @@ from awx.api.serializers import (
CredentialSerializer, CredentialSerializer,
) )
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
from awx.api.views import immutablesharedfields
logger = logging.getLogger('awx.api.views.organization') logger = logging.getLogger('awx.api.views.organization')
@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView): class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
model = Organization model = Organization
serializer_class = OrganizationSerializer serializer_class = OrganizationSerializer
@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView): class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Organization model = Organization
serializer_class = OrganizationSerializer serializer_class = OrganizationSerializer
@@ -107,7 +104,6 @@ class OrganizationInventoriesList(SubListAPIView):
relationship = 'inventories' relationship = 'inventories'
@immutablesharedfields
class OrganizationUsersList(BaseUsersList): class OrganizationUsersList(BaseUsersList):
model = User model = User
serializer_class = UserSerializer serializer_class = UserSerializer
@@ -116,7 +112,6 @@ class OrganizationUsersList(BaseUsersList):
ordering = ('username',) ordering = ('username',)
@immutablesharedfields
class OrganizationAdminsList(BaseUsersList): class OrganizationAdminsList(BaseUsersList):
model = User model = User
serializer_class = UserSerializer serializer_class = UserSerializer
@@ -155,7 +150,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
parent_key = 'organization' parent_key = 'organization'
@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView): class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
model = Team model = Team
serializer_class = TeamSerializer serializer_class = TeamSerializer
@@ -232,7 +226,6 @@ class OrganizationObjectRolesList(SubListAPIView):
serializer_class = RoleSerializer serializer_class = RoleSerializer
parent_model = Organization parent_model = Organization
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
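
A note on the `immutablesharedfields` decorator toggled throughout this file: below is a hypothetical sketch of the general shape such a guard can take. It is illustrative only, not AWX's implementation; `PROTECTED_FIELDS` and the rejection message are assumed names.

from functools import wraps
from rest_framework.exceptions import PermissionDenied

PROTECTED_FIELDS = {'name'}  # assumed set of externally managed fields

def immutable_shared_fields(view_cls):
    # wrap each write action so requests touching protected fields are rejected
    for action in ('create', 'update', 'partial_update'):
        original = getattr(view_cls, action, None)
        if original is None:
            continue

        def make_guard(func):
            @wraps(func)
            def guarded(self, request, *args, **kwargs):
                if PROTECTED_FIELDS & set(request.data):
                    raise PermissionDenied('shared fields are managed externally')
                return func(self, request, *args, **kwargs)
            return guarded

        setattr(view_cls, action, make_guard(original))
    return view_cls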

View File

@@ -13,7 +13,6 @@ from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.urls import reverse as django_reverse
from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response from rest_framework.response import Response
@@ -28,7 +27,7 @@ from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment from awx.main.ha import is_ha_environment
from awx.main.utils import get_awx_version, get_custom_venv_choices from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse from awx.api.versioning import reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ from awx.main.utils import set_environ
@@ -40,19 +39,19 @@ logger = logging.getLogger('awx.api.views.root')
class ApiRootView(APIView): class ApiRootView(APIView):
permission_classes = (AllowAny,) permission_classes = (AllowAny,)
name = _('REST API') name = _('REST API')
versioning_class = URLPathVersioning versioning_class = None
swagger_topic = 'Versioning' swagger_topic = 'Versioning'
@method_decorator(ensure_csrf_cookie) @method_decorator(ensure_csrf_cookie)
def get(self, request, format=None): def get(self, request, format=None):
'''List supported API versions''' '''List supported API versions'''
v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'}) v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
data = OrderedDict() data = OrderedDict()
data['description'] = _('AWX REST API') data['description'] = _('AWX REST API')
data['current_version'] = v2 data['current_version'] = v2
data['available_versions'] = dict(v2=v2) data['available_versions'] = dict(v2=v2)
if not is_optional_api_urlpattern_prefix_request(request): data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
data['custom_logo'] = settings.CUSTOM_LOGO data['custom_logo'] = settings.CUSTOM_LOGO
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
@@ -131,10 +130,6 @@ class ApiVersionRootView(APIView):
data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request) data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
data['bulk'] = reverse('api:bulk', request=request) data['bulk'] = reverse('api:bulk', request=request)
data['analytics'] = reverse('api:analytics_root_view', request=request) data['analytics'] = reverse('api:analytics_root_view', request=request)
data['service_index'] = django_reverse('service-index-root')
data['role_definitions'] = django_reverse('roledefinition-list')
data['role_user_assignments'] = django_reverse('roleuserassignment-list')
data['role_team_assignments'] = django_reverse('roleteamassignment-list')
return Response(data) return Response(data)
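
The left-hand column sets `versioning_class = URLPathVersioning` on the root view while the right-hand column disables it. For reference, a minimal DRF sketch of what that class does (the view and route here are illustrative, not AWX's):

from rest_framework.versioning import URLPathVersioning
from rest_framework.views import APIView
from rest_framework.response import Response

class RootView(APIView):
    # URLPathVersioning reads the 'version' URL kwarg, so a route like
    # path('api/<version>/', RootView.as_view()) populates request.version
    versioning_class = URLPathVersioning

    def get(self, request, format=None):
        return Response({'current_version': request.version})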

View File

@@ -55,7 +55,6 @@ register(
# Optional; category_slug will be slugified version of category if not # Optional; category_slug will be slugified version of category if not
# explicitly provided. # explicitly provided.
category_slug='cows', category_slug='cows',
hidden=True,
) )

View File

@@ -61,10 +61,6 @@ class StringListBooleanField(ListField):
def to_representation(self, value): def to_representation(self, value):
try: try:
if isinstance(value, str):
# https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
# DRF changed truthy and falsy lists to be capitalized
value = value.lower()
if isinstance(value, (list, tuple)): if isinstance(value, (list, tuple)):
return super(StringListBooleanField, self).to_representation(value) return super(StringListBooleanField, self).to_representation(value)
elif value in BooleanField.TRUE_VALUES: elif value in BooleanField.TRUE_VALUES:
@@ -82,8 +78,6 @@ class StringListBooleanField(ListField):
def to_internal_value(self, data): def to_internal_value(self, data):
try: try:
if isinstance(data, str):
data = data.lower()
if isinstance(data, (list, tuple)): if isinstance(data, (list, tuple)):
return super(StringListBooleanField, self).to_internal_value(data) return super(StringListBooleanField, self).to_internal_value(data)
elif data in BooleanField.TRUE_VALUES: elif data in BooleanField.TRUE_VALUES:
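
The `.lower()` normalization on the left-hand side exists so that membership tests against DRF's truthy/falsy sets succeed regardless of input casing. A small self-contained illustration (assumes a scalar input; the real field also handles lists):

from rest_framework.fields import BooleanField

def coerce_bool(value):
    # lower-casing first means 'TRUE', 'True' and 'true' all hit the same
    # entry in BooleanField.TRUE_VALUES, whichever casing DRF ships
    if isinstance(value, str):
        value = value.lower()
    if value in BooleanField.TRUE_VALUES:
        return True
    if value in BooleanField.FALSE_VALUES:
        return False
    raise ValueError(f'not a boolean-like value: {value!r}')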

View File

@@ -127,8 +127,6 @@ class SettingsRegistry(object):
encrypted = bool(field_kwargs.pop('encrypted', False)) encrypted = bool(field_kwargs.pop('encrypted', False))
defined_in_file = bool(field_kwargs.pop('defined_in_file', False)) defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
unit = field_kwargs.pop('unit', None) unit = field_kwargs.pop('unit', None)
hidden = field_kwargs.pop('hidden', False)
warning_text = field_kwargs.pop('warning_text', None)
if getattr(field_kwargs.get('child', None), 'source', None) is not None: if getattr(field_kwargs.get('child', None), 'source', None) is not None:
field_kwargs['child'].source = None field_kwargs['child'].source = None
field_instance = field_class(**field_kwargs) field_instance = field_class(**field_kwargs)
@@ -136,14 +134,12 @@ class SettingsRegistry(object):
field_instance.category = category field_instance.category = category
field_instance.depends_on = depends_on field_instance.depends_on = depends_on
field_instance.unit = unit field_instance.unit = unit
field_instance.hidden = hidden
if placeholder is not empty: if placeholder is not empty:
field_instance.placeholder = placeholder field_instance.placeholder = placeholder
field_instance.defined_in_file = defined_in_file field_instance.defined_in_file = defined_in_file
if field_instance.defined_in_file: if field_instance.defined_in_file:
field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text) field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
field_instance.encrypted = encrypted field_instance.encrypted = encrypted
field_instance.warning_text = warning_text
original_field_instance = field_instance original_field_instance = field_instance
if field_class != original_field_class: if field_class != original_field_class:
original_field_instance = original_field_class(**field_kwargs) original_field_instance = original_field_class(**field_kwargs)
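
The `hidden` and `warning_text` kwargs handled above follow the registry's pop-before-construct pattern: options only the registry understands are stripped out of `field_kwargs` before the DRF field class is instantiated, because field constructors reject unknown keyword arguments. A condensed sketch of that pattern:

def build_field(field_class, **field_kwargs):
    # registry-only options come out first...
    encrypted = bool(field_kwargs.pop('encrypted', False))
    hidden = field_kwargs.pop('hidden', False)
    warning_text = field_kwargs.pop('warning_text', None)
    # ...so only DRF-native kwargs reach the constructor
    field = field_class(**field_kwargs)
    field.encrypted = encrypted
    field.hidden = hidden
    field.warning_text = warning_text
    return field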

View File

@@ -1,7 +1,6 @@
# Python # Python
import contextlib import contextlib
import logging import logging
import psycopg
import threading import threading
import time import time
import os import os
@@ -14,7 +13,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.db import transaction, connection from django.db import transaction, connection
from django.db.utils import DatabaseError, ProgrammingError from django.db.utils import Error as DBError, ProgrammingError
from django.utils.functional import cached_property from django.utils.functional import cached_property
# Django REST Framework # Django REST Framework
@@ -81,26 +80,18 @@ def _ctit_db_wrapper(trans_safe=False):
logger.debug('Obtaining database settings in spite of broken transaction.') logger.debug('Obtaining database settings in spite of broken transaction.')
transaction.set_rollback(False) transaction.set_rollback(False)
yield yield
except ProgrammingError as e: except DBError as exc:
# Exception raised for programming errors
# Examples may be table not found or already exists,
# this generally means we can't fetch Tower configuration
# because the database hasn't actually finished migrating yet;
# this is usually a sign that a service in a container (such as ws_broadcast)
# has come up *before* the database has finished migrating, and
# especially that the conf.settings table doesn't exist yet
# syntax error in the SQL statement, wrong number of parameters specified, etc.
if trans_safe: if trans_safe:
logger.debug(f'Database settings are not available, using defaults. error: {str(e)}') level = logger.warning
else: if isinstance(exc, ProgrammingError):
logger.exception('Error modifying something related to database settings.') if 'relation' in str(exc) and 'does not exist' in str(exc):
except DatabaseError as e: # this generally means we can't fetch Tower configuration
if trans_safe: # because the database hasn't actually finished migrating yet;
cause = e.__cause__ # this is usually a sign that a service in a container (such as ws_broadcast)
if cause and hasattr(cause, 'sqlstate'): # has come up *before* the database has finished migrating, and
sqlstate = cause.sqlstate # especially that the conf.settings table doesn't exist yet
sqlstate_str = psycopg.errors.lookup(sqlstate) level = logger.debug
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str)) level(f'Database settings are not available, using defaults. error: {str(exc)}')
else: else:
logger.exception('Error modifying something related to database settings.') logger.exception('Error modifying something related to database settings.')
finally: finally:
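
The left-hand `DatabaseError` branch leans on psycopg 3's SQLSTATE machinery: Django chains the driver exception as `__cause__`, and `psycopg.errors.lookup()` maps the five-character code back to its exception class. A standalone sketch of that lookup:

import psycopg
from django.db.utils import DatabaseError

def describe(exc: DatabaseError) -> str:
    cause = exc.__cause__  # the underlying psycopg error, when present
    sqlstate = getattr(cause, 'sqlstate', None)
    if sqlstate:
        # e.g. '42P01' -> <class 'psycopg.errors.UndefinedTable'>
        return f'SQL Error state: {sqlstate} - {psycopg.errors.lookup(sqlstate)}'
    return str(exc)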

View File

@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT') settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
settings_to_cache = mocker.Mock(**{'order_by.return_value': []}) settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT' assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT') settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
settings_to_cache = mocker.Mock(**{'order_by.return_value': []}) settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
def test_empty_setting(settings, mocker): def test_empty_setting(settings, mocker):
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system') settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})}) mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
with pytest.raises(AttributeError): with pytest.raises(AttributeError):
settings.AWX_SOME_SETTING settings.AWX_SOME_SETTING
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
def test_setting_from_db(settings, mocker): def test_setting_from_db(settings, mocker):
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB') setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})}) mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
assert settings.AWX_SOME_SETTING == 'FROM_DB' assert settings.AWX_SOME_SETTING == 'FROM_DB'
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB' assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB') existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting}) setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
settings.AWX_SOME_SETTING = 'NEW-VALUE' settings.AWX_SOME_SETTING = 'NEW-VALUE'
assert existing_setting.value == 'NEW-VALUE' assert existing_setting.value == 'NEW-VALUE'
existing_setting.save.assert_called_with(update_fields=['value']) existing_setting.save.assert_called_with(update_fields=['value'])
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system') settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB') existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
del settings.AWX_SOME_SETTING del settings.AWX_SOME_SETTING
assert existing_setting.delete.call_count == 1 assert existing_setting.delete.call_count == 1
@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
# use its primary key as part of the encryption key # use its primary key as part of the encryption key
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!') setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})}) mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks) with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
cache.set('AWX_ENCRYPTED', 'SECRET!') cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!' assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!' assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
def test_readonly_sensitive_cache_data_is_encrypted(settings): def test_readonly_sensitive_cache_data_is_encrypted(settings):
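
The right-hand column wraps `mocker.patch(...)` in a `with` block. Worth noting when reading these hunks: the standard library's `unittest.mock.patch` returns a patcher object that is a true context manager, whereas pytest-mock's `mocker.patch` applies the patch immediately and undoes it at test teardown, returning the created `MagicMock` rather than a patcher. A minimal stdlib reference:

import os
from unittest import mock

with mock.patch('os.getcwd', return_value='/nowhere'):
    assert os.getcwd() == '/nowhere'   # patched only inside the block
assert os.getcwd() != '/nowhere'       # restored on exit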

View File

@@ -20,10 +20,7 @@ from rest_framework.exceptions import ParseError, PermissionDenied
# Django OAuth Toolkit # Django OAuth Toolkit
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken
# django-ansible-base
from ansible_base.lib.utils.validation import to_python_boolean from ansible_base.lib.utils.validation import to_python_boolean
from ansible_base.rbac.models import RoleEvaluation
from ansible_base.rbac import permission_registry
# AWX # AWX
from awx.main.utils import ( from awx.main.utils import (
@@ -75,6 +72,8 @@ from awx.main.models import (
WorkflowJobTemplateNode, WorkflowJobTemplateNode,
WorkflowApproval, WorkflowApproval,
WorkflowApprovalTemplate, WorkflowApprovalTemplate,
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
ROLE_SINGLETON_SYSTEM_AUDITOR,
) )
from awx.main.models.mixins import ResourceMixin from awx.main.models.mixins import ResourceMixin
@@ -265,11 +264,7 @@ class BaseAccess(object):
return self.can_change(obj, data) return self.can_change(obj, data)
def can_delete(self, obj): def can_delete(self, obj):
if self.user.is_superuser: return self.user.is_superuser
return True
if obj._meta.model_name in [cls._meta.model_name for cls in permission_registry.all_registered_models]:
return self.user.has_obj_perm(obj, 'delete')
return False
def can_copy(self, obj): def can_copy(self, obj):
return self.can_add({'reference_obj': obj}) return self.can_add({'reference_obj': obj})
@@ -598,7 +593,7 @@ class InstanceGroupAccess(BaseAccess):
- a superuser - a superuser
- admin role on the Instance group - admin role on the Instance group
I can add/delete Instance Groups: I can add/delete Instance Groups:
- a superuser (system administrator), because these are not org-scoped - a superuser (system administrator)
I can use Instance Groups when I have: I can use Instance Groups when I have:
- use_role on the instance group - use_role on the instance group
""" """
@@ -627,7 +622,7 @@ class InstanceGroupAccess(BaseAccess):
def can_delete(self, obj): def can_delete(self, obj):
if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
return False return False
return self.user.has_obj_perm(obj, 'delete') return self.user.is_superuser
class UserAccess(BaseAccess): class UserAccess(BaseAccess):
@@ -644,10 +639,7 @@ class UserAccess(BaseAccess):
""" """
model = User model = User
prefetch_related = ( prefetch_related = ('profile',)
'profile',
'resource',
)
def filtered_queryset(self): def filtered_queryset(self):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()): if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
@@ -656,7 +648,9 @@ class UserAccess(BaseAccess):
qs = ( qs = (
User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members')) User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
| User.objects.filter(pk=self.user.id) | User.objects.filter(pk=self.user.id)
| User.objects.filter(is_superuser=True) | User.objects.filter(
pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
)
).distinct() ).distinct()
return qs return qs
@@ -714,15 +708,6 @@ class UserAccess(BaseAccess):
if not allow_orphans: if not allow_orphans:
# in these cases only superusers can modify orphan users # in these cases only superusers can modify orphan users
return False return False
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
# Permission granted if the user has all permissions that the target user has
target_perms = set(
RoleEvaluation.objects.filter(role__in=obj.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
)
user_perms = set(
RoleEvaluation.objects.filter(role__in=self.user.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
)
return not (target_perms - user_perms)
return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists() return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
else: else:
return self.is_all_org_admin(obj) return self.is_all_org_admin(obj)
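
The `ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED` branch shown above implements a superset rule: a user may modify a target user only when they hold every (object, content type, codename) permission triple the target holds. Reduced to plain sets:

def can_modify(actor_perms: set, target_perms: set) -> bool:
    # an empty difference means the actor's permissions are a superset
    return not (target_perms - actor_perms)

assert can_modify({('inv', 1, 'view'), ('inv', 1, 'change')}, {('inv', 1, 'view')})
assert not can_modify({('inv', 1, 'view')}, {('inv', 1, 'view'), ('inv', 1, 'change')})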
@@ -850,7 +835,6 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
prefetch_related = ( prefetch_related = (
'created_by', 'created_by',
'modified_by', 'modified_by',
'resource', # dab_resource_registry
) )
# organization admin_role is not a parent of organization auditor_role # organization admin_role is not a parent of organization auditor_role
notification_attach_roles = ['admin_role', 'auditor_role'] notification_attach_roles = ['admin_role', 'auditor_role']
@@ -961,6 +945,9 @@ class InventoryAccess(BaseAccess):
def can_update(self, obj): def can_update(self, obj):
return self.user in obj.update_role return self.user in obj.update_role
def can_delete(self, obj):
return self.can_admin(obj, None)
def can_run_ad_hoc_commands(self, obj): def can_run_ad_hoc_commands(self, obj):
return self.user in obj.adhoc_role return self.user in obj.adhoc_role
@@ -1316,7 +1303,6 @@ class TeamAccess(BaseAccess):
'created_by', 'created_by',
'modified_by', 'modified_by',
'organization', 'organization',
'resource', # dab_resource_registry
) )
def filtered_queryset(self): def filtered_queryset(self):
@@ -1387,11 +1373,12 @@ class TeamAccess(BaseAccess):
class ExecutionEnvironmentAccess(BaseAccess): class ExecutionEnvironmentAccess(BaseAccess):
""" """
I can see an execution environment when: I can see an execution environment when:
- I can see its organization - I'm a superuser
- It is a global ExecutionEnvironment - I'm a member of the same organization
- it is a global ExecutionEnvironment
I can create/change an execution environment when: I can create/change an execution environment when:
- I'm a superuser - I'm a superuser
- I have an organization or object role that gives access - I'm an admin for the organization(s)
""" """
model = ExecutionEnvironment model = ExecutionEnvironment
@@ -1400,9 +1387,7 @@ class ExecutionEnvironmentAccess(BaseAccess):
def filtered_queryset(self): def filtered_queryset(self):
return ExecutionEnvironment.objects.filter( return ExecutionEnvironment.objects.filter(
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
| Q(organization__isnull=True)
| Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
).distinct() ).distinct()
@check_superuser @check_superuser
@@ -1415,19 +1400,13 @@ class ExecutionEnvironmentAccess(BaseAccess):
def can_change(self, obj, data): def can_change(self, obj, data):
if obj and obj.organization_id is None: if obj and obj.organization_id is None:
raise PermissionDenied raise PermissionDenied
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED: if self.user not in obj.organization.execution_environment_admin_role:
if not self.user.has_obj_perm(obj, 'change'): raise PermissionDenied
if data and 'organization' in data:
new_org = get_object_from_data('organization', Organization, data, obj=obj)
if not new_org or self.user not in new_org.execution_environment_admin_role:
return False return False
else: return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
if self.user not in obj.organization.execution_environment_admin_role:
raise PermissionDenied
if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
return False
# Special case that check_related does not catch, org users can not remove the organization from the EE
if data and ('organization' in data or 'organization_id' in data):
if (not data.get('organization')) and (not data.get('organization_id')):
return False
return True
def can_delete(self, obj): def can_delete(self, obj):
if obj.managed: if obj.managed:
@@ -1599,8 +1578,6 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
inventory = get_value(Inventory, 'inventory') inventory = get_value(Inventory, 'inventory')
if inventory: if inventory:
if self.user not in inventory.use_role: if self.user not in inventory.use_role:
if self.save_messages:
self.messages['inventory'] = [_('You do not have use permission on Inventory')]
return False return False
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'): if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
@@ -1609,16 +1586,11 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
project = get_value(Project, 'project') project = get_value(Project, 'project')
# If the user has admin access to the project (as an org admin), should # If the user has admin access to the project (as an org admin), should
# be able to proceed without additional checks. # be able to proceed without additional checks.
if not project: if project:
return self.user in project.use_role
else:
return False return False
if self.user not in project.use_role:
if self.save_messages:
self.messages['project'] = [_('You do not have use permission on Project')]
return False
return True
@check_superuser @check_superuser
def can_copy_related(self, obj): def can_copy_related(self, obj):
""" """
@@ -2102,23 +2074,11 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
if not data: # So the browseable API will work if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists() return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True): return bool(
if data.get('organization', None) is None: self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
if self.save_messages: and self.check_related('inventory', Inventory, data, role_field='use_role')
self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')] and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
return False )
if not self.check_related('inventory', Inventory, data, role_field='use_role'):
if self.save_messages:
self.messages['inventory'] = [_('You do not have use_role to the inventory')]
return False
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
if self.save_messages:
self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
return False
return True
def can_copy(self, obj): def can_copy(self, obj):
if self.save_messages: if self.save_messages:
@@ -2627,8 +2587,6 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
if not JobLaunchConfigAccess(self.user).can_add(data): if not JobLaunchConfigAccess(self.user).can_add(data):
return False return False
if not data: if not data:
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
return self.user.has_roles.filter(permission_partials__codename__in=['execute_jobtemplate', 'update_project', 'update_inventory']).exists()
return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists() return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True) return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
@@ -2650,15 +2608,13 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
class NotificationTemplateAccess(BaseAccess): class NotificationTemplateAccess(BaseAccess):
""" """
Run standard logic from DAB RBAC I can see/use a notification_template if I have permission to
""" """
model = NotificationTemplate model = NotificationTemplate
prefetch_related = ('created_by', 'modified_by', 'organization') prefetch_related = ('created_by', 'modified_by', 'organization')
def filtered_queryset(self): def filtered_queryset(self):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
return self.model.access_qs(self.user, 'view')
return self.model.objects.filter( return self.model.objects.filter(
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations) Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
).distinct() ).distinct()
@@ -2671,7 +2627,10 @@ class NotificationTemplateAccess(BaseAccess):
@check_superuser @check_superuser
def can_change(self, obj, data): def can_change(self, obj, data):
return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role') if obj.organization is None:
# only superusers are allowed to edit orphan notification templates
return False
return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
def can_admin(self, obj, data): def can_admin(self, obj, data):
return self.can_change(obj, data) return self.can_change(obj, data)
@@ -2681,7 +2640,9 @@ class NotificationTemplateAccess(BaseAccess):
@check_superuser @check_superuser
def can_start(self, obj, validate_license=True): def can_start(self, obj, validate_license=True):
return self.can_change(obj, None) if obj.organization is None:
return False
return self.user in obj.organization.notification_admin_role
class NotificationAccess(BaseAccess): class NotificationAccess(BaseAccess):
@@ -2822,7 +2783,7 @@ class ActivityStreamAccess(BaseAccess):
| Q(notification_template__organization__in=auditing_orgs) | Q(notification_template__organization__in=auditing_orgs)
| Q(notification__notification_template__organization__in=auditing_orgs) | Q(notification__notification_template__organization__in=auditing_orgs)
| Q(label__organization__in=auditing_orgs) | Q(label__organization__in=auditing_orgs)
| Q(role__in=Role.visible_roles(self.user) if auditing_orgs else []) | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
) )
project_set = Project.accessible_pk_qs(self.user, 'read_role') project_set = Project.accessible_pk_qs(self.user, 'read_role')
@@ -2879,10 +2840,13 @@ class RoleAccess(BaseAccess):
def filtered_queryset(self): def filtered_queryset(self):
result = Role.visible_roles(self.user) result = Role.visible_roles(self.user)
# Make system admin/auditor mandatorily visible. # Sanity check: is the requesting user an orphaned non-admin/auditor?
mandatories = ('system_administrator', 'system_auditor') # if yes, make system admin/auditor mandatorily visible.
super_qs = Role.objects.filter(singleton_name__in=mandatories) if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
return result | super_qs mandatories = ('system_administrator', 'system_auditor')
super_qs = Role.objects.filter(singleton_name__in=mandatories)
result = result | super_qs
return result
def can_add(self, obj, data): def can_add(self, obj, data):
# Unsupported for now # Unsupported for now

View File

@@ -66,8 +66,10 @@ class FixedSlidingWindow:
class RelayWebsocketStatsManager: class RelayWebsocketStatsManager:
def __init__(self, local_hostname): def __init__(self, event_loop, local_hostname):
self._local_hostname = local_hostname self._local_hostname = local_hostname
self._event_loop = event_loop
self._stats = dict() self._stats = dict()
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
@@ -92,10 +94,7 @@ class RelayWebsocketStatsManager:
self.start() self.start()
def start(self): def start(self):
self.async_task = asyncio.get_running_loop().create_task( self.async_task = self._event_loop.create_task(self.run_loop())
self.run_loop(),
name='RelayWebsocketStatsManager.run_loop',
)
return self.async_task return self.async_task
@classmethod @classmethod
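
The left-hand column drops the stored event loop in favor of `asyncio.get_running_loop()` at call time and gives the task a name. Task names (Python 3.8+) surface in `repr(task)` and `asyncio.all_tasks()` listings, which makes long-lived background tasks easier to identify. A runnable sketch:

import asyncio

async def run_loop():
    await asyncio.sleep(0)

async def main():
    task = asyncio.get_running_loop().create_task(
        run_loop(),
        name='RelayWebsocketStatsManager.run_loop',
    )
    print(task.get_name())  # RelayWebsocketStatsManager.run_loop
    await task

asyncio.run(main())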

View File

@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
resolved_action, resolved_action,
resolved_role, resolved_role,
-- '-' operator listed here: -- '-' operator listed here:
-- https://www.postgresql.org/docs/15/functions-json.html -- https://www.postgresql.org/docs/12/functions-json.html
-- note that operator is only supported by jsonb objects -- note that operator is only supported by jsonb objects
-- https://www.postgresql.org/docs/current/datatype-json.html -- https://www.postgresql.org/docs/current/datatype-json.html
(CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats, (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
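
The query's `- 'artifact_data'` relies on PostgreSQL's jsonb deletion operator, which removes a top-level key and, as the linked docs note, exists only for `jsonb`, not plain `json`. A quick psycopg check (the connection string is a placeholder):

import psycopg

with psycopg.connect('dbname=awx') as conn:  # placeholder DSN
    (trimmed,) = conn.execute(
        """SELECT '{"ok": true, "artifact_data": "huge"}'::jsonb - 'artifact_data'"""
    ).fetchone()
    print(trimmed)  # {'ok': True}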

View File

@@ -1,40 +1,7 @@
from django.apps import AppConfig from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from awx.main.utils.named_url_graph import _customize_graph, generate_graph
from awx.conf import register, fields
class MainConfig(AppConfig): class MainConfig(AppConfig):
name = 'awx.main' name = 'awx.main'
verbose_name = _('Main') verbose_name = _('Main')
def load_named_url_feature(self):
models = [m for m in self.get_models() if hasattr(m, 'get_absolute_url')]
generate_graph(models)
_customize_graph()
register(
'NAMED_URL_FORMATS',
field_class=fields.DictField,
read_only=True,
label=_('Formats of all available named urls'),
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
category=_('Named URL'),
category_slug='named-url',
)
register(
'NAMED_URL_GRAPH_NODES',
field_class=fields.DictField,
read_only=True,
label=_('List of all named url graph nodes.'),
help_text=_(
'Read-only list of key-value pairs that exposes named URL graph topology.'
' Use this list to programmatically generate named URLs for resources'
),
category=_('Named URL'),
category_slug='named-url',
)
def ready(self):
super().ready()
self.load_named_url_feature()

View File

@@ -2,7 +2,6 @@
import logging import logging
# Django # Django
from django.core.checks import Error
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
# Django REST Framework # Django REST Framework
@@ -93,7 +92,6 @@ register(
), ),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
required=False,
) )
register( register(
@@ -776,7 +774,6 @@ register(
allow_null=True, allow_null=True,
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
required=False,
) )
register( register(
'AUTOMATION_ANALYTICS_LAST_ENTRIES', 'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -818,7 +815,6 @@ register(
help_text=_('Max jobs to allow bulk jobs to launch'), help_text=_('Max jobs to allow bulk jobs to launch'),
category=_('Bulk Actions'), category=_('Bulk Actions'),
category_slug='bulk', category_slug='bulk',
hidden=True,
) )
register( register(
@@ -829,7 +825,6 @@ register(
help_text=_('Max number of hosts to allow to be created in a single bulk action'), help_text=_('Max number of hosts to allow to be created in a single bulk action'),
category=_('Bulk Actions'), category=_('Bulk Actions'),
category_slug='bulk', category_slug='bulk',
hidden=True,
) )
register( register(
@@ -840,7 +835,6 @@ register(
help_text=_('Max number of hosts to allow to be deleted in a single bulk action'), help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
category=_('Bulk Actions'), category=_('Bulk Actions'),
category_slug='bulk', category_slug='bulk',
hidden=True,
) )
register( register(
@@ -851,7 +845,6 @@ register(
help_text=_('Enable preview of new user interface.'), help_text=_('Enable preview of new user interface.'),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
hidden=True,
) )
register( register(
@@ -929,16 +922,6 @@ register(
category_slug='debug', category_slug='debug',
) )
register(
'RECEPTOR_KEEP_WORK_ON_ERROR',
field_class=fields.BooleanField,
label=_('Keep receptor work on error'),
default=False,
help_text=_('Prevent receptor work from being released when an error is detected'),
category=('Debug'),
category_slug='debug',
)
def logging_validate(serializer, attrs): def logging_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'): if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
@@ -965,27 +948,3 @@ def logging_validate(serializer, attrs):
register_validate('logging', logging_validate) register_validate('logging', logging_validate)
def csrf_trusted_origins_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'CSRF_TRUSTED_ORIGINS'):
return attrs
if 'CSRF_TRUSTED_ORIGINS' not in attrs:
return attrs
errors = []
for origin in attrs['CSRF_TRUSTED_ORIGINS']:
if "://" not in origin:
errors.append(
Error(
"As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
"setting must start with a scheme (usually http:// or "
"https://) but found %s. See the release notes for details." % origin,
)
)
if errors:
error_messages = [error.msg for error in errors]
raise serializers.ValidationError(_('\n'.join(error_messages)))
return attrs
register_validate('system', csrf_trusted_origins_validate)

View File

@@ -14,7 +14,7 @@ __all__ = [
'STANDARD_INVENTORY_UPDATE_ENV', 'STANDARD_INVENTORY_UPDATE_ENV',
] ]
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization') CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
PRIVILEGE_ESCALATION_METHODS = [ PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')), ('sudo', _('Sudo')),
('su', _('Su')), ('su', _('Su')),
@@ -43,7 +43,6 @@ STANDARD_INVENTORY_UPDATE_ENV = {
} }
CAN_CANCEL = ('new', 'pending', 'waiting', 'running') CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL ACTIVE_STATES = CAN_CANCEL
ERROR_STATES = ('error',)
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF']) MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
CENSOR_VALUE = '************' CENSOR_VALUE = '************'
ENV_BLOCKLIST = frozenset( ENV_BLOCKLIST = frozenset(
@@ -115,28 +114,3 @@ SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS = 'unique_managed_hosts'
# Shared prefetch to use for creating a queryset for the purpose of writing or saving facts # Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id') HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
# Data for RBAC compatibility layer
role_name_to_perm_mapping = {
'adhoc_role': ['adhoc_'],
'approval_role': ['approve_'],
'auditor_role': ['audit_'],
'admin_role': ['change_', 'add_', 'delete_'],
'execute_role': ['execute_'],
'read_role': ['view_'],
'update_role': ['update_'],
'member_role': ['member_'],
'use_role': ['use_'],
}
org_role_to_permission = {
'notification_admin_role': 'add_notificationtemplate',
'project_admin_role': 'add_project',
'execute_role': 'execute_jobtemplate',
'inventory_admin_role': 'add_inventory',
'credential_admin_role': 'add_credential',
'workflow_admin_role': 'add_workflowjobtemplate',
'job_template_admin_role': 'change_jobtemplate', # TODO: this doesn't really work, solution not clear
'execution_environment_admin_role': 'add_executionenvironment',
'auditor_role': 'view_project', # TODO: also doesn't really work
}
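
The mapping tables above translate legacy AWX role fields into DAB-style permission codename prefixes. How such a prefix mapping would be applied, as a tiny self-contained example (the mapping here is a trimmed copy):

role_name_to_perm_mapping = {'read_role': ['view_'], 'admin_role': ['change_', 'add_', 'delete_']}

def perms_for(role_field, codenames):
    prefixes = role_name_to_perm_mapping.get(role_field, [])
    return [c for c in codenames if any(c.startswith(p) for p in prefixes)]

print(perms_for('read_role', ['view_inventory', 'change_inventory']))
# ['view_inventory']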

View File

@@ -1,10 +1,9 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import ClientSecretCredential
from msrestazure import azure_cloud
from .plugin import CredentialPlugin from .plugin import CredentialPlugin
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials
from msrestazure import azure_cloud
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py # https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
@@ -55,9 +54,22 @@ azure_keyvault_inputs = {
def azure_keyvault_backend(**kwargs): def azure_keyvault_backend(**kwargs):
csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret']) url = kwargs['url']
kv = SecretClient(credential=csc, vault_url=kwargs['url']) [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]
return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value
def auth_callback(server, resource, scope):
credentials = ServicePrincipalCredentials(
url=url,
client_id=kwargs['client'],
secret=kwargs['secret'],
tenant=kwargs['tenant'],
resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
)
token = credentials.token
return token['token_type'], token['access_token']
kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend) azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
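
The two columns above perform the same lookup against two generations of Azure SDK: the left pairs `azure-identity`'s `ClientSecretCredential` with `azure-keyvault-secrets`' `SecretClient`, while the right drives the retired `KeyVaultClient` through a manual auth callback. A standalone sketch of the newer pair (all identifiers and the vault URL are placeholders):

from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient

credential = ClientSecretCredential(
    tenant_id='00000000-0000-0000-0000-000000000000',
    client_id='my-app-id',
    client_secret='my-app-secret',
)
client = SecretClient(vault_url='https://example-vault.vault.azure.net/', credential=credential)
print(client.get_secret(name='demo-secret').value)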

View File

@@ -1,7 +1,6 @@
import os import os
import psycopg import psycopg
import select import select
from copy import deepcopy
from contextlib import contextmanager from contextlib import contextmanager
@@ -95,23 +94,18 @@ class PubSub(object):
def create_listener_connection(): def create_listener_connection():
conf = deepcopy(settings.DATABASES['default']) conf = settings.DATABASES['default'].copy()
conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {})) conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
# Modify the application name to distinguish from other connections the process might use # Modify the application name to distinguish from other connections the process might use
conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener') conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
# Apply overrides specifically for the listener connection # Apply overrides specifically for the listener connection
for k, v in settings.LISTENER_DATABASES.get('default', {}).items(): for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
if k != 'OPTIONS': conf[k] = v
conf[k] = v
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items(): for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
conf['OPTIONS'][k] = v conf['OPTIONS'][k] = v
# Allow password-less authentication connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} password={conf['PASSWORD']} port={conf['PORT']}"
if 'PASSWORD' in conf:
conf['OPTIONS']['password'] = conf.pop('PASSWORD')
connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} port={conf['PORT']}"
return psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS']) return psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
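
The `deepcopy` in the left-hand column guards against a classic shallow-copy hazard: `dict.copy()` shares any nested dict, so mutating `conf['OPTIONS']` on a shallow copy can leak back into the live `settings.DATABASES` entry. Demonstrated in isolation:

from copy import deepcopy

databases = {'default': {'NAME': 'awx', 'OPTIONS': {'sslmode': 'prefer'}}}

shallow = databases['default'].copy()
shallow['OPTIONS']['application_name'] = 'listener'
assert 'application_name' in databases['default']['OPTIONS']      # leaked into the original

databases['default']['OPTIONS'].pop('application_name')           # reset
deep = deepcopy(databases['default'])
deep['OPTIONS']['application_name'] = 'listener'
assert 'application_name' not in databases['default']['OPTIONS']  # isolated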

View File

@@ -162,7 +162,7 @@ class AWXConsumerRedis(AWXConsumerBase):
class AWXConsumerPG(AWXConsumerBase): class AWXConsumerPG(AWXConsumerBase):
def __init__(self, *args, schedule=None, **kwargs): def __init__(self, *args, schedule=None, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.pg_max_wait = getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE) self.pg_max_wait = settings.DISPATCHER_DB_DOWNTIME_TOLERANCE
# if no successful loops have run since startup, then we should fail right away # if no successful loops have run since startup, then we should fail right away
self.pg_is_down = True # set so that we fail if we get database errors on startup self.pg_is_down = True # set so that we fail if we get database errors on startup
init_time = time.time() init_time = time.time()
@@ -259,12 +259,6 @@ class AWXConsumerPG(AWXConsumerBase):
current_downtime = time.time() - self.pg_down_time current_downtime = time.time() - self.pg_down_time
if current_downtime > self.pg_max_wait: if current_downtime > self.pg_max_wait:
logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting") logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
# Sending QUIT to multiprocess queue to signal workers to exit
for worker in self.pool.workers:
try:
worker.quit()
except Exception:
logger.exception(f"Error sending QUIT to worker {worker}")
raise raise
# Wait for a second before next attempt, but still listen for any shutdown signals # Wait for a second before next attempt, but still listen for any shutdown signals
for i in range(10): for i in range(10):
@@ -276,12 +270,6 @@ class AWXConsumerPG(AWXConsumerBase):
except Exception: except Exception:
# Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
logger.exception('Encountered unhandled error in dispatcher main loop') logger.exception('Encountered unhandled error in dispatcher main loop')
# Sending QUIT to multiprocess queue to signal workers to exit
for worker in self.pool.workers:
try:
worker.quit()
except Exception:
logger.exception(f"Error sending QUIT to worker {worker}")
raise raise
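
The `getattr` in the left-hand column keeps a misspelled legacy setting name working: if a deployment still defines `DISPATCHER_DB_DOWNTOWN_TOLLERANCE`, that value wins; otherwise the corrected `DISPATCHER_DB_DOWNTIME_TOLERANCE` applies. The pattern in miniature, with a stand-in settings object:

class FakeSettings:  # stand-in for django.conf.settings
    DISPATCHER_DB_DOWNTIME_TOLERANCE = 40

settings = FakeSettings()
pg_max_wait = getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE',
                      settings.DISPATCHER_DB_DOWNTIME_TOLERANCE)
assert pg_max_wait == 40  # falls back when the legacy name is absent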

View File

@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
kwargs.setdefault('related_name', '+') kwargs.setdefault('related_name', '+')
kwargs.setdefault('null', 'True') kwargs.setdefault('null', 'True')
kwargs.setdefault('editable', False) kwargs.setdefault('editable', False)
kwargs.setdefault('on_delete', models.SET_NULL) kwargs.setdefault('on_delete', models.CASCADE)
super(ImplicitRoleField, self).__init__(*args, **kwargs) super(ImplicitRoleField, self).__init__(*args, **kwargs)
def deconstruct(self): def deconstruct(self):
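
The `on_delete` flip above changes what happens to a row holding an `ImplicitRoleField` when its referenced Role is deleted: `SET_NULL` (left) nulls the pointer and keeps the row, `CASCADE` (right) deletes the row as well. Hypothetical models showing the `SET_NULL` behavior (assumes an installed app):

from django.db import models

class Role(models.Model):
    pass

class Team(models.Model):
    # deleting the Role leaves the Team in place with admin_role = NULL,
    # instead of cascading the delete to the Team itself
    admin_role = models.ForeignKey(
        Role, related_name='+', null=True, editable=False,
        on_delete=models.SET_NULL,
    )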

View File

@@ -1,12 +0,0 @@
from django.core.management.base import BaseCommand, CommandError
from awx.main.models.ha import Instance
class Command(BaseCommand):
help = 'Check if the task manager instance is ready; raise an error if not. Can be used as a readiness probe for k8s.'
def handle(self, *args, **options):
if Instance.objects.me().node_state != Instance.States.READY:
raise CommandError('Instance is not ready') # so that return code is not 0
return
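
This command signals readiness purely through its exit status: a `CommandError` makes `manage.py` exit non-zero, which is exactly what a Kubernetes exec readiness probe consumes. The same check invoked from Python — the command name below is a placeholder, since the filename is not shown in this view:

from django.core.management import call_command
from django.core.management.base import CommandError

def instance_is_ready() -> bool:
    try:
        call_command('instance_ready')  # placeholder command name
        return True
    except CommandError:
        return False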

View File

@@ -2,7 +2,6 @@
# All Rights Reserved # All Rights Reserved
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from django.db import transaction
from crum import impersonate from crum import impersonate
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
from awx.main.signals import disable_computed_fields from awx.main.signals import disable_computed_fields
@@ -14,12 +13,6 @@ class Command(BaseCommand):
help = 'Creates preload Tower data if there is none.' help = 'Creates preload Tower data if there is none.'
def handle(self, *args, **kwargs): def handle(self, *args, **kwargs):
# Wrap the operation in an atomic block, so we do not accidentally
# create the organization but not the project, etc.
with transaction.atomic():
self._handle()
def _handle(self):
changed = False changed = False
# Create a default organization as the first superuser found. # Create a default organization as the first superuser found.
@@ -50,11 +43,10 @@ class Command(BaseCommand):
ssh_type = CredentialType.objects.filter(namespace='ssh').first() ssh_type = CredentialType.objects.filter(namespace='ssh').first()
c, _ = Credential.objects.get_or_create( c, _ = Credential.objects.get_or_create(
credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
) )
if superuser: c.admin_role.members.add(superuser)
c.admin_role.members.add(superuser)
public_galaxy_credential, _ = Credential.objects.get_or_create( public_galaxy_credential, _ = Credential.objects.get_or_create(
name='Ansible Galaxy', name='Ansible Galaxy',

View File

@@ -1,195 +0,0 @@
import json
import os
import sys
import re
from typing import Any
from django.core.management.base import BaseCommand
from django.conf import settings
from awx.conf import settings_registry
class Command(BaseCommand):
help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'
DAB_SAML_AUTHENTICATOR_KEYS = {
"SP_ENTITY_ID": True,
"SP_PUBLIC_CERT": True,
"SP_PRIVATE_KEY": True,
"ORG_INFO": True,
"TECHNICAL_CONTACT": True,
"SUPPORT_CONTACT": True,
"SP_EXTRA": False,
"SECURITY_CONFIG": False,
"EXTRA_DATA": False,
"ENABLED_IDPS": True,
"CALLBACK_URL": False,
}
DAB_LDAP_AUTHENTICATOR_KEYS = {
"SERVER_URI": True,
"BIND_DN": False,
"BIND_PASSWORD": False,
"CONNECTION_OPTIONS": False,
"GROUP_TYPE": True,
"GROUP_TYPE_PARAMS": True,
"GROUP_SEARCH": False,
"START_TLS": False,
"USER_DN_TEMPLATE": True,
"USER_ATTR_MAP": True,
"USER_SEARCH": False,
}
def is_enabled(self, settings, keys):
missing_fields = []
for key, required in keys.items():
if required and not settings.get(key):
missing_fields.append(key)
if missing_fields:
return False, missing_fields
return True, None
def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
awx_ldap_settings = {}
for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
value = getattr(settings, awx_ldap_setting, None)
awx_ldap_settings[key] = value
grouped_settings = {}
for key, value in awx_ldap_settings.items():
match = re.search(r'(\d+)', key)
index = int(match.group()) if match else 0
new_key = re.sub(r'\d+_', '', key)
if index not in grouped_settings:
grouped_settings[index] = {}
grouped_settings[index][new_key] = value
if new_key == "GROUP_TYPE" and value:
grouped_settings[index][new_key] = type(value).__name__
if new_key == "SERVER_URI" and value:
value = value.split(", ")
grouped_settings[index][new_key] = value
if type(value).__name__ == "LDAPSearch":
data = []
data.append(value.base_dn)
data.append("SCOPE_SUBTREE")
data.append(value.filterstr)
grouped_settings[index][new_key] = data
return grouped_settings
def get_awx_saml_settings(self) -> dict[str, Any]:
awx_saml_settings = {}
for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)
return awx_saml_settings
def format_config_data(self, enabled, awx_settings, type, keys, name):
config = {
"type": f"ansible_base.authentication.authenticator_plugins.{type}",
"name": name,
"enabled": enabled,
"create_objects": True,
"users_unique": False,
"remove_users": True,
"configuration": {},
}
for k in keys:
v = awx_settings.get(k)
config["configuration"].update({k: v})
if type == "saml":
idp_to_key_mapping = {
"url": "IDP_URL",
"x509cert": "IDP_X509_CERT",
"entity_id": "IDP_ENTITY_ID",
"attr_email": "IDP_ATTR_EMAIL",
"attr_groups": "IDP_GROUPS",
"attr_username": "IDP_ATTR_USERNAME",
"attr_last_name": "IDP_ATTR_LAST_NAME",
"attr_first_name": "IDP_ATTR_FIRST_NAME",
"attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
}
for idp_name in awx_settings.get("ENABLED_IDPS", {}):
for key in idp_to_key_mapping:
value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
if value is not None:
config["name"] = idp_name
config["configuration"].update({idp_to_key_mapping[key]: value})
return config
def add_arguments(self, parser):
parser.add_argument(
"output_file",
nargs="?",
type=str,
default=None,
help="Output JSON file path",
)
def handle(self, *args, **options):
try:
data = []
# dump SAML settings
awx_saml_settings = self.get_awx_saml_settings()
awx_saml_enabled, saml_missing_fields = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
if awx_saml_enabled:
awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
data.append(
self.format_config_data(
awx_saml_enabled,
awx_saml_settings,
"saml",
self.DAB_SAML_AUTHENTICATOR_KEYS,
awx_saml_name,
)
)
else:
data.append({"SAML_missing_fields": saml_missing_fields})
# dump LDAP settings
awx_ldap_group_settings = self.get_awx_ldap_settings()
for awx_ldap_name, awx_ldap_settings in awx_ldap_group_settings.items():
awx_ldap_enabled, ldap_missing_fields = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
if awx_ldap_enabled:
data.append(
self.format_config_data(
awx_ldap_enabled,
awx_ldap_settings,
"ldap",
self.DAB_LDAP_AUTHENTICATOR_KEYS,
f"LDAP_{awx_ldap_name}",
)
)
else:
data.append({f"LDAP_{awx_ldap_name}_missing_fields": ldap_missing_fields})
# write to file if requested
if options["output_file"]:
# Define the path for the output JSON file
output_file = options["output_file"]
# Ensure the directory exists
os.makedirs(os.path.dirname(output_file), exist_ok=True)
# Write data to the JSON file
with open(output_file, "w") as f:
json.dump(data, f, indent=4)
self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
else:
self.stdout.write(json.dumps(data, indent=4))
except Exception as e:
self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
sys.exit(1)
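
`get_awx_ldap_settings` above folds numbered setting names (e.g. `AUTH_LDAP_1_SERVER_URI`) into per-index dicts by extracting the digits and then deleting them from the key. The trick in isolation:

import re

def group_by_index(flat):
    grouped = {}
    for key, value in flat.items():
        match = re.search(r'(\d+)', key)
        index = int(match.group()) if match else 0  # unnumbered settings land in slot 0
        grouped.setdefault(index, {})[re.sub(r'\d+_', '', key)] = value
    return grouped

print(group_by_index({'1_SERVER_URI': 'ldap://a', 'SERVER_URI': 'ldap://b'}))
# {1: {'SERVER_URI': 'ldap://a'}, 0: {'SERVER_URI': 'ldap://b'}}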

View File

@@ -1,151 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
# Django
from django.core.management.base import BaseCommand
from django.db import connection
import json
import re
class Command(BaseCommand):
"""
Emits some simple statistics suitable for external monitoring
"""
help = 'Run queries that provide an overview of the performance of the system over a given period of time'
def add_arguments(self, parser):
parser.add_argument('--since', action='store', dest='days', type=str, default="1", help='Max days to look back to for data')
parser.add_argument('--limit', action='store', dest='limit', type=str, default="10", help='Max number of records for database queries (LIMIT)')
def execute_query(self, query):
with connection.cursor() as cursor:
cursor.execute(query)
rows = cursor.fetchall()
return rows
def jsonify(self, title, keys, values, query):
result = []
query = re.sub(r'\n', ' ', query)
query = re.sub(r'\s{2,}', ' ', query)
for value in values:
result.append(dict(zip(keys, value)))
return {title: result, 'count': len(values), 'query': query}
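# Example of the structure jsonify() builds (title and values are illustrative):
#   {"pending_jobs": [{"job_id": 42, "job_name": "demo"}], "count": 1, "query": "SELECT ..."}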
def jobs_pending_duration(self, days, limit):
"""Return list of jobs sorted by time in pending within configured number of days (within limit)"""
query = f"""
SELECT name, id AS job_id, unified_job_template_id, created, started - created AS pending_duration
FROM main_unifiedjob
WHERE finished IS NOT null
AND started IS NOT null
AND cancel_flag IS NOT true
AND created > NOW() - INTERVAL '{days} days'
AND started - created > INTERVAL '0 seconds'
ORDER BY pending_duration DESC
LIMIT {limit};"""
values = self.execute_query(query)
return self.jsonify(
title='completed_or_started_jobs_by_pending_duration',
keys=('job_name', 'job_id', 'unified_job_template_id', 'job_created', 'pending_duration'),
values=values,
query=query,
)
def times_of_day_pending_more_than_X_min(self, days, limit, minutes_pending):
"""Return list of jobs sorted by time in pending within configured number of days (within limit)"""
query = f"""
SELECT
date_trunc('hour', created) as day_and_hour,
COUNT(created) as count_jobs_pending_greater_than_{minutes_pending}_min
FROM main_unifiedjob
WHERE started IS NOT NULL
AND started - created > INTERVAL '{minutes_pending} minutes'
AND created > NOW() - INTERVAL '{days} days'
GROUP BY date_trunc('hour', created)
ORDER BY count_jobs_pending_greater_than_{minutes_pending}_min DESC
LIMIT {limit};"""
values = self.execute_query(query)
return self.jsonify(
title=f'times_of_day_pending_more_than_{minutes_pending}',
keys=('day_and_hour', f'count_jobs_pending_more_than_{minutes_pending}_min'),
values=values,
query=query,
)
def pending_jobs_details(self, days, limit):
"""Return list of jobs that are in pending and list details such as reasons they may be blocked, within configured number of days and limit."""
query = f"""
SELECT DISTINCT ON(A.id) A.name, A.id, A.unified_job_template_id, A.created, NOW() - A.created as pending_duration, F.allow_simultaneous, B.current_job_id as current_ujt_job, I.to_unifiedjob_id as dependency_job_id, A.dependencies_processed
FROM main_unifiedjob A
LEFT JOIN (
SELECT C.id, C.current_job_id FROM main_unifiedjobtemplate as C
) B
ON A.unified_job_template_id = B.id
LEFT JOIN main_job F ON A.id = F.unifiedjob_ptr_id
LEFT JOIN (
SELECT * FROM main_unifiedjob_dependent_jobs as G
RIGHT JOIN main_unifiedjob H ON G.to_unifiedjob_id = H.id
) I
ON A.id = I.from_unifiedjob_id
WHERE A.status = 'pending'
AND A.created > NOW() - INTERVAL '{days} days'
ORDER BY id DESC
LIMIT {limit};"""
values = self.execute_query(query)
return self.jsonify(
title='pending_jobs_details',
keys=(
'job_name',
'job_id',
'unified_job_template_id',
'job_created',
'pending_duration',
'allow_simultaneous',
'current_ujt_job',
'dependency_job_id',
'dependencies_processed',
),
values=values,
query=query,
)
def jobs_by_FUNC_event_processing_time(self, func, days, limit):
"""Return list of jobs sorted by MAX job event procesing time within configured number of days (within limit)"""
if func not in ('MAX', 'MIN', 'AVG', 'SUM'):
raise RuntimeError('Only able to assess job events grouped by job with MAX, MIN, AVG, SUM functions')
query = f"""SELECT job_id, {func}(A.modified - A.created) as job_event_processing_delay_{func}, B.name, B.created, B.finished, B.controller_node, B.execution_node
FROM main_jobevent A
RIGHT JOIN (
SELECT id, created, name, finished, controller_node, execution_node FROM
main_unifiedjob
WHERE created > NOW() - INTERVAL '{days} days'
AND created IS NOT null
AND finished IS NOT null
AND id IS NOT null
AND name IS NOT null
) B
ON A.job_id=B.id
WHERE A.job_id is not null
GROUP BY job_id, B.name, B.created, B.finished, B.controller_node, B.execution_node
ORDER BY job_event_processing_delay_{func} DESC
LIMIT {limit};"""
values = self.execute_query(query)
return self.jsonify(
title=f'jobs_by_{func}_event_processing',
keys=('job_id', f'{func}_job_event_processing_delay', 'job_name', 'job_created_time', 'job_finished_time', 'controller_node', 'execution_node'),
values=values,
query=query,
)
def handle(self, *args, **options):
items = []
for func in ('MAX', 'MIN', 'AVG'):
items.append(self.jobs_by_FUNC_event_processing_time(func, options['days'], options['limit']))
items.append(self.jobs_pending_duration(options['days'], options['limit']))
items.append(self.pending_jobs_details(options['days'], options['limit']))
items.append(self.times_of_day_pending_more_than_X_min(options['days'], options['limit'], minutes_pending=10))
self.stdout.write(json.dumps(items, indent=4, sort_keys=True, default=str))
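# Usage sketch (run through awx-manage; the registered name is taken from this
# module's filename, which is not shown here):
#   awx-manage <this_command> --since 7 --limit 20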

View File

@@ -92,6 +92,8 @@ class Command(BaseCommand):
return host_stats return host_stats
def handle(self, *arg, **options): def handle(self, *arg, **options):
WebsocketsMetricsServer().start()
# it's necessary to delay this import in case # it's necessary to delay this import in case
# database migrations are still running # database migrations are still running
from awx.main.models.ha import Instance from awx.main.models.ha import Instance
@@ -101,9 +103,8 @@ class Command(BaseCommand):
migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes())) migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
connection.close() # Because of async nature, main loop will use new connection, so close this connection.close() # Because of async nature, main loop will use new connection, so close this
except Exception as exc: except Exception as exc:
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
# sleeping before logging because logging rely on setting which require database connection... time.sleep(10)
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
return return
# In containerized deployments, migrations happen in the task container, # In containerized deployments, migrations happen in the task container,
@@ -122,14 +123,13 @@ class Command(BaseCommand):
return return
try: try:
my_hostname = Instance.objects.my_hostname() # This relies on settings.CLUSTER_HOST_ID which requires database connection my_hostname = Instance.objects.my_hostname()
logger.info('Active instance with hostname {} is registered.'.format(my_hostname)) logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
except RuntimeError as e: except RuntimeError as e:
# the CLUSTER_HOST_ID in the task, and web instance must match and # the CLUSTER_HOST_ID in the task, and web instance must match and
# ensure network connectivity between the task and web instance # ensure network connectivity between the task and web instance
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
# sleeping before logging because logging rely on setting which require database connection... time.sleep(5)
logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
return return
if options.get('status'): if options.get('status'):
@@ -166,16 +166,8 @@ class Command(BaseCommand):
return return
WebsocketsMetricsServer().start()
try: try:
logger.info('Starting Websocket Relayer...')
websocket_relay_manager = WebSocketRelayManager() websocket_relay_manager = WebSocketRelayManager()
asyncio.run(websocket_relay_manager.run()) asyncio.run(websocket_relay_manager.run())
except KeyboardInterrupt: except KeyboardInterrupt:
logger.info('Terminating Websocket Relayer') logger.info('Terminating Websocket Relayer')
except BaseException as e: # BaseException is used to catch all exceptions including asyncio.CancelledError
time.sleep(10) # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
# sleeping before logging because logging rely on setting which require database connection...
logger.warning(f"Encounter error while running Websocket Relayer {e}, slept for 10s...")
return

View File

@@ -1,25 +1,25 @@
# Copyright (c) 2015 Ansible, Inc. # Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved. # All Rights Reserved.
import functools
import logging import logging
import threading import threading
import time import time
import urllib.parse import urllib.parse
from pathlib import Path, PurePosixPath
from django.conf import settings from django.conf import settings
from django.contrib.auth import logout from django.contrib.auth import logout
from django.db.migrations.recorder import MigrationRecorder from django.contrib.auth.models import User
from django.db.migrations.executor import MigrationExecutor
from django.db import connection from django.db import connection
from django.shortcuts import redirect from django.shortcuts import redirect
from django.apps import apps
from django.utils.deprecation import MiddlewareMixin from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import gettext_lazy as _
from django.urls import reverse, resolve from django.urls import reverse, resolve
from awx.main import migrations from awx.main.utils.named_url_graph import generate_graph, GraphNode
from awx.conf import fields, register
from awx.main.utils.profiling import AWXProfiler from awx.main.utils.profiling import AWXProfiler
from awx.main.utils.common import memoize
from awx.urls import get_urlpatterns
logger = logging.getLogger('awx.main.middleware') logger = logging.getLogger('awx.main.middleware')
@@ -97,7 +97,49 @@ class DisableLocalAuthMiddleware(MiddlewareMixin):
logout(request) logout(request)
def _customize_graph():
from awx.main.models import Instance, Schedule, UnifiedJobTemplate
for model in [Schedule, UnifiedJobTemplate]:
if model in settings.NAMED_URL_GRAPH:
settings.NAMED_URL_GRAPH[model].remove_bindings()
settings.NAMED_URL_GRAPH.pop(model)
if User not in settings.NAMED_URL_GRAPH:
settings.NAMED_URL_GRAPH[User] = GraphNode(User, ['username'], [])
settings.NAMED_URL_GRAPH[User].add_bindings()
if Instance not in settings.NAMED_URL_GRAPH:
settings.NAMED_URL_GRAPH[Instance] = GraphNode(Instance, ['hostname'], [])
settings.NAMED_URL_GRAPH[Instance].add_bindings()
class URLModificationMiddleware(MiddlewareMixin): class URLModificationMiddleware(MiddlewareMixin):
def __init__(self, get_response):
models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
generate_graph(models)
_customize_graph()
register(
'NAMED_URL_FORMATS',
field_class=fields.DictField,
read_only=True,
label=_('Formats of all available named urls'),
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
category=_('Named URL'),
category_slug='named-url',
)
register(
'NAMED_URL_GRAPH_NODES',
field_class=fields.DictField,
read_only=True,
label=_('List of all named url graph nodes.'),
help_text=_(
'Read-only list of key-value pairs that exposes named URL graph topology.'
' Use this list to programmatically generate named URLs for resources'
),
category=_('Named URL'),
category_slug='named-url',
)
super().__init__(get_response)
@staticmethod @staticmethod
def _hijack_for_old_jt_name(node, kwargs, named_url): def _hijack_for_old_jt_name(node, kwargs, named_url):
try: try:
@@ -138,36 +180,14 @@ class URLModificationMiddleware(MiddlewareMixin):
@classmethod @classmethod
def _convert_named_url(cls, url_path): def _convert_named_url(cls, url_path):
default_prefix = PurePosixPath('/api/v2/') url_units = url_path.split('/')
optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/') # If the identifier is an empty string, it is always invalid.
if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
url_path_original = url_path return url_path
url_path = PurePosixPath(url_path) resource = url_units[3]
if set(optional_prefix.parts).issubset(set(url_path.parts)):
url_prefix = optional_prefix
elif set(default_prefix.parts).issubset(set(url_path.parts)):
url_prefix = default_prefix
else:
return url_path_original
# Remove prefix
url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
try:
resource_path = PurePosixPath(url_path.parts[0])
name = url_path.parts[1]
url_suffix = PurePosixPath(*url_path.parts[2:]) # remove name and resource
except IndexError:
return url_path_original
resource = resource_path.parts[0]
if resource in settings.NAMED_URL_MAPPINGS: if resource in settings.NAMED_URL_MAPPINGS:
pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name)) url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
else: return '/'.join(url_units)
return url_path_original
parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
return PurePosixPath(*parts).as_posix() + '/'
def process_request(self, request): def process_request(self, request):
old_path = request.path_info old_path = request.path_info
@@ -178,46 +198,9 @@ class URLModificationMiddleware(MiddlewareMixin):
request.path_info = new_path request.path_info = new_path
@memoize(ttl=20)
def is_migrating():
latest_number = 0
latest_name = ''
for migration_path in Path(migrations.__path__[0]).glob('[0-9]*.py'):
try:
migration_number = int(migration_path.name.split('_', 1)[0])
except ValueError:
continue
if migration_number > latest_number:
latest_number = migration_number
latest_name = migration_path.name[: -len('.py')]
return not MigrationRecorder(connection).migration_qs.filter(app='main', name=latest_name).exists()
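# Note on memoize(ttl=20) above: the check queries django_migrations via
# MigrationRecorder, so at most one such query runs per 20-second window
# instead of one per request:
#   is_migrating()  # first call hits the database
#   is_migrating()  # calls within the next 20s return the cached result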
class MigrationRanCheckMiddleware(MiddlewareMixin): class MigrationRanCheckMiddleware(MiddlewareMixin):
def process_request(self, request): def process_request(self, request):
if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran': executor = MigrationExecutor(connection)
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
return redirect(reverse("ui:migrations_notran")) return redirect(reverse("ui:migrations_notran"))
class OptionalURLPrefixPath(MiddlewareMixin):
@functools.lru_cache
def _url_optional(self, prefix):
# Relevant Django code path https://github.com/django/django/blob/stable/4.2.x/django/core/handlers/base.py#L300
#
# resolve_request(request)
# get_resolver(request.urlconf)
# _get_cached_resolver(request.urlconf) <-- cached via @functools.cache
#
# Django will attempt to cache the value(s) of request.urlconf
# Being hashable is a prerequisite for being cacheable.
# tuple() is hashable; list() is not.
# Hence the tuple(list()) wrap.
return tuple(get_urlpatterns(prefix=prefix))
def process_request(self, request):
prefix = settings.OPTIONAL_API_URLPATTERN_PREFIX
if request.path.startswith(f"/api/{prefix}"):
request.urlconf = self._url_optional(prefix)
else:
request.urlconf = 'awx.urls'
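# Behavior sketch for OptionalURLPrefixPath (the prefix value is deployment
# configuration; "controller" below is only an example):
#   /api/controller/v2/ping/  -> urlconf from get_urlpatterns(prefix="controller")
#   /api/v2/ping/             -> default 'awx.urls' urlconf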

View File

@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
model_name='organization', model_name='organization',
name='execute_role', name='execute_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AddField( migrations.AddField(
model_name='organization', model_name='organization',
name='job_template_admin_role', name='job_template_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AddField( migrations.AddField(
model_name='organization', model_name='organization',
name='credential_admin_role', name='credential_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AddField( migrations.AddField(
model_name='organization', model_name='organization',
name='inventory_admin_role', name='inventory_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AddField( migrations.AddField(
model_name='organization', model_name='organization',
name='project_admin_role', name='project_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AddField( migrations.AddField(
model_name='organization', model_name='organization',
name='workflow_admin_role', name='workflow_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AddField( migrations.AddField(
model_name='organization', model_name='organization',
name='notification_admin_role', name='notification_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AlterField( migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
name='admin_role', name='admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'], parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
model_name='inventory', model_name='inventory',
name='admin_role', name='admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role' null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
), ),
), ),
migrations.AlterField( migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
name='admin_role', name='admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.project_admin_role', 'singleton:system_administrator'], parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
name='admin_role', name='admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'], parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
name='execute_role', name='execute_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['admin_role', 'organization.execute_role'], parent_role=['admin_role', 'organization.execute_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'], parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
name='execute_role', name='execute_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'], parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=[ parent_role=[
'admin_role', 'admin_role',
'execute_role', 'execute_role',

View File

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
model_name='organization', model_name='organization',
name='member_role', name='member_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role' editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
), ),
), ),
migrations.AlterField( migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=[ parent_role=[
'member_role', 'member_role',
'auditor_role', 'auditor_role',

View File

@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
model_name='organization', model_name='organization',
name='approval_role', name='approval_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
preserve_default='True', preserve_default='True',
), ),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.approval_role', 'admin_role'], parent_role=['organization.approval_role', 'admin_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=[ parent_role=[
'member_role', 'member_role',
'auditor_role', 'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'], parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',

View File

@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.job_template_admin_role'], parent_role=['organization.job_template_admin_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['admin_role', 'organization.execute_role'], parent_role=['admin_role', 'organization.execute_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'], parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
related_name='+', related_name='+',
to='main.Role', to='main.Role',

View File

@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
model_name='organization', model_name='organization',
name='execution_environment_admin_role', name='execution_environment_admin_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role' editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
), ),
preserve_default='True', preserve_default='True',
), ),

View File

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=[ parent_role=[
'member_role', 'member_role',
'auditor_role', 'auditor_role',

View File

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_administrator'], parent_role=['singleton:system_administrator'],
related_name='+', related_name='+',
to='main.role', to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, editable=False,
null='True', null='True',
on_delete=django.db.models.deletion.SET_NULL, on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'], parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
related_name='+', related_name='+',
to='main.role', to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
model_name='instancegroup', model_name='instancegroup',
name='use_role', name='use_role',
field=awx.main.fields.ImplicitRoleField( field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role' editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
), ),
preserve_default='True', preserve_default='True',
), ),

View File

@@ -1,58 +0,0 @@
# Generated by Django 4.2.6 on 2024-02-15 20:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0189_inbound_hop_nodes'),
]
operations = [
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
],
default=None,
max_length=32,
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
],
default=None,
max_length=32,
),
),
]

View File

@@ -1,85 +0,0 @@
# Generated by Django 4.2.6 on 2023-11-13 20:10
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0190_alter_inventorysource_source_and_more'),
('dab_rbac', '__first__'),
]
operations = [
# Add custom permissions for all special actions, like update, use, adhoc, and so on
migrations.AlterModelOptions(
name='credential',
options={'ordering': ('name',), 'permissions': [('use_credential', 'Can use credential in a job or related resource')]},
),
migrations.AlterModelOptions(
name='instancegroup',
options={'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')]},
),
migrations.AlterModelOptions(
name='inventory',
options={
'ordering': ('name',),
'permissions': [
('use_inventory', 'Can use inventory in a job template'),
('adhoc_inventory', 'Can run ad hoc commands'),
('update_inventory', 'Can update inventory sources in inventory'),
],
'verbose_name_plural': 'inventories',
},
),
migrations.AlterModelOptions(
name='jobtemplate',
options={'ordering': ('name',), 'permissions': [('execute_jobtemplate', 'Can run this job template')]},
),
migrations.AlterModelOptions(
name='project',
options={
'ordering': ('id',),
'permissions': [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')],
},
),
migrations.AlterModelOptions(
name='workflowjobtemplate',
options={
'permissions': [
('execute_workflowjobtemplate', 'Can run this workflow job template'),
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
]
},
),
migrations.AlterModelOptions(
name='organization',
options={
'default_permissions': ('change', 'delete', 'view'),
'ordering': ('name',),
'permissions': [
('member_organization', 'Basic participation permissions for organization'),
('audit_organization', 'Audit everything inside the organization'),
],
},
),
migrations.AlterModelOptions(
name='team',
options={'ordering': ('organization__name', 'name'), 'permissions': [('member_team', 'Inherit all roles assigned to this team')]},
),
# Remove the default 'add' permission for a few models
migrations.AlterModelOptions(
name='jobtemplate',
options={
'default_permissions': ('change', 'delete', 'view'),
'ordering': ('name',),
'permissions': [('execute_jobtemplate', 'Can run this job template')],
},
),
migrations.AlterModelOptions(
name='instancegroup',
options={
'default_permissions': ('change', 'delete', 'view'),
'permissions': [('use_instancegroup', 'Can use instance group in a preference list of a resource')],
},
),
]

View File

@@ -1,20 +0,0 @@
# Generated by Django 4.2.6 on 2023-11-21 02:06
from django.db import migrations
from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permissions_as_operation, setup_managed_role_definitions
class Migration(migrations.Migration):
dependencies = [
('main', '0191_add_django_permissions'),
('dab_rbac', '__first__'),
]
operations = [
# make sure permissions and content types have been created by now
# these normally run in a post_migrate signal but we need them for our logic
migrations.RunPython(create_permissions_as_operation, migrations.RunPython.noop),
migrations.RunPython(setup_managed_role_definitions, migrations.RunPython.noop),
migrations.RunPython(migrate_to_new_rbac, migrations.RunPython.noop),
]

View File

@@ -1,51 +0,0 @@
# Generated by Django 4.2.6 on 2024-05-08 07:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0192_custom_roles'),
]
operations = [
migrations.AlterField(
model_name='notification',
name='notification_type',
field=models.CharField(
choices=[
('awssns', 'AWS SNS'),
('email', 'Email'),
('grafana', 'Grafana'),
('irc', 'IRC'),
('mattermost', 'Mattermost'),
('pagerduty', 'Pagerduty'),
('rocketchat', 'Rocket.Chat'),
('slack', 'Slack'),
('twilio', 'Twilio'),
('webhook', 'Webhook'),
],
max_length=32,
),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
field=models.CharField(
choices=[
('awssns', 'AWS SNS'),
('email', 'Email'),
('grafana', 'Grafana'),
('irc', 'IRC'),
('mattermost', 'Mattermost'),
('pagerduty', 'Pagerduty'),
('rocketchat', 'Rocket.Chat'),
('slack', 'Slack'),
('twilio', 'Twilio'),
('webhook', 'Webhook'),
],
max_length=32,
),
),
]

View File

@@ -1,61 +0,0 @@
# Generated by Django 4.2.10 on 2024-06-12 19:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0193_alter_notification_notification_type_and_more'),
]
operations = [
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
('openshift_virtualization', 'OpenShift Virtualization'),
],
default=None,
max_length=32,
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
('openshift_virtualization', 'OpenShift Virtualization'),
],
default=None,
max_length=32,
),
),
]

View File

@@ -1,26 +0,0 @@
# Generated by Django 4.2.6 on 2024-06-20 15:55
from django.db import migrations
def delete_execution_environment_read_role(apps, schema_editor):
permission_classes = [apps.get_model('auth', 'Permission'), apps.get_model('dab_rbac', 'DABPermission')]
for permission_cls in permission_classes:
ee_read_perm = permission_cls.objects.filter(codename='view_executionenvironment').first()
if ee_read_perm:
ee_read_perm.delete()
class Migration(migrations.Migration):
dependencies = [
('main', '0194_alter_inventorysource_source_and_more'),
]
operations = [
migrations.AlterModelOptions(
name='executionenvironment',
options={'default_permissions': ('add', 'change', 'delete'), 'ordering': ('-created',)},
),
migrations.RunPython(delete_execution_environment_read_role, migrations.RunPython.noop),
]

View File

@@ -1,402 +0,0 @@
import json
import logging
from django.apps import apps as global_apps
from django.db.models import ForeignKey
from django.conf import settings
from ansible_base.rbac.migrations._utils import give_permissions
from ansible_base.rbac.management import create_dab_permissions
from awx.main.fields import ImplicitRoleField
from awx.main.constants import role_name_to_perm_mapping
from ansible_base.rbac.permission_registry import permission_registry
logger = logging.getLogger('awx.main.migrations._dab_rbac')
def create_permissions_as_operation(apps, schema_editor):
create_dab_permissions(global_apps.get_app_config("main"), apps=apps)
"""
Data structures and methods for the migration of old Role model to ObjectRole
"""
system_admin = ImplicitRoleField(name='system_administrator')
system_auditor = ImplicitRoleField(name='system_auditor')
system_admin.model = None
system_auditor.model = None
def resolve_parent_role(f, role_path):
"""
Given a field and a path declared in parent_role from the field definition, like
execute_role = ImplicitRoleField(parent_role='admin_role')
This expects to be passed in (execute_role object, "admin_role")
It should return the admin_role from that object
"""
if role_path == 'singleton:system_administrator':
return system_admin
elif role_path == 'singleton:system_auditor':
return system_auditor
else:
related_field = f
current_model = f.model
for related_field_name in role_path.split('.'):
related_field = current_model._meta.get_field(related_field_name)
if isinstance(related_field, ForeignKey) and not isinstance(related_field, ImplicitRoleField):
current_model = related_field.related_model
return related_field
def build_role_map(apps):
"""
For the old Role model, this builds and returns dictionaries (children, parents)
which give a global mapping of the ImplicitRoleField instances according to the graph
"""
models = set(apps.get_app_config('main').get_models())
all_fields = set()
parents = {}
children = {}
all_fields.add(system_admin)
all_fields.add(system_auditor)
for cls in models:
for f in cls._meta.get_fields():
if isinstance(f, ImplicitRoleField):
all_fields.add(f)
for f in all_fields:
if f.parent_role is not None:
if isinstance(f.parent_role, str):
parent_roles = [f.parent_role]
else:
parent_roles = f.parent_role
# SPECIAL CASE: organization auditor_role is not a child of admin_role;
# this makes no practical sense and conflicts with the expected managed role,
# so we patch it in here as a hack
if f.name == 'auditor_role' and f.model._meta.model_name == 'organization':
parent_roles.append('admin_role')
parent_list = []
for rel_name in parent_roles:
parent_list.append(resolve_parent_role(f, rel_name))
parents[f] = parent_list
# build children lookup from parents lookup
for child_field, parent_list in parents.items():
for parent_field in parent_list:
children.setdefault(parent_field, [])
children[parent_field].append(child_field)
return (parents, children)
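# Sketch of the shape build_role_map returns (field objects abbreviated,
# names illustrative):
#   parents  = {<execute_role field>: [<admin_role field>], ...}
#   children = {<admin_role field>: [<execute_role field>, ...], ...}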
def get_descendents(f, children_map):
"""
Given ImplicitRoleField F and the children mapping, returns all descendents
of that field, as a set of other fields, including itself
"""
ret = {f}
if f in children_map:
for child_field in children_map[f]:
ret.update(get_descendents(child_field, children_map))
return ret
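# Illustrative traversal (hypothetical graph): if admin_role is a parent of
# execute_role and read_role, then get_descendents(<admin_role field>, children)
# returns {<admin_role field>, <execute_role field>, <read_role field>}.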
def get_permissions_for_role(role_field, children_map, apps):
Permission = apps.get_model('dab_rbac', 'DABPermission')
ContentType = apps.get_model('contenttypes', 'ContentType')
perm_list = []
for child_field in get_descendents(role_field, children_map):
if child_field.name in role_name_to_perm_mapping:
for perm_name in role_name_to_perm_mapping[child_field.name]:
if perm_name == 'add_' and role_field.model._meta.model_name != 'organization':
continue # only organizations can contain add permissions
perm = Permission.objects.filter(content_type=ContentType.objects.get_for_model(child_field.model), codename__startswith=perm_name).first()
if perm is not None and perm not in perm_list:
perm_list.append(perm)
# special case for two models that have object roles but no organization roles in old system
if role_field.name == 'notification_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
ct = ContentType.objects.get_for_model(apps.get_model('main', 'NotificationTemplate'))
perm_list.extend(list(Permission.objects.filter(content_type=ct)))
if role_field.name == 'execution_environment_admin_role' or (role_field.name == 'admin_role' and role_field.model._meta.model_name == 'organization'):
ct = ContentType.objects.get_for_model(apps.get_model('main', 'ExecutionEnvironment'))
perm_list.extend(list(Permission.objects.filter(content_type=ct)))
# more special cases for those same above special org-level roles
if role_field.name == 'auditor_role':
perm_list.append(Permission.objects.get(codename='view_notificationtemplate'))
return perm_list
def model_class(ct, apps):
"""
You can not use model methods in migrations, so this duplicates
what ContentType.model_class does, using current apps
"""
try:
return apps.get_model(ct.app_label, ct.model)
except LookupError:
return None
def migrate_to_new_rbac(apps, schema_editor):
"""
This method moves the assigned permissions from the old rbac.py models
to the new RoleDefinition and ObjectRole models
"""
Role = apps.get_model('main', 'Role')
RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
Permission = apps.get_model('dab_rbac', 'DABPermission')
# remove 'add' permissions that are not valid for migrations from old versions
for perm_str in ('add_organization', 'add_jobtemplate'):
perm = Permission.objects.filter(codename=perm_str).first()
if perm:
perm.delete()
managed_definitions = dict()
for role_definition in RoleDefinition.objects.filter(managed=True):
permissions = frozenset(role_definition.permissions.values_list('id', flat=True))
managed_definitions[permissions] = role_definition
# Build map of old role model
parents, children = build_role_map(apps)
# NOTE: this import is expected to break at some point; when it does, just move the data here
from awx.main.models.rbac import role_descriptions
for role in Role.objects.prefetch_related('members', 'parents').iterator():
if role.singleton_name:
continue # only bothering to migrate object roles
team_roles = []
for parent in role.parents.all():
if parent.id not in json.loads(role.implicit_parents):
team_roles.append(parent)
# we will not create any roles that do not have any users or teams
if not (role.members.all() or team_roles):
logger.debug(f'Skipping role {role.role_field} for {role.content_type.model}-{role.object_id} due to no members')
continue
# get a list of permissions that the old role would grant
object_cls = apps.get_model(f'main.{role.content_type.model}')
object = object_cls.objects.get(pk=role.object_id) # WORKAROUND, role.content_object does not work in migrations
f = object._meta.get_field(role.role_field) # should be ImplicitRoleField
perm_list = get_permissions_for_role(f, children, apps)
permissions = frozenset(perm.id for perm in perm_list)
# With the needed permissions established, obtain the RoleDefinition this will need, priorities:
# 1. If it exists as a managed RoleDefinition then obviously use that
# 2. If we already created this for a prior role, use that
# 3. Create a new RoleDefinition that lists those permissions
if permissions in managed_definitions:
role_definition = managed_definitions[permissions]
else:
action = role.role_field.rsplit('_', 1)[0]  # remove the trailing '_role' from the field name
role_definition_name = f'{model_class(role.content_type, apps).__name__} {action.title()}'
description = role_descriptions[role.role_field]
if type(description) == dict:
if role.content_type.model in description:
description = description.get(role.content_type.model)
else:
description = description.get('default')
if '%s' in description:
description = description % role.content_type.model
role_definition, created = RoleDefinition.objects.get_or_create(
name=role_definition_name,
defaults={'description': description, 'content_type_id': role.content_type_id},
)
if created:
logger.info(f'Created custom Role Definition {role_definition_name}, pk={role_definition.pk}')
role_definition.permissions.set(perm_list)
# Create the object role and add users to it
give_permissions(
apps,
role_definition,
users=role.members.all(),
teams=[tr.object_id for tr in team_roles],
object_id=role.object_id,
content_type_id=role.content_type_id,
)
# Create new replacement system auditor role
new_system_auditor, created = RoleDefinition.objects.get_or_create(
name='System Auditor',
defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
)
new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))
# migrate is_system_auditor flag, because it is no longer handled by a system role
old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first()
if old_system_auditor:
# if the system auditor role is not present, this is a new install and no users should exist
ct = 0
for user in old_system_auditor.members.all():
RoleUserAssignment.objects.create(user=user, role_definition=new_system_auditor)
ct += 1
if ct:
logger.info(f'Migrated {ct} users to new system auditor flag')
def get_or_create_managed(name, description, ct, permissions, RoleDefinition):
role_definition, created = RoleDefinition.objects.get_or_create(name=name, defaults={'managed': True, 'description': description, 'content_type': ct})
role_definition.permissions.set(list(permissions))
if not role_definition.managed:
role_definition.managed = True
role_definition.save(update_fields=['managed'])
if created:
logger.info(f'Created RoleDefinition {role_definition.name} pk={role_definition.pk} with {len(permissions)} permissions')
return role_definition
def setup_managed_role_definitions(apps, schema_editor):
"""
Idempotent method to create or sync the managed role definitions
"""
to_create = {
'object_admin': '{cls.__name__} Admin',
'org_admin': 'Organization Admin',
'org_children': 'Organization {cls.__name__} Admin',
'special': '{cls.__name__} {action}',
}
ContentType = apps.get_model('contenttypes', 'ContentType')
Permission = apps.get_model('dab_rbac', 'DABPermission')
RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
Organization = apps.get_model(settings.ANSIBLE_BASE_ORGANIZATION_MODEL)
org_ct = ContentType.objects.get_for_model(Organization)
managed_role_definitions = []
org_perms = set()
for cls in permission_registry.all_registered_models:
ct = ContentType.objects.get_for_model(cls)
cls_name = cls._meta.model_name
object_perms = set(Permission.objects.filter(content_type=ct))
# Special case for InstanceGroup, which has an organization field but is not an organization child object
if cls_name != 'instancegroup':
org_perms.update(object_perms)
if 'object_admin' in to_create and cls_name != 'organization':
indiv_perms = object_perms.copy()
add_perms = [perm for perm in indiv_perms if perm.codename.startswith('add_')]
if add_perms:
for perm in add_perms:
indiv_perms.remove(perm)
managed_role_definitions.append(
get_or_create_managed(
to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
)
)
if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
org_child_perms = object_perms.copy()
org_child_perms.add(Permission.objects.get(codename='view_organization'))
managed_role_definitions.append(
get_or_create_managed(
to_create['org_children'].format(cls=cls),
f'Has all permissions to {cls._meta.verbose_name_plural} within an organization',
org_ct,
org_child_perms,
RoleDefinition,
)
)
if 'special' in to_create:
special_perms = []
for perm in object_perms:
# Organization auditor is handled separately
if perm.codename.split('_')[0] not in ('add', 'change', 'delete', 'view', 'audit'):
special_perms.append(perm)
for perm in special_perms:
action = perm.codename.split('_')[0]
view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
perm_list = [perm, view_perm]
# Handle special-case where adhoc role also listed use permission
if action == 'adhoc':
for other_perm in object_perms:
if other_perm.codename == 'use_inventory':
perm_list.append(other_perm)
break
managed_role_definitions.append(
get_or_create_managed(
to_create['special'].format(cls=cls, action=action.title()),
f'Has {action} permissions to a single {cls._meta.verbose_name}',
ct,
perm_list,
RoleDefinition,
)
)
if 'org_admin' in to_create:
managed_role_definitions.append(
get_or_create_managed(
to_create['org_admin'].format(cls=Organization),
'Has all permissions to a single organization and all objects inside of it',
org_ct,
org_perms,
RoleDefinition,
)
)
# Special "organization action" roles
audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
audit_permissions.append(Permission.objects.get(codename='audit_organization'))
managed_role_definitions.append(
get_or_create_managed(
'Organization Audit',
'Has permission to view all objects inside of a single organization',
org_ct,
audit_permissions,
RoleDefinition,
)
)
org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
managed_role_definitions.append(
get_or_create_managed(
'Organization Execute',
'Has permission to execute all runnable objects in the organization',
org_ct,
[perm for perm in org_perms if perm.codename in org_execute_permissions],
RoleDefinition,
)
)
org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
managed_role_definitions.append(
get_or_create_managed(
'Organization Approval',
'Has permission to approve any workflow steps within a single organization',
org_ct,
[perm for perm in org_perms if perm.codename in org_approval_permissions],
RoleDefinition,
)
)
unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
for role_definition in unexpected_role_definitions:
logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
role_definition.delete()
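# Examples of the managed definition names the to_create templates above
# produce (class names illustrative):
#   'object_admin' -> "JobTemplate Admin"
#   'org_children' -> "Organization JobTemplate Admin"
#   'special'      -> "JobTemplate Execute"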

View File

@@ -1,19 +1,12 @@
# Copyright (c) 2015 Ansible, Inc. # Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved. # All Rights Reserved.
import json
# Django # Django
from django.conf import settings # noqa from django.conf import settings # noqa
from django.db import connection from django.db import connection
from django.db.models.signals import pre_delete # noqa from django.db.models.signals import pre_delete # noqa
# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
from ansible_base.rbac import permission_registry
from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment
from ansible_base.lib.utils.models import prevent_search from ansible_base.lib.utils.models import prevent_search
from ansible_base.lib.utils.models import user_summary_fields
# AWX # AWX
from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa
@@ -106,8 +99,6 @@ from awx.main.access import get_user_queryset, check_user_access, check_user_acc
User.add_to_class('get_queryset', get_user_queryset) User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access) User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors) User.add_to_class('can_access_with_errors', check_user_access_with_errors)
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))
User.add_to_class('summary_fields', user_summary_fields)
def convert_jsonfields(): def convert_jsonfields():
@@ -176,17 +167,17 @@ pre_delete.connect(cleanup_created_modified_by, sender=User)
@property @property
def user_get_organizations(user): def user_get_organizations(user):
return Organization.access_qs(user, 'member') return Organization.objects.filter(member_role__members=user)
@property @property
def user_get_admin_of_organizations(user): def user_get_admin_of_organizations(user):
return Organization.access_qs(user, 'change') return Organization.objects.filter(admin_role__members=user)
@property @property
def user_get_auditor_of_organizations(user): def user_get_auditor_of_organizations(user):
return Organization.access_qs(user, 'audit') return Organization.objects.filter(auditor_role__members=user)
@property @property
@@ -200,21 +191,11 @@ User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
User.add_to_class('created', created) User.add_to_class('created', created)
def get_system_auditor_role():
rd, created = RoleDefinition.objects.get_or_create(
name='System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
)
if created:
rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view')))
return rd
@property @property
def user_is_system_auditor(user): def user_is_system_auditor(user):
if not hasattr(user, '_is_system_auditor'): if not hasattr(user, '_is_system_auditor'):
if user.pk: if user.pk:
rd = get_system_auditor_role() user._is_system_auditor = user.roles.filter(singleton_name='system_auditor', role_field='system_auditor').exists()
user._is_system_auditor = RoleUserAssignment.objects.filter(user=user, role_definition=rd).exists()
else: else:
# Odd case where user is unsaved, this should never be relied on # Odd case where user is unsaved, this should never be relied on
return False return False
@@ -228,17 +209,17 @@ def user_is_system_auditor(user, tf):
# time they've logged in, and we've just created the new User in this # time they've logged in, and we've just created the new User in this
# request), we need one to set up the system auditor role # request), we need one to set up the system auditor role
user.save() user.save()
rd = get_system_auditor_role() if tf:
assignment = RoleUserAssignment.objects.filter(user=user, role_definition=rd).first() role = Role.singleton('system_auditor')
prior_value = bool(assignment) # must check if member to not duplicate activity stream
if prior_value != bool(tf): if user not in role.members.all():
if assignment: role.members.add(user)
assignment.delete() user._is_system_auditor = True
else: else:
rd.give_global_permission(user) role = Role.singleton('system_auditor')
user._is_system_auditor = bool(tf) if user in role.members.all():
entry = ActivityStream.objects.create(changes=json.dumps({"is_system_auditor": [prior_value, bool(tf)]}), object1='user', operation='update') role.members.remove(user)
entry.user.add(user) user._is_system_auditor = False
User.add_to_class('is_system_auditor', user_is_system_auditor) User.add_to_class('is_system_auditor', user_is_system_auditor)
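A condensed sketch of the toggle logic in the left-hand column above, assuming a configured Django app with the ansible_base models imported earlier in this hunk; the activity stream model is passed in to keep the sketch generic, and this is not AWX's exact method:

# Sketch only: an idempotent grant/revoke toggle built on the
# RoleDefinition / RoleUserAssignment APIs used above.
import json

from ansible_base.rbac.models import RoleUserAssignment

def set_global_role(user, rd, enabled, activity_stream_model):
    assignment = RoleUserAssignment.objects.filter(user=user, role_definition=rd).first()
    prior = bool(assignment)
    if prior == bool(enabled):
        return  # nothing to change; avoids duplicate activity stream entries
    if assignment:
        assignment.delete()  # revoke the global permission
    else:
        rd.give_global_permission(user)  # grant it
    entry = activity_stream_model.objects.create(
        changes=json.dumps({"is_system_auditor": [prior, bool(enabled)]}),
        object1='user',
        operation='update',
    )
    entry.user.add(user)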
@@ -306,10 +287,6 @@ activity_stream_registrar.connect(WorkflowApprovalTemplate)
activity_stream_registrar.connect(OAuth2Application) activity_stream_registrar.connect(OAuth2Application)
activity_stream_registrar.connect(OAuth2AccessToken) activity_stream_registrar.connect(OAuth2AccessToken)
# Register models
permission_registry.register(Project, Team, WorkflowJobTemplate, JobTemplate, Inventory, Organization, Credential, NotificationTemplate, ExecutionEnvironment)
permission_registry.register(InstanceGroup, parent_field_name=None) # Not part of an organization
# prevent API filtering on certain Django-supplied sensitive fields # prevent API filtering on certain Django-supplied sensitive fields
prevent_search(User._meta.get_field('password')) prevent_search(User._meta.get_field('password'))
prevent_search(OAuth2AccessToken._meta.get_field('token')) prevent_search(OAuth2AccessToken._meta.get_field('token'))

View File

@@ -7,9 +7,6 @@ from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.utils.timezone import now from django.utils.timezone import now
# django-ansible-base
from ansible_base.lib.utils.models import get_type_for_model
# Django-CRUM # Django-CRUM
from crum import get_current_user from crum import get_current_user
@@ -142,23 +139,6 @@ class BaseModel(models.Model):
self.save(update_fields=update_fields) self.save(update_fields=update_fields)
return update_fields return update_fields
def summary_fields(self):
"""
This exists for use by django-ansible-base,
which has standard patterns that differ from AWX's. We enable views from DAB, and
for those views to list summary_fields for AWX models, the models need to provide this method.
"""
from awx.api.serializers import SUMMARIZABLE_FK_FIELDS
model_name = get_type_for_model(self)
related_fields = SUMMARIZABLE_FK_FIELDS.get(model_name, {})
summary_data = {}
for field_name in related_fields:
fval = getattr(self, field_name, None)
if fval is not None:
summary_data[field_name] = fval
return summary_data
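For illustration, the removed helper boils down to: look up a per-model tuple of related field names and collect the non-null values. A standalone sketch, with a made-up stand-in for AWX's real SUMMARIZABLE_FK_FIELDS map:

# Illustrative only: the real field map lives in awx.api.serializers as
# SUMMARIZABLE_FK_FIELDS; this stand-in shows the shape of the lookup.
SUMMARIZABLE_FK_FIELDS = {'credential': ('organization', 'credential_type')}

def summary_fields(obj, model_name):
    related_fields = SUMMARIZABLE_FK_FIELDS.get(model_name, ())
    summary = {}
    for field_name in related_fields:
        value = getattr(obj, field_name, None)
        if value is not None:
            summary[field_name] = value
    return summary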
class CreatedModifiedModel(BaseModel): class CreatedModifiedModel(BaseModel):
""" """

View File

@@ -21,10 +21,6 @@ from django.conf import settings
from django.utils.encoding import force_str from django.utils.encoding import force_str
from django.utils.functional import cached_property from django.utils.functional import cached_property
from django.utils.timezone import now from django.utils.timezone import now
from django.contrib.auth.models import User
# DRF
from rest_framework.serializers import ValidationError as DRFValidationError
# AWX # AWX
from awx.api.versioning import reverse from awx.api.versioning import reverse
@@ -45,7 +41,6 @@ from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
ROLE_SINGLETON_SYSTEM_AUDITOR, ROLE_SINGLETON_SYSTEM_AUDITOR,
) )
from awx.main.models import Team, Organization
from awx.main.utils import encrypt_field from awx.main.utils import encrypt_field
from . import injectors as builtin_injectors from . import injectors as builtin_injectors
@@ -88,7 +83,6 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
app_label = 'main' app_label = 'main'
ordering = ('name',) ordering = ('name',)
unique_together = ('organization', 'name', 'credential_type') unique_together = ('organization', 'name', 'credential_type')
permissions = [('use_credential', 'Can use credential in a job or related resource')]
PASSWORD_FIELDS = ['inputs'] PASSWORD_FIELDS = ['inputs']
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources'] FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
@@ -320,16 +314,6 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
else: else:
raise ValueError('{} is not a dynamic input field'.format(field_name)) raise ValueError('{} is not a dynamic input field'.format(field_name))
def validate_role_assignment(self, actor, role_definition):
if self.organization:
if isinstance(actor, User):
if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
return
if isinstance(actor, Team):
if actor.organization == self.organization:
return
raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
class CredentialType(CommonModelNameNotUnique): class CredentialType(CommonModelNameNotUnique):
""" """
@@ -1247,14 +1231,6 @@ ManagedCredentialType(
'multiline': True, 'multiline': True,
'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'), 'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'),
}, },
{
'id': 'gce_credentials',
'label': gettext_noop('Google Cloud Platform account credentials'),
'type': 'string',
'secret': True,
'multiline': True,
'help_text': gettext_noop('Google Cloud Platform account credentials in JSON format.'),
},
], ],
'required': ['configuration'], 'required': ['configuration'],
}, },

View File

@@ -130,10 +130,3 @@ def terraform(cred, env, private_data_dir):
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
f.write(cred.get_input('configuration')) f.write(cred.get_input('configuration'))
env['TF_BACKEND_CONFIG_FILE'] = to_container_path(path, private_data_dir) env['TF_BACKEND_CONFIG_FILE'] = to_container_path(path, private_data_dir)
# Handle env variables for GCP account credentials
if 'gce_credentials' in cred.inputs:
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
with os.fdopen(handle, 'w') as f:
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
f.write(cred.get_input('gce_credentials'))
env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(path, private_data_dir)
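A minimal runnable sketch of the private-file pattern used here: write a secret to a file only the owner can read, then point an environment variable at its path. All names are illustrative:

import os
import stat
import tempfile

def write_private_file(directory, contents):
    handle, path = tempfile.mkstemp(dir=directory)
    with os.fdopen(handle, 'w') as f:
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)  # 0600: owner read/write only
        f.write(contents)
    return path

env = {}
env['GOOGLE_BACKEND_CREDENTIALS'] = write_private_file(tempfile.gettempdir(), '{"type": "service_account"}')
print(env['GOOGLE_BACKEND_CREDENTIALS'])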

View File

@@ -4,12 +4,11 @@ import datetime
from datetime import timezone from datetime import timezone
import logging import logging
from collections import defaultdict from collections import defaultdict
import itertools
import time import time
from django.conf import settings from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist from django.core.exceptions import ObjectDoesNotExist
from django.db import models, DatabaseError, transaction from django.db import models, DatabaseError
from django.db.models.functions import Cast from django.db.models.functions import Cast
from django.utils.dateparse import parse_datetime from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator from django.utils.text import Truncator
@@ -606,23 +605,19 @@ class JobEvent(BasePlaybookEvent):
def _update_host_metrics(updated_hosts_list): def _update_host_metrics(updated_hosts_list):
from awx.main.models import HostMetric # circular import from awx.main.models import HostMetric # circular import
# bulk-create
current_time = now() current_time = now()
HostMetric.objects.bulk_create(
# FUTURE: [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
# - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace. )
# - Ability to do ORM upserts *may* have been introduced in Django 5.0. # bulk-update
# See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models. batch_start, batch_size = 0, 1000
# Hopefully this will be fully ready for batch use by 5.2 LTS. while batch_start <= len(updated_hosts_list):
batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
args = [iter(updated_hosts_list)] * 500 HostMetric.objects.filter(hostname__in=batched_host_list).update(
for hosts in itertools.zip_longest(*args): last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
with transaction.atomic(): )
HostMetric.objects.bulk_create( batch_start += batch_size
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
)
HostMetric.objects.filter(hostname__in=hosts).update(
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
)
@property @property
def job_verbosity(self): def job_verbosity(self):
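The FUTURE comment above points at itertools.batched(), added in Python 3.12. A runnable sketch of the hand-rolled equivalent from the left-hand column; it assumes the real data contains no None entries, since zip_longest pads the final batch with None:

import itertools

def batched_compat(iterable, n):
    # itertools.batched(iterable, n) replaces this on Python 3.12+
    args = [iter(iterable)] * n
    for group in itertools.zip_longest(*args):
        yield [item for item in group if item is not None]

hostnames = ['host%d' % i for i in range(7)]
print(list(batched_compat(hostnames, 3)))
# [['host0', 'host1', 'host2'], ['host3', 'host4', 'host5'], ['host6']]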

View File

@@ -1,8 +1,6 @@
from django.db import models from django.db import models
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.main.models.base import CommonModel from awx.main.models.base import CommonModel
from awx.main.validators import validate_container_image_name from awx.main.validators import validate_container_image_name
@@ -14,8 +12,6 @@ __all__ = ['ExecutionEnvironment']
class ExecutionEnvironment(CommonModel): class ExecutionEnvironment(CommonModel):
class Meta: class Meta:
ordering = ('-created',) ordering = ('-created',)
# Remove view permission, as a temporary solution, defer to organization read permission
default_permissions = ('add', 'change', 'delete')
PULL_CHOICES = [ PULL_CHOICES = [
('always', _("Always pull container before running.")), ('always', _("Always pull container before running.")),
@@ -57,12 +53,3 @@ class ExecutionEnvironment(CommonModel):
def get_absolute_url(self, request=None): def get_absolute_url(self, request=None):
return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request) return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request)
def validate_role_assignment(self, actor, role_definition):
if self.managed:
raise ValidationError({'object_id': _('Can not assign object roles to managed Execution Environments')})
if self.organization_id is None:
raise ValidationError({'object_id': _('Can not assign object roles to global Execution Environments')})
if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})

View File

@@ -485,9 +485,6 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin, ResourceMi
class Meta: class Meta:
app_label = 'main' app_label = 'main'
permissions = [('use_instancegroup', 'Can use instance group in a preference list of a resource')]
# Since this has no direct organization field only superuser can add, so remove add permission
default_permissions = ('change', 'delete', 'view')
def set_default_policy_fields(self): def set_default_policy_fields(self):
self.policy_instance_list = [] self.policy_instance_list = []

View File

@@ -11,8 +11,6 @@ import os.path
from urllib.parse import urljoin from urllib.parse import urljoin
import yaml import yaml
import tempfile
import stat
# Django # Django
from django.conf import settings from django.conf import settings
@@ -91,11 +89,6 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
verbose_name_plural = _('inventories') verbose_name_plural = _('inventories')
unique_together = [('name', 'organization')] unique_together = [('name', 'organization')]
ordering = ('name',) ordering = ('name',)
permissions = [
('use_inventory', 'Can use inventory in a job template'),
('adhoc_inventory', 'Can run ad hoc commands'),
('update_inventory', 'Can update inventory sources in inventory'),
]
organization = models.ForeignKey( organization = models.ForeignKey(
'Organization', 'Organization',
@@ -932,8 +925,6 @@ class InventorySourceOptions(BaseModel):
('rhv', _('Red Hat Virtualization')), ('rhv', _('Red Hat Virtualization')),
('controller', _('Red Hat Ansible Automation Platform')), ('controller', _('Red Hat Ansible Automation Platform')),
('insights', _('Red Hat Insights')), ('insights', _('Red Hat Insights')),
('terraform', _('Terraform State')),
('openshift_virtualization', _('OpenShift Virtualization')),
] ]
# From the options of the Django management base command # From the options of the Django management base command
@@ -1043,7 +1034,7 @@ class InventorySourceOptions(BaseModel):
def cloud_credential_validation(source, cred): def cloud_credential_validation(source, cred):
if not source: if not source:
return None return None
if cred and source not in ('custom', 'scm', 'openshift_virtualization'): if cred and source not in ('custom', 'scm'):
# If a credential was provided, it's important that it matches # If a credential was provided, it's important that it matches
# the actual inventory source being used (Amazon requires Amazon # the actual inventory source being used (Amazon requires Amazon
# credentials; Rackspace requires Rackspace credentials; etc...) # credentials; Rackspace requires Rackspace credentials; etc...)
@@ -1052,14 +1043,12 @@ class InventorySourceOptions(BaseModel):
# Allow an EC2 source to omit the credential. If Tower is running on # Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials # an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided. # from the instance metadata instead of those explicitly provided.
elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']: elif source in CLOUD_PROVIDERS and source != 'ec2':
return _('Credential is required for a cloud source.') return _('Credential is required for a cloud source.')
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'): elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.') return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'): elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
return _('Credentials of type insights and vault are disallowed for scm inventory sources.') return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
return _('Credentials of type kubernetes are required for openshift_virtualization inventory sources.')
return None return None
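For reference, a tiny sketch of the boto behavior the EC2 comment above relies on: with no keys supplied, boto3 walks its default credential chain (environment variables, shared config files, then EC2 instance-metadata credentials from an attached IAM role). The region is a placeholder:

import boto3

ec2 = boto3.client('ec2', region_name='us-east-1')  # no explicit credentials
# ec2.describe_instances()  # would succeed on an EC2 host with an IAM role attached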
def get_cloud_credential(self): def get_cloud_credential(self):
@@ -1410,7 +1399,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
return selected_groups return selected_groups
class CustomInventoryScript(CommonModelNameNotUnique): class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
class Meta: class Meta:
app_label = 'main' app_label = 'main'
ordering = ('name',) ordering = ('name',)
@@ -1641,42 +1630,6 @@ class satellite6(PluginFileInjector):
return ret return ret
class terraform(PluginFileInjector):
plugin_name = 'terraform_state'
namespace = 'cloud'
collection = 'terraform'
use_fqcn = True
def inventory_as_dict(self, inventory_update, private_data_dir):
ret = super().inventory_as_dict(inventory_update, private_data_dir)
credential = inventory_update.get_cloud_credential()
config_cred = credential.get_input('configuration')
if config_cred:
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
with os.fdopen(handle, 'w') as f:
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
f.write(config_cred)
ret['backend_config_files'] = to_container_path(path, private_data_dir)
return ret
def build_plugin_private_data(self, inventory_update, private_data_dir):
credential = inventory_update.get_cloud_credential()
private_data = {'credentials': {}}
gce_cred = credential.get_input('gce_credentials', default=None)
if gce_cred:
private_data['credentials'][credential] = gce_cred
return private_data
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
credential = inventory_update.get_cloud_credential()
cred_data = private_data_files['credentials']
if credential in cred_data:
env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
return env
class controller(PluginFileInjector): class controller(PluginFileInjector):
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
base_injector = 'template' base_injector = 'template'
@@ -1696,16 +1649,6 @@ class insights(PluginFileInjector):
use_fqcn = True use_fqcn = True
class openshift_virtualization(PluginFileInjector):
plugin_name = 'kubevirt'
base_injector = 'template'
namespace = 'kubevirt'
collection = 'core'
downstream_namespace = 'redhat'
downstream_collection = 'openshift_virtualization'
use_fqcn = True
class constructed(PluginFileInjector): class constructed(PluginFileInjector):
plugin_name = 'constructed' plugin_name = 'constructed'
namespace = 'ansible' namespace = 'ansible'

View File

@@ -205,9 +205,6 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
class Meta: class Meta:
app_label = 'main' app_label = 'main'
ordering = ('name',) ordering = ('name',)
permissions = [('execute_jobtemplate', 'Can run this job template')]
# Remove add permission, ability to add comes from use permission for inventory, project, credentials
default_permissions = ('change', 'delete', 'view')
job_type = models.CharField( job_type = models.CharField(
max_length=64, max_length=64,

View File

@@ -19,14 +19,13 @@ from django.utils.translation import gettext_lazy as _
from ansible_base.lib.utils.models import prevent_search from ansible_base.lib.utils.models import prevent_search
# AWX # AWX
from awx.main.models.rbac import Role, RoleAncestorEntry
from awx.main.models.rbac import Role, RoleAncestorEntry, to_permissions
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
from awx.main.utils.execution_environments import get_default_execution_environment from awx.main.utils.execution_environments import get_default_execution_environment
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
from awx.main.fields import AskForField from awx.main.fields import AskForField
from awx.main.constants import ACTIVE_STATES, org_role_to_permission from awx.main.constants import ACTIVE_STATES
logger = logging.getLogger('awx.main.models.mixins') logger = logging.getLogger('awx.main.models.mixins')
@@ -65,18 +64,6 @@ class ResourceMixin(models.Model):
@staticmethod @staticmethod
def _accessible_pk_qs(cls, accessor, role_field, content_types=None): def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
if cls._meta.model_name == 'organization' and role_field in org_role_to_permission:
# Organization roles can not use the DAB RBAC shortcuts
# like Organization.access_qs(user, 'change_jobtemplate') is needed
# not just Organization.access_qs(user, 'change') is needed
if accessor.is_superuser:
return cls.objects.values_list('id')
codename = org_role_to_permission[role_field]
return cls.access_ids_qs(accessor, codename, content_types=content_types)
return cls.access_ids_qs(accessor, to_permissions[role_field], content_types=content_types)
if accessor._meta.model_name == 'user': if accessor._meta.model_name == 'user':
ancestor_roles = accessor.roles.all() ancestor_roles = accessor.roles.all()
elif type(accessor) == Role: elif type(accessor) == Role:

View File

@@ -5,7 +5,6 @@ from copy import deepcopy
import datetime import datetime
import logging import logging
import json import json
import traceback
from django.db import models from django.db import models
from django.conf import settings from django.conf import settings
@@ -31,7 +30,6 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend from awx.main.notifications.irc_backend import IrcBackend
from awx.main.notifications.awssns_backend import AWSSNSBackend
logger = logging.getLogger('awx.main.models.notifications') logger = logging.getLogger('awx.main.models.notifications')
@@ -41,7 +39,6 @@ __all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique): class NotificationTemplate(CommonModelNameNotUnique):
NOTIFICATION_TYPES = [ NOTIFICATION_TYPES = [
('awssns', _('AWS SNS'), AWSSNSBackend),
('email', _('Email'), CustomEmailBackend), ('email', _('Email'), CustomEmailBackend),
('slack', _('Slack'), SlackBackend), ('slack', _('Slack'), SlackBackend),
('twilio', _('Twilio'), TwilioBackend), ('twilio', _('Twilio'), TwilioBackend),
@@ -396,11 +393,11 @@ class JobNotificationMixin(object):
'verbosity': 0, 'verbosity': 0,
}, },
'job_friendly_name': 'Job', 'job_friendly_name': 'Job',
'url': 'https://platformhost/#/jobs/playbook/1010', 'url': 'https://towerhost/#/jobs/playbook/1010',
'approval_status': 'approved', 'approval_status': 'approved',
'approval_node_name': 'Approve Me', 'approval_node_name': 'Approve Me',
'workflow_url': 'https://platformhost/#/jobs/workflow/1010', 'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
'job_metadata': """{'url': 'https://platformhost/$/jobs/playbook/13', 'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '', 'traceback': '',
'status': 'running', 'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00', 'started': '2019-08-07T21:46:38.362630+00:00',
@@ -487,29 +484,14 @@ class JobNotificationMixin(object):
if msg_template: if msg_template:
try: try:
msg = env.from_string(msg_template).render(**context) msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError) as e: except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = '\r\n'.join([e.message, ''.join(traceback.format_exception(None, e, e.__traceback__)).replace('\n', '\r\n')]) msg = ''
if body_template: if body_template:
try: try:
body = env.from_string(body_template).render(**context) body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError) as e: except (TemplateSyntaxError, UndefinedError, SecurityError):
body = '\r\n'.join([e.message, ''.join(traceback.format_exception(None, e, e.__traceback__)).replace('\n', '\r\n')]) body = ''
# https://datatracker.ietf.org/doc/html/rfc2822#section-2.2
# The body should contain at least two CRLFs; some clients will interpret
# an email with a blank body incorrectly, so we check for that
if len(body.strip().splitlines()) < 1:
# blank body
body = '\r\n'.join(
[
"The template rendering return a blank body.",
"Please check the template.",
"Refer to https://github.com/ansible/awx/issues/13983",
"for further information.",
]
)
return (msg, body) return (msg, body)
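A self-contained sketch of the render-and-report pattern above, using a sandboxed Jinja2 environment; StrictUndefined is an assumption here so that UndefinedError actually fires:

import traceback

from jinja2 import StrictUndefined
from jinja2.exceptions import SecurityError, TemplateSyntaxError, UndefinedError
from jinja2.sandbox import SandboxedEnvironment

def render_or_error(source, context):
    env = SandboxedEnvironment(undefined=StrictUndefined)
    try:
        return env.from_string(source).render(**context)
    except (TemplateSyntaxError, UndefinedError, SecurityError) as e:
        tb = ''.join(traceback.format_exception(None, e, e.__traceback__))
        # surface the failure in the message body, CRLF-delimited per RFC 2822
        return '\r\n'.join([str(e), tb.replace('\n', '\r\n')])

print(render_or_error('Job {{ job.id }} finished', {'job': {'id': 13}}))
print(render_or_error('{{ missing }}', {}))  # prints the error report instead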

View File

@@ -10,8 +10,6 @@ from django.contrib.sessions.models import Session
from django.utils.timezone import now as tz_now from django.utils.timezone import now as tz_now
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
# AWX # AWX
from awx.api.versioning import reverse from awx.api.versioning import reverse
@@ -35,12 +33,6 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
class Meta: class Meta:
app_label = 'main' app_label = 'main'
ordering = ('name',) ordering = ('name',)
permissions = [
('member_organization', 'Basic participation permissions for organization'),
('audit_organization', 'Audit everything inside the organization'),
]
# Remove add permission, only superuser can add
default_permissions = ('change', 'delete', 'view')
instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership') instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
galaxy_credentials = OrderedManyToManyField( galaxy_credentials = OrderedManyToManyField(
@@ -111,7 +103,6 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
approval_role = ImplicitRoleField( approval_role = ImplicitRoleField(
parent_role='admin_role', parent_role='admin_role',
) )
resource = AnsibleResourceField(primary_key_field="id")
def get_absolute_url(self, request=None): def get_absolute_url(self, request=None):
return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request) return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)
@@ -143,7 +134,6 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
app_label = 'main' app_label = 'main'
unique_together = [('organization', 'name')] unique_together = [('organization', 'name')]
ordering = ('organization__name', 'name') ordering = ('organization__name', 'name')
permissions = [('member_team', 'Inherit all roles assigned to this team')]
organization = models.ForeignKey( organization = models.ForeignKey(
'Organization', 'Organization',
@@ -161,7 +151,6 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
read_role = ImplicitRoleField( read_role = ImplicitRoleField(
parent_role=['organization.auditor_role', 'member_role'], parent_role=['organization.auditor_role', 'member_role'],
) )
resource = AnsibleResourceField(primary_key_field="id")
def get_absolute_url(self, request=None): def get_absolute_url(self, request=None):
return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request) return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)

View File

@@ -259,7 +259,6 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
class Meta: class Meta:
app_label = 'main' app_label = 'main'
ordering = ('id',) ordering = ('id',)
permissions = [('update_project', 'Can run a project update'), ('use_project', 'Can use project in a job template')]
default_environment = models.ForeignKey( default_environment = models.ForeignKey(
'ExecutionEnvironment', 'ExecutionEnvironment',

View File

@@ -7,30 +7,14 @@ import threading
import contextlib import contextlib
import re import re
# django-rest-framework
from rest_framework.serializers import ValidationError
# crum to impersonate users
from crum import impersonate
# Django # Django
from django.db import models, transaction, connection from django.db import models, transaction, connection
from django.db.models.signals import m2m_changed
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.apps import apps
from django.conf import settings
# Ansible_base app
from ansible_base.rbac.models import RoleDefinition
from ansible_base.lib.utils.models import get_type_for_model
# AWX # AWX
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.main.migrations._dab_rbac import build_role_map, get_permissions_for_role
from awx.main.constants import role_name_to_perm_mapping, org_role_to_permission
__all__ = [ __all__ = [
'Role', 'Role',
@@ -91,11 +75,6 @@ role_descriptions = {
} }
to_permissions = {}
for k, v in role_name_to_perm_mapping.items():
to_permissions[k] = v[0].strip('_')
tls = threading.local() # thread local storage tls = threading.local() # thread local storage
@@ -107,8 +86,10 @@ def check_singleton(func):
""" """
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
sys_admin = Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
sys_audit = Role.singleton(ROLE_SINGLETON_SYSTEM_AUDITOR)
user = args[0] user = args[0]
if user.is_superuser or user.is_system_auditor: if user in sys_admin or user in sys_audit:
if len(args) == 2: if len(args) == 2:
return args[1] return args[1]
return Role.objects.all() return Role.objects.all()
@@ -188,24 +169,6 @@ class Role(models.Model):
def __contains__(self, accessor): def __contains__(self, accessor):
if accessor._meta.model_name == 'user': if accessor._meta.model_name == 'user':
if accessor.is_superuser:
return True
if self.role_field == 'system_administrator':
return accessor.is_superuser
elif self.role_field == 'system_auditor':
return accessor.is_system_auditor
elif self.role_field in ('read_role', 'auditor_role') and accessor.is_system_auditor:
return True
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
if self.content_object and self.content_object._meta.model_name == 'organization' and self.role_field in org_role_to_permission:
codename = org_role_to_permission[self.role_field]
return accessor.has_obj_perm(self.content_object, codename)
if self.role_field not in to_permissions:
raise Exception(f'{self.role_field} evaluated but not a translatable permission')
return accessor.has_obj_perm(self.content_object, to_permissions[self.role_field])
return self.ancestors.filter(members=accessor).exists() return self.ancestors.filter(members=accessor).exists()
else: else:
raise RuntimeError(f'Role evaluations only valid for users, received {accessor}') raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')
@@ -317,9 +280,6 @@ class Role(models.Model):
# #
# #
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
return
if len(additions) == 0 and len(removals) == 0: if len(additions) == 0 and len(removals) == 0:
return return
@@ -452,12 +412,6 @@ class Role(models.Model):
in their organization, but some of those roles descend from in their organization, but some of those roles descend from
organization admin_role, but not auditor_role. organization admin_role, but not auditor_role.
""" """
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
from ansible_base.rbac.models import RoleEvaluation
q = RoleEvaluation.objects.filter(role__in=user.has_roles.all()).values_list('object_id', 'content_type_id').query
return roles_qs.extra(where=[f'(object_id,content_type_id) in ({q})'])
return roles_qs.filter( return roles_qs.filter(
id__in=RoleAncestorEntry.objects.filter( id__in=RoleAncestorEntry.objects.filter(
descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list( descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
@@ -480,13 +434,6 @@ class Role(models.Model):
return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR] return self.singleton_name in [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]
class AncestorManager(models.Manager):
def get_queryset(self):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
raise RuntimeError('The old RBAC system has been disabled, this should never be called')
return super(AncestorManager, self).get_queryset()
class RoleAncestorEntry(models.Model): class RoleAncestorEntry(models.Model):
class Meta: class Meta:
app_label = 'main' app_label = 'main'
@@ -504,8 +451,6 @@ class RoleAncestorEntry(models.Model):
content_type_id = models.PositiveIntegerField(null=False) content_type_id = models.PositiveIntegerField(null=False)
object_id = models.PositiveIntegerField(null=False) object_id = models.PositiveIntegerField(null=False)
objects = AncestorManager()
def role_summary_fields_generator(content_object, role_field): def role_summary_fields_generator(content_object, role_field):
global role_descriptions global role_descriptions
@@ -534,185 +479,3 @@ def role_summary_fields_generator(content_object, role_field):
summary['name'] = role_names[role_field] summary['name'] = role_names[role_field]
summary['id'] = getattr(content_object, '{}_id'.format(role_field)) summary['id'] = getattr(content_object, '{}_id'.format(role_field))
return summary return summary
# ----------------- Custom Role Compatibility -------------------------
# The following are methods to connect this (old) RBAC system to the new
# system which allows custom roles
# this follows the ORM interface layer documented in docs/rbac.md
def get_role_codenames(role):
obj = role.content_object
if obj is None:
return
f = obj._meta.get_field(role.role_field)
parents, children = build_role_map(apps)
return [perm.codename for perm in get_permissions_for_role(f, children, apps)]
def get_role_definition(role):
"""Given a old-style role, this gives a role definition in the new RBAC system for it"""
obj = role.content_object
if obj is None:
return
f = obj._meta.get_field(role.role_field)
action_name = f.name.rsplit("_", 1)[0]
model_print = type(obj).__name__
rd_name = f'{model_print} {action_name.title()} Compat'
perm_list = get_role_codenames(role)
defaults = {
'content_type_id': role.content_type_id,
'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
}
with impersonate(None):
try:
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
except ValidationError:
# This is a tricky case - practically speaking, users should not be allowed to create team roles
# or roles that include the team member permission.
# If we need to create this for compatibility purposes then we will create it as a managed non-editable role
defaults['managed'] = True
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
return rd
def get_role_from_object_role(object_role):
"""
Given an object role from the new system, return the corresponding role from the old system
This reverses the naming from get_role_definition and from the ANSIBLE_BASE_ROLE_PRECREATE setting.
"""
rd = object_role.role_definition
if rd.name.endswith(' Compat'):
model_name, role_name, _ = rd.name.split()
role_name = role_name.lower()
role_name += '_role'
elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2:
# cases like "Organization Project Admin"
model_name, target_model_name, role_name = rd.name.split()
role_name = role_name.lower()
model_cls = apps.get_model('main', target_model_name)
target_model_name = get_type_for_model(model_cls)
# exception cases completely specific to one model naming convention
if target_model_name == 'notification_template':
target_model_name = 'notification'
elif target_model_name == 'workflow_job_template':
target_model_name = 'workflow'
role_name = f'{target_model_name}_admin_role'
elif rd.name.endswith(' Admin'):
# cases like "project-admin"
role_name = 'admin_role'
elif rd.name == 'Organization Audit':
role_name = 'auditor_role'
else:
model_name, role_name = rd.name.split()
role_name = role_name.lower()
role_name += '_role'
return getattr(object_role.content_object, role_name)
def give_or_remove_permission(role, actor, giving=True):
obj = role.content_object
if obj is None:
return
rd = get_role_definition(role)
rd.give_or_remove_permission(actor, obj, giving=giving)
class SyncEnabled(threading.local):
def __init__(self):
self.enabled = True
rbac_sync_enabled = SyncEnabled()
@contextlib.contextmanager
def disable_rbac_sync():
try:
previous_value = rbac_sync_enabled.enabled
rbac_sync_enabled.enabled = False
yield
finally:
rbac_sync_enabled.enabled = previous_value
def give_creator_permissions(user, obj):
assignment = RoleDefinition.objects.give_creator_permissions(user, obj)
if assignment:
with disable_rbac_sync():
old_role = get_role_from_object_role(assignment.object_role)
old_role.members.add(user)
def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
if action.startswith('pre_'):
return
if not rbac_sync_enabled.enabled:
return
if action == 'post_add':
is_giving = True
elif action == 'post_remove':
is_giving = False
elif action == 'post_clear':
raise RuntimeError('Clearing of role members not supported')
if reverse:
user = instance
else:
role = instance
for user_or_role_id in pk_set:
if reverse:
role = Role.objects.get(pk=user_or_role_id)
else:
user = get_user_model().objects.get(pk=user_or_role_id)
give_or_remove_permission(role, user, giving=is_giving)
def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
if action.startswith('pre_'):
return
if action == 'post_add':
is_giving = True
elif action == 'post_remove':
is_giving = False
elif action == 'post_clear':
raise RuntimeError('Clearing of role members not supported')
if reverse:
parent_role = instance
else:
child_role = instance
for role_id in pk_set:
if reverse:
try:
child_role = Role.objects.get(id=role_id)
except Role.DoesNotExist:
continue
else:
try:
parent_role = Role.objects.get(id=role_id)
except Role.DoesNotExist:
continue
# Err on the side of caution: avoid running this if triggered from implicit_parents management;
# only do anything if we know for sure this is a non-implicit team role
if parent_role.role_field == 'member_role' and parent_role.content_type.model == 'team':
# Team internal parents are member_role->read_role and admin_role->member_role
# for the same object, this parenting will also be implicit_parents management
# do nothing for internal parents, but OTHER teams may still be assigned permissions to a team
if (child_role.content_type_id == parent_role.content_type_id) and (child_role.object_id == parent_role.object_id):
return
from awx.main.models.organization import Team
team = Team.objects.get(pk=parent_role.object_id)
give_or_remove_permission(child_role, team, giving=is_giving)
m2m_changed.connect(sync_members_to_new_rbac, Role.members.through)
m2m_changed.connect(sync_parents_to_new_rbac, Role.parents.through)
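The two receivers above share one shape; a skeleton of it, assuming a Django app context (give_or_remove_permission is the helper defined earlier in this hunk, and `reverse` indicates the signal fired from the other side of the relation, e.g. user.roles.add() instead of role.members.add()):

from django.db.models.signals import m2m_changed

def sync_membership(instance, action, model, pk_set, reverse, **kwargs):
    if action.startswith('pre_'):
        return  # only act once the change has hit the through table
    if action == 'post_clear':
        raise RuntimeError('Clearing of role members not supported')
    giving = (action == 'post_add')
    for pk in pk_set:
        if reverse:
            role, user = model.objects.get(pk=pk), instance
        else:
            role, user = instance, model.objects.get(pk=pk)
        give_or_remove_permission(role, user, giving=giving)

# m2m_changed.connect(sync_membership, Role.members.through)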

View File

@@ -17,7 +17,7 @@ from collections import OrderedDict
# Django # Django
from django.conf import settings from django.conf import settings
from django.db import models, connection, transaction from django.db import models, connection
from django.core.exceptions import NON_FIELD_ERRORS from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.utils.timezone import now from django.utils.timezone import now
@@ -31,15 +31,13 @@ from rest_framework.exceptions import ParseError
from polymorphic.models import PolymorphicModel from polymorphic.models import PolymorphicModel
from ansible_base.lib.utils.models import prevent_search, get_type_for_model from ansible_base.lib.utils.models import prevent_search, get_type_for_model
from ansible_base.rbac import permission_registry
# AWX # AWX
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
from awx.main.dispatch import get_task_queuename from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.control import Control as ControlDispatcher from awx.main.dispatch.control import Control as ControlDispatcher
from awx.main.registrar import activity_stream_registrar from awx.main.registrar import activity_stream_registrar
from awx.main.models.mixins import TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
from awx.main.models.rbac import to_permissions
from awx.main.utils.common import ( from awx.main.utils.common import (
camelcase_to_underscore, camelcase_to_underscore,
get_model_for_type, get_model_for_type,
@@ -198,7 +196,9 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
@classmethod @classmethod
def _submodels_with_roles(cls): def _submodels_with_roles(cls):
return [c for c in cls.__subclasses__() if permission_registry.is_registered(c)] ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
ct_dict = ContentType.objects.get_for_models(*ujt_classes)
return [ct.id for ct in ct_dict.values()]
@classmethod @classmethod
def accessible_pk_qs(cls, accessor, role_field): def accessible_pk_qs(cls, accessor, role_field):
@@ -210,23 +210,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
# do not use this if in a subclass # do not use this if in a subclass
if cls != UnifiedJobTemplate: if cls != UnifiedJobTemplate:
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field) return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
from ansible_base.rbac.models import RoleEvaluation return ResourceMixin._accessible_pk_qs(cls, accessor, role_field, content_types=cls._submodels_with_roles())
action = to_permissions[role_field]
# Special condition for super auditor
role_subclasses = cls._submodels_with_roles()
role_cts = ContentType.objects.get_for_models(*role_subclasses).values()
all_codenames = {f'{action}_{cls._meta.model_name}' for cls in role_subclasses}
if not (all_codenames - accessor.singleton_permissions()):
qs = cls.objects.filter(polymorphic_ctype__in=role_cts)
return qs.values_list('id', flat=True)
return (
RoleEvaluation.objects.filter(role__in=accessor.has_roles.all(), codename__in=all_codenames, content_type_id__in=[ct.id for ct in role_cts])
.values_list('object_id')
.distinct()
)
def _perform_unique_checks(self, unique_checks): def _perform_unique_checks(self, unique_checks):
# Handle the list of unique fields returned above. Replace with an # Handle the list of unique fields returned above. Replace with an
@@ -280,14 +264,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
if new_next_schedule: if new_next_schedule:
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run: if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
return # no-op, common for infrequent schedules return # no-op, common for infrequent schedules
self.next_schedule = new_next_schedule
# If in a transaction, use select_for_update to lock the next schedule row, which
# prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
if transaction.get_autocommit():
self.next_schedule = related_schedules.first()
else:
self.next_schedule = related_schedules.select_for_update().first()
self.next_job_run = new_next_schedule.next_run self.next_job_run = new_next_schedule.next_run
self.save(update_fields=['next_schedule', 'next_job_run']) self.save(update_fields=['next_schedule', 'next_job_run'])
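A sketch of the locking choice described in the comment above, assuming a Django queryset of schedules; select_for_update() only holds a row lock while a transaction is open, so outside one a plain read is used:

from django.db import transaction

def pick_next_schedule(related_schedules):
    if transaction.get_autocommit():
        # no surrounding transaction: a row lock would not persist anyway
        return related_schedules.first()
    # inside a transaction: lock the row so a concurrent delete blocks
    # until this transaction finishes
    return related_schedules.select_for_update().first()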
@@ -837,7 +814,7 @@ class UnifiedJob(
update_fields.append(key) update_fields.append(key)
if parent_instance: if parent_instance:
if self.status in ('pending', 'running'): if self.status in ('pending', 'waiting', 'running'):
if parent_instance.current_job != self: if parent_instance.current_job != self:
parent_instance_set('current_job', self) parent_instance_set('current_job', self)
# Update parent with all the 'good' states of it's child # Update parent with all the 'good' states of it's child
@@ -874,7 +851,7 @@ class UnifiedJob(
# If this job already exists in the database, retrieve a copy of # If this job already exists in the database, retrieve a copy of
# the job in its prior state. # the job in its prior state.
# If update_fields are given without status, then that indicates no change # If update_fields are given without status, then that indicates no change
if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)): if self.pk and ((not update_fields) or ('status' in update_fields)):
self_before = self.__class__.objects.get(pk=self.pk) self_before = self.__class__.objects.get(pk=self.pk)
if self_before.status != self.status: if self_before.status != self.status:
status_before = self_before.status status_before = self_before.status
@@ -916,8 +893,7 @@ class UnifiedJob(
update_fields.append('elapsed') update_fields.append('elapsed')
# Ensure that the job template information is current. # Ensure that the job template information is current.
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking if self.unified_job_template != self._get_parent_instance():
if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
self.unified_job_template = self._get_parent_instance() self.unified_job_template = self._get_parent_instance()
if 'unified_job_template' not in update_fields: if 'unified_job_template' not in update_fields:
update_fields.append('unified_job_template') update_fields.append('unified_job_template')
@@ -930,9 +906,8 @@ class UnifiedJob(
# Okay; we're done. Perform the actual save. # Okay; we're done. Perform the actual save.
result = super(UnifiedJob, self).save(*args, **kwargs) result = super(UnifiedJob, self).save(*args, **kwargs)
# If status changed, update the parent instance # If status changed, update the parent instance.
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking if self.status != status_before:
if self.status != status_before and self.status != 'waiting':
# Update parent outside of the transaction for Job w/ allow_simultaneous=True # Update parent outside of the transaction for Job w/ allow_simultaneous=True
# This dodges lock contention at the expense of the foreign key not being # This dodges lock contention at the expense of the foreign key not being
# completely correct. # completely correct.
@@ -1624,8 +1599,7 @@ class UnifiedJob(
extra["controller_node"] = self.controller_node or "NOT_SET" extra["controller_node"] = self.controller_node or "NOT_SET"
elif state == "execution_node_chosen": elif state == "execution_node_chosen":
extra["execution_node"] = self.execution_node or "NOT_SET" extra["execution_node"] = self.execution_node or "NOT_SET"
logger_job_lifecycle.info(msg, extra=extra)
logger_job_lifecycle.info(f"{msg} {json.dumps(extra)}")
@property @property
def launched_by(self): def launched_by(self):

View File

@@ -467,10 +467,6 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
class Meta: class Meta:
app_label = 'main' app_label = 'main'
permissions = [
('execute_workflowjobtemplate', 'Can run this workflow job template'),
('approve_workflowjobtemplate', 'Can approve steps in this workflow job template'),
]
notification_templates_approvals = models.ManyToManyField( notification_templates_approvals = models.ManyToManyField(
"NotificationTemplate", "NotificationTemplate",

View File

@@ -1,70 +0,0 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
import logging
import boto3
from botocore.exceptions import ClientError
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.awssns_backend')
WEBSOCKET_TIMEOUT = 30
class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {
"aws_region": {"label": "AWS Region", "type": "string", "default": ""},
"aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
"aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
"aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
"sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
}
recipient_parameter = "sns_topic_arn"
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = CustomNotificationBase.job_metadata_messages
def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
session = boto3.session.Session()
client_config = {"service_name": 'sns'}
if aws_region:
client_config["region_name"] = aws_region
if aws_secret_access_key:
client_config["aws_secret_access_key"] = aws_secret_access_key
if aws_access_key_id:
client_config["aws_access_key_id"] = aws_access_key_id
if aws_session_token:
client_config["aws_session_token"] = aws_session_token
self.client = session.client(**client_config)
super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
def _sns_publish(self, topic_arn, message):
self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
def format_body(self, body):
if isinstance(body, str):
try:
body = json.loads(body)
except json.JSONDecodeError:
pass
if isinstance(body, dict):
body = json.dumps(body)
# convert dict body to json string
return body
def send_messages(self, messages):
sent_messages = 0
for message in messages:
sns_topic_arn = str(message.recipients()[0])
try:
self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
sent_messages += 1
except ClientError as error:
if not self.fail_silently:
raise error
return sent_messages
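Hypothetical usage of a backend like the one above; the region, topic ARN, and payload are placeholders, and a real publish requires valid AWS credentials:

import json

import boto3

session = boto3.session.Session()
client = session.client(service_name='sns', region_name='us-east-1')  # placeholder region
client.publish(
    TopicArn='arn:aws:sns:us-east-1:123456789012:awx-notifications',  # placeholder ARN
    Message=json.dumps({'status': 'success', 'job_id': 13}),
    MessageAttributes={},
)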

View File

@@ -1,6 +1,5 @@
# Copyright (c) 2019 Ansible, Inc. # Copyright (c) 2019 Ansible, Inc.
# All Rights Reserved. # All Rights Reserved.
# -*-coding:utf-8-*-
class CustomNotificationBase(object): class CustomNotificationBase(object):
@@ -32,15 +31,3 @@ class CustomNotificationBase(object):
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None}, "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
}, },
} }
job_metadata_messages = {
"started": {"body": "{{ job_metadata }}"},
"success": {"body": "{{ job_metadata }}"},
"error": {"body": "{{ job_metadata }}"},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
},
}

View File

@@ -27,7 +27,17 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
sender_parameter = None sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}" DEFAULT_BODY = "{{ job_metadata }}"
default_messages = CustomNotificationBase.job_metadata_messages default_messages = {
"started": {"body": DEFAULT_BODY},
"success": {"body": DEFAULT_BODY},
"error": {"body": DEFAULT_BODY},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
},
}
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs): def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
self.http_method = http_method self.http_method = http_method

View File

@@ -4,15 +4,13 @@ import logging
from django.conf import settings from django.conf import settings
from django.urls import re_path from django.urls import re_path
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter from channels.routing import ProtocolTypeRouter, URLRouter
from ansible_base.lib.channels.middleware import DrfAuthMiddlewareStack
from . import consumers from . import consumers
logger = logging.getLogger('awx.main.routing') logger = logging.getLogger('awx.main.routing')
_application = None
class AWXProtocolTypeRouter(ProtocolTypeRouter): class AWXProtocolTypeRouter(ProtocolTypeRouter):
@@ -28,95 +26,13 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
class MultipleURLRouterAdapter:
"""
Django channels doesn't nicely support Auth_1(urls_1), Auth_2(urls_2), ..., Auth_n(urls_n)
This class allows associating a websocket url with an auth
Ordering matters. The first matching url will be used.
"""
def __init__(self, *auths):
self._auths = [a for a in auths]
async def __call__(self, scope, receive, send):
"""
Loop through the list of passed in URLRouter's (they may or may not be wrapped by auth).
We know we have exhausted the list of URLRouter patterns when we get a
ValueError('No route found for path %s'). When that happens, move onto the next
URLRouter.
If the final URLRouter raises an error, re-raise it in the end.
We know that we found a match when no error is raised, end the loop.
"""
last_index = len(self._auths) - 1
for i, auth in enumerate(self._auths):
try:
return await auth.__call__(scope, receive, send)
except ValueError as e:
if str(e).startswith('No route found for path'):
# Only surface the error if on the last URLRouter
if i == last_index:
raise
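A pure-Python sketch of the first-match fallback strategy this docstring describes, independent of Django Channels; the handlers and scope strings are made up for the demo:

import asyncio

class FallbackRouter:
    """Try each router in order; fall through on 'No route found' errors."""

    def __init__(self, *routers):
        self._routers = list(routers)

    async def __call__(self, scope):
        last = len(self._routers) - 1
        for i, router in enumerate(self._routers):
            try:
                return await router(scope)
            except ValueError as e:
                if not str(e).startswith('No route found for path') or i == last:
                    raise

async def relay(scope):
    if scope != '/websocket/relay/':
        raise ValueError('No route found for path %s' % scope)
    return 'relay handler'

async def ws(scope):
    if scope != '/websocket/':
        raise ValueError('No route found for path %s' % scope)
    return 'websocket handler'

print(asyncio.run(FallbackRouter(relay, ws)('/websocket/')))  # -> websocket handler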
websocket_urlpatterns = [ websocket_urlpatterns = [
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()), re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
]
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
websocket_relay_urlpatterns = [
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()), re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
] ]
application = AWXProtocolTypeRouter(
def application_func(cls=AWXProtocolTypeRouter) -> ProtocolTypeRouter: {
return cls( 'websocket': AuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
{ }
'websocket': MultipleURLRouterAdapter( )
URLRouter(websocket_relay_urlpatterns),
DrfAuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
)
}
)
def __getattr__(name: str) -> ProtocolTypeRouter:
"""
Defer instantiating application.
For testing, we just need it to NOT run on import.
https://peps.python.org/pep-0562/#specification
Normally, someone would get application from this module via:
from awx.main.routing import application
and do something with the application:
application.do_something()
What does the callstack look like when the import runs?
...
awx.main.routing.__getattribute__(...) # <-- we don't define this so NOOP as far as we are concerned
if '__getattr__' in awx.main.routing.__dict__: # <-- this triggers the function we are in
return awx.main.routing.__dict__.__getattr__("application")
Why isn't this function simply implemented as:
def __getattr__(name):
if not _application:
_application = application_func()
return _application
It could. I manually tested it and it passes test_routing.py.
But my understanding after reading the PEP-0562 specification link above is that
performance would be a bit worse due to the extra __getattribute__ calls when
we reference non-global variables.
"""
if name == "application":
globs = globals()
if not globs['_application']:
globs['_application'] = application_func()
return globs['_application']
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
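
The removed `__getattr__` above is the PEP 562 module-level hook, used to defer building the ASGI application until something actually asks for it. A minimal, self-contained sketch of the same pattern (module and attribute names here are illustrative, not AWX's):

# lazy_module.py -- a sketch of the PEP 562 deferral pattern shown above.
_instance = None


def _build_instance():
    # Stand-in for an expensive constructor (e.g. assembling a router).
    return object()


def __getattr__(name):
    # Invoked only when normal module attribute lookup fails (PEP 562),
    # so `import lazy_module` alone never triggers _build_instance().
    global _instance
    if name == "instance":
        if _instance is None:
            _instance = _build_instance()
        return _instance
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

With this in place, `from lazy_module import instance` builds the object on first access, while a plain `import lazy_module` stays side-effect free, which is exactly the testability property the removed docstring describes.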

View File

@@ -138,8 +138,7 @@ class TaskBase:
         # Lock
         with task_manager_bulk_reschedule():
-            lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000  # convert to milliseconds
-            with advisory_lock(f"{self.prefix}_lock", lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
+            with advisory_lock(f"{self.prefix}_lock", wait=False) as acquired:
                 with transaction.atomic():
                     if acquired is False:
                         logger.debug(f"Not running {self.prefix} scheduler, another task holds lock")

View File

@@ -126,8 +126,6 @@ def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwarg
 def sync_superuser_status_to_rbac(instance, **kwargs):
     'When the is_superuser flag is changed on a user, reflect that in the membership of the System Admnistrator role'
-    if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-        return
     update_fields = kwargs.get('update_fields', None)
     if update_fields and 'is_superuser' not in update_fields:
         return
@@ -139,8 +137,6 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
 def sync_rbac_to_superuser_status(instance, sender, **kwargs):
     'When the is_superuser flag is false but a user has the System Admin role, update the database to reflect that'
-    if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-        return
     if kwargs['action'] in ['post_add', 'post_remove', 'post_clear']:
         new_status_value = bool(kwargs['action'] == 'post_add')
         if hasattr(instance, 'singleton_name'):  # duck typing, role.members.add() vs user.roles.add()
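
Worth noting for the surviving lines: the `update_fields` check is the standard way to keep a `post_save` receiver cheap, since `save(update_fields=[...])` tells the handler exactly which columns changed. A small sketch of that guard (the handler body is hypothetical):

from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver


@receiver(post_save, sender=User)
def sync_flag_to_role(instance, **kwargs):
    update_fields = kwargs.get('update_fields', None)
    # user.save(update_fields=['last_login']) exits here immediately;
    # a bare user.save() passes update_fields=None and falls through.
    if update_fields and 'is_superuser' not in update_fields:
        return
    # ... reconcile role membership against instance.is_superuser here ...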

View File

@@ -29,7 +29,7 @@ class RunnerCallback:
         self.safe_env = {}
         self.event_ct = 0
         self.model = model
-        self.update_attempts = int(getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE) / 5)
+        self.update_attempts = int(settings.DISPATCHER_DB_DOWNTIME_TOLERANCE / 5)
         self.wrapup_event_dispatched = False
         self.artifacts_processed = False
         self.extra_update_fields = {}

View File

@@ -114,7 +114,7 @@ class BaseTask(object):
     def __init__(self):
         self.cleanup_paths = []
-        self.update_attempts = int(getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE) / 5)
+        self.update_attempts = int(settings.DISPATCHER_DB_DOWNTIME_TOLERANCE / 5)
         self.runner_callback = self.callback_class(model=self.model)
 
     def update_model(self, pk, _attempt=0, **updates):
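
The `getattr` removed in these two hunks is a backwards-compatibility shim: if a deployment still defines the misspelled legacy setting `DISPATCHER_DB_DOWNTOWN_TOLLERANCE`, that value wins; otherwise the corrected `DISPATCHER_DB_DOWNTIME_TOLERANCE` applies. The shape of the shim, as a sketch (the helper name is illustrative):

from django.conf import settings


def effective_db_downtime_tolerance():
    # Prefer the legacy (misspelled) name when a deployment sets it,
    # otherwise fall back to the current setting.
    return getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE)


# The surrounding code retries roughly every 5 seconds, hence:
# update_attempts = int(effective_db_downtime_tolerance() / 5)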

View File

@@ -49,70 +49,6 @@ class ReceptorConnectionType(Enum):
     STREAMTLS = 2
 
 
-"""
-Translate receptorctl messages that come in over stdout into
-structured messages. Currently, these are error messages.
-"""
-
-
-class ReceptorErrorBase:
-    _MESSAGE = 'Receptor Error'
-
-    def __init__(self, node: str = 'N/A', state_name: str = 'N/A'):
-        self.node = node
-        self.state_name = state_name
-
-    def __str__(self):
-        return f"{self.__class__.__name__} '{self._MESSAGE}' on node '{self.node}' with state '{self.state_name}'"
-
-
-class WorkUnitError(ReceptorErrorBase):
-    _MESSAGE = 'unknown work unit '
-
-    def __init__(self, work_unit_id: str, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.work_unit_id = work_unit_id
-
-    def __str__(self):
-        return f"{super().__str__()} work unit id '{self.work_unit_id}'"
-
-
-class WorkUnitCancelError(WorkUnitError):
-    _MESSAGE = 'error cancelling remote unit: unknown work unit '
-
-
-class WorkUnitResultsError(WorkUnitError):
-    _MESSAGE = 'Failed to get results: unknown work unit '
-
-
-class UnknownError(ReceptorErrorBase):
-    _MESSAGE = 'Unknown receptor ctl error'
-
-    def __init__(self, msg, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self._MESSAGE = msg
-
-
-class FuzzyError:
-    def __new__(self, e: RuntimeError, node: str, state_name: str):
-        """
-        At the time of writing this comment all of the sub-classes detection
-        is centralized in this parent class. It's like a Router().
-        Someone may find it better to push down the error detection logic into
-        each sub-class.
-        """
-        msg = e.args[0]
-        common_startswith = (WorkUnitCancelError, WorkUnitResultsError, WorkUnitError)
-        for klass in common_startswith:
-            if msg.startswith(klass._MESSAGE):
-                work_unit_id = msg[len(klass._MESSAGE) :]
-                return klass(work_unit_id, node=node, state_name=state_name)
-        return UnknownError(msg, node=node, state_name=state_name)
-
-
 def read_receptor_config():
     # for K8S deployments, getting a lock is necessary as another process
     # may be re-writing the config at this time
@@ -249,7 +185,6 @@ def run_until_complete(node, timing_data=None, **kwargs):
         timing_data['transmit_timing'] = run_start - transmit_start
     run_timing = 0.0
     stdout = ''
-    state_name = 'local var never set'
 
     try:
         resultfile = receptor_ctl.get_work_results(unit_id)
@@ -270,33 +205,13 @@
             stdout = resultfile.read()
             stdout = str(stdout, encoding='utf-8')
-    except RuntimeError as e:
-        receptor_e = FuzzyError(e, node, state_name)
-        if type(receptor_e) in (
-            WorkUnitError,
-            WorkUnitResultsError,
-        ):
-            logger.warning(f'While consuming job results: {receptor_e}')
-        else:
-            raise
     finally:
         if settings.RECEPTOR_RELEASE_WORK:
-            try:
-                res = receptor_ctl.simple_command(f"work release {unit_id}")
-                if res != {'released': unit_id}:
-                    logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
-                receptor_ctl.close()
-            except RuntimeError as e:
-                receptor_e = FuzzyError(e, node, state_name)
-                if type(receptor_e) in (
-                    WorkUnitError,
-                    WorkUnitCancelError,
-                ):
-                    logger.warning(f"While releasing work: {receptor_e}")
-                else:
-                    logger.error(f"While releasing work: {receptor_e}")
+            res = receptor_ctl.simple_command(f"work release {unit_id}")
+            if res != {'released': unit_id}:
+                logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
+            receptor_ctl.close()
 
     if state_name.lower() == 'failed':
         work_detail = status.get('Detail', '')
@@ -360,7 +275,7 @@ def _convert_args_to_cli(vargs):
     args = ['cleanup']
     for option in ('exclude_strings', 'remove_images'):
         if vargs.get(option):
-            args.append('--{}="{}"'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
+            args.append('--{}={}'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
     for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
         if vargs.get(option) is True:
             args.append('--{}'.format(option.replace('_', '-')))
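
A quick note on the change above: the two format strings differ only in whether literal quote characters end up embedded in the generated flag. With hypothetical values:

option = 'exclude_strings'
values = ['foo', 'bar']

quoted = '--{}="{}"'.format(option.replace('_', '-'), ' '.join(values))
plain = '--{}={}'.format(option.replace('_', '-'), ' '.join(values))
print(quoted)  # --exclude-strings="foo bar"  (the quotes are part of the string)
print(plain)   # --exclude-strings=foo bar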
@@ -405,11 +320,10 @@ class AWXReceptorJob:
         finally:
             # Make sure to always release the work unit if we established it
             if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
-                if settings.RECPETOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error':
-                    try:
-                        receptor_ctl.simple_command(f"work release {self.unit_id}")
-                    except Exception:
-                        logger.exception(f"Error releasing work unit {self.unit_id}.")
+                try:
+                    receptor_ctl.simple_command(f"work release {self.unit_id}")
+                except Exception:
+                    logger.exception(f"Error releasing work unit {self.unit_id}.")
 
     def _run_internal(self, receptor_ctl):
         # Create a socketpair. Where the left side will be used for writing our payload
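
Since the deleted error classes earlier in this file read as a small router, a usage sketch may help: `FuzzyError.__new__` prefix-matches the RuntimeError's message and hands back the matching typed error, or `UnknownError` when nothing matches. Hypothetical driver code against the classes shown above:

err = FuzzyError(RuntimeError('unknown work unit xyz123'), node='node1', state_name='Failed')
print(type(err).__name__)  # WorkUnitError
print(err.work_unit_id)    # xyz123
print(err)                 # __str__ includes node, state, and work unit id

err = FuzzyError(RuntimeError('something else entirely'), node='node1', state_name='Failed')
print(type(err).__name__)  # UnknownError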

View File

@@ -6,7 +6,6 @@ import itertools
 import json
 import logging
 import os
-import psycopg
 from io import StringIO
 from contextlib import redirect_stdout
 import shutil
@@ -36,9 +35,6 @@ import ansible_runner.cleanup
 # dateutil
 from dateutil.parser import parse as parse_date
 
-# django-ansible-base
-from ansible_base.resource_registry.tasks.sync import SyncExecutor
-
 # AWX
 from awx import __version__ as awx_application_version
 from awx.main.access import access_registry
@@ -54,7 +50,7 @@ from awx.main.models import (
     Job,
     convert_jsonfields,
 )
-from awx.main.constants import ACTIVE_STATES, ERROR_STATES
+from awx.main.constants import ACTIVE_STATES
 from awx.main.dispatch.publish import task
 from awx.main.dispatch import get_task_queuename, reaper
 from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
@@ -420,7 +416,7 @@ def handle_removed_image(remove_images=None):
 @task(queue=get_task_queuename)
 def cleanup_images_and_files():
-    _cleanup_images_and_files(image_prune=True)
+    _cleanup_images_and_files()
 
 
 @task(queue=get_task_queuename)
@@ -634,18 +630,10 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
                 logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))
             except DatabaseError as e:
-                cause = e.__cause__
-                if cause and hasattr(cause, 'sqlstate'):
-                    sqlstate = cause.sqlstate
-                    sqlstate_str = psycopg.errors.lookup(sqlstate)
-                    logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-                    if sqlstate == psycopg.errors.NoData:
-                        logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
-                    else:
-                        logger.exception("Error marking {} as lost.".format(other_inst.hostname))
+                if 'did not affect any rows' in str(e):
+                    logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
                 else:
-                    logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))
+                    logger.exception('Error marking {} as lost'.format(other_inst.hostname))
 
     # Run local reaper
     if worker_tasks is not None:
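
The left-hand side of this hunk leaned on psycopg 3's error metadata instead of string matching: Django chains the driver exception onto `__cause__`, which carries a five-character SQLSTATE code, and `psycopg.errors.lookup()` maps that code back to its exception class. A sketch of that inspection, assuming psycopg 3 (the helper name is illustrative):

import psycopg
from django.db import DatabaseError


def describe_db_error(e: DatabaseError) -> str:
    # Django wraps the driver error, so the psycopg exception (and its
    # SQLSTATE, e.g. '02000' for NoData) rides along on __cause__.
    cause = e.__cause__
    if cause is not None and getattr(cause, 'sqlstate', None):
        klass = psycopg.errors.lookup(cause.sqlstate)
        return '{} ({})'.format(cause.sqlstate, klass.__name__)
    return 'no SQLSTATE available'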
@@ -685,8 +673,6 @@ def awx_receptor_workunit_reaper():
     unit_ids = [id for id in receptor_work_list]
     jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
-    if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
-        jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
     for job in jobs_with_unreleased_receptor_units:
         logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
         receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
@@ -706,10 +692,7 @@ def awx_k8s_reaper():
         logger.debug("Checking for orphaned k8s pods for {}.".format(group))
         pods = PodManager.list_active_jobs(group)
         time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
-        reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
-        if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
-            reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
-        for job in reap_job_candidates:
+        for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
             logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
             try:
                 pm = PodManager(job)
@@ -720,8 +703,7 @@ def awx_k8s_reaper():
 @task(queue=get_task_queuename)
 def awx_periodic_scheduler():
-    lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
-    with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
+    with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
         if acquired is False:
             logger.debug("Not running periodic scheduler, another task holds lock")
             return
@@ -806,19 +788,10 @@ def update_inventory_computed_fields(inventory_id):
     try:
         i.update_computed_fields()
     except DatabaseError as e:
-        # https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
-        # django raises DatabaseError("Forced update did not affect any rows.")
-
-        # if sqlstate is set then there was a database error and otherwise will re-raise that error
-        cause = e.__cause__
-        if cause and hasattr(cause, 'sqlstate'):
-            sqlstate = cause.sqlstate
-            sqlstate_str = psycopg.errors.lookup(sqlstate)
-            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-            raise
-
-        # otherwise
-        logger.debug('Exiting duplicate update_inventory_computed_fields task.')
+        if 'did not affect any rows' in str(e):
+            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
+            return
+        raise
 
 
 def update_smart_memberships_for_inventory(smart_inventory):
@@ -973,27 +946,3 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
     permission_check_func(creater, copy_mapping.values())
     if isinstance(new_obj, Inventory):
         update_inventory_computed_fields.delay(new_obj.id)
-
-
-@task(queue=get_task_queuename)
-def periodic_resource_sync():
-    if not getattr(settings, 'RESOURCE_SERVER', None):
-        logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
-        return
-
-    with advisory_lock('periodic_resource_sync', wait=False) as acquired:
-        if acquired is False:
-            logger.debug("Not running periodic_resource_sync, another task holds lock")
-            return
-        logger.debug("Running periodic resource sync")
-        executor = SyncExecutor()
-        executor.run()
-        for key, item_list in executor.results.items():
-            if not item_list or key == 'noop':
-                continue
-            # Log creations and conflicts
-            if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
-                logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
-            else:
-                logger.info(f'Periodic resource sync {key}:\n{item_list}')
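
One reusable idea inside the deleted `periodic_resource_sync`: cap what goes to the log aggregator unless debug-level logging is in effect. The same loop extracted as a generic sketch (function and parameter names are illustrative):

import logging

logger = logging.getLogger(__name__)


def log_sync_results(results, verbose=False, cap=10):
    # Skip empty buckets and no-ops; only dump whole lists when verbose.
    for key, item_list in results.items():
        if not item_list or key == 'noop':
            continue
        if len(item_list) > cap and not verbose:
            logger.info(f'resource sync {key}, first {cap} items:\n{item_list[:cap]}')
        else:
            logger.info(f'resource sync {key}:\n{item_list}')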

View File

@@ -3,5 +3,5 @@
   hosts: all
   tasks:
     - name: Hello Message
-      ansible.builtin.debug:
+      debug:
         msg: "Hello World!"

View File

@@ -1,5 +0,0 @@
-{
-  "K8S_AUTH_HOST": "https://foo.invalid",
-  "K8S_AUTH_API_KEY": "fooo",
-  "K8S_AUTH_VERIFY_SSL": "False"
-}

View File

@@ -1,3 +0,0 @@
-{
-  "GOOGLE_BACKEND_CREDENTIALS": "{{ file_reference }}"
-}

View File

@@ -1,8 +1,13 @@
 from awx.main.tests.functional.conftest import *  # noqa
-import os
-
-import pytest
-
-
-@pytest.fixture()
-def release():
-    return os.environ.get('VERSION_TARGET', '')
+
+
+def pytest_addoption(parser):
+    parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")
+
+
+def pytest_generate_tests(metafunc):
+    # This is called for every test. Only get/set command line arguments
+    # if the argument is specified in the list of test "fixturenames".
+    option_value = metafunc.config.option.release
+    if 'release' in metafunc.fixturenames and option_value is not None:
+        metafunc.parametrize("release", [option_value])
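
The right-hand side restores the stock pytest option-to-fixture bridge: `pytest_addoption` registers `--release`, and `pytest_generate_tests` parametrizes any test that requests a `release` argument. Usage, assuming the hooks above live in a `conftest.py` (the test file below is hypothetical):

# test_release.py
def test_release_is_set(release):
    # Run as: pytest --release 3.3.0
    # Without the flag, `release` is never parametrized and pytest
    # reports a missing fixture for this test.
    assert release == "3.3.0"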

Some files were not shown because too many files have changed in this diff.