Compare commits


1 Commit

Author: Jessica Steurer
SHA1: dc8006757a
Message: Revert "Adds support for a pseudolocalization and lang query params (#13661)"
         This reverts commit 7f1750324f.
Date: 2023-03-15 08:31:26 -03:00
2788 changed files with 357012 additions and 39136 deletions

View File

@@ -1,57 +0,0 @@
---
codecov:
  notify:
    after_n_builds: 6  # Number of test matrix+lint jobs uploading coverage
    wait_for_ci: false
  require_ci_to_pass: false
  token: >-  # repo-scoped, upload-only, needed for stability in PRs from forks
    2b8c7a7a-7293-4a00-bf02-19bd55a1389b
comment:
  require_changes: true
coverage:
  range: 100..100
  status:
    patch:
      default:
        target: 100%
      pytest:
        target: 100%
        flags:
          - pytest
      typing:
        flags:
          - MyPy
    project:
      default:
        target: 75%
      lib:
        flags:
          - pytest
        paths:
          - awx/
        target: 75%
      tests:
        flags:
          - pytest
        paths:
          - tests/
          - >-
            **/test/
          - >-
            **/tests/
          - >-
            **/test/**
          - >-
            **/tests/**
        target: 95%
      typing:
        flags:
          - MyPy
        target: 100%
...

View File

@@ -1,6 +1,16 @@
+[run]
+source = awx
+branch = True
+omit =
+    awx/main/migrations/*
+    awx/lib/site-packages/*
 [report]
 # Regexes for lines to exclude from consideration
-exclude_also =
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
     # Don't complain about missing debug-only code:
     def __repr__
     if self\.debug
@@ -13,18 +23,7 @@ exclude_also =
     if 0:
     if __name__ == .__main__.:
-    ^\s*@pytest\.mark\.xfail
-[run]
-branch = True
-omit =
-    awx/main/migrations/*
-    awx/settings/defaults.py
-    awx/settings/*_defaults.py
-source =
-    .
-source_pkgs =
-    awx
+ignore_errors = True
 [xml]
 output = ./reports/coverage.xml

View File

@@ -19,8 +19,6 @@ body:
         required: true
       - label: I understand that AWX is open source software provided for free and that I might not receive a timely response.
         required: true
-      - label: I am **NOT** reporting a (potential) security vulnerability. (These should be emailed to `security@ansible.com` instead.)
-        required: true
   - type: textarea
     id: summary
@@ -44,7 +42,6 @@ body:
       label: Select the relevant components
       options:
         - label: UI
-        - label: UI (tech preview)
         - label: API
         - label: Docs
         - label: Collection

View File

@@ -1,44 +0,0 @@
name: Setup images for AWX
description: Builds new awx_devel image
inputs:
  github-token:
    description: GitHub Token for registry access
    required: true
  private-github-key:
    description: GitHub private key for private repositories
    required: false
    default: ''
runs:
  using: composite
  steps:
    - uses: ./.github/actions/setup-python
    - name: Set lower case owner name
      shell: bash
      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
      env:
        OWNER: '${{ github.repository_owner }}'
    - name: Log in to registry
      shell: bash
      run: |
        echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
    - uses: ./.github/actions/setup-ssh-agent
      with:
        ssh-private-key: ${{ inputs.private-github-key }}
    - name: Pre-pull latest devel image to warm cache
      shell: bash
      run: |
        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
        COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
        docker pull -q `make print-DEVEL_IMAGE_NAME`
      continue-on-error: true
    - name: Build image for current source checkout
      shell: bash
      run: |
        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
        COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
        make docker-compose-build

View File

@@ -1,77 +0,0 @@
name: Run AWX docker-compose
description: Runs AWX with `make docker-compose`
inputs:
  github-token:
    description: GitHub Token to pass to awx_devel_image
    required: true
  build-ui:
    description: Should the UI be built?
    required: false
    default: false
    type: boolean
  private-github-key:
    description: GitHub private key for private repositories
    required: false
    default: ''
outputs:
  ip:
    description: The IP of the tools_awx_1 container
    value: ${{ steps.data.outputs.ip }}
runs:
  using: composite
  steps:
    - name: Disable apparmor for rsyslogd, first step
      shell: bash
      run: sudo ln -s /etc/apparmor.d/usr.sbin.rsyslogd /etc/apparmor.d/disable/
    - name: Disable apparmor for rsyslogd, second step
      shell: bash
      run: sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.rsyslogd
    - name: Build awx_devel image for running checks
      uses: ./.github/actions/awx_devel_image
      with:
        github-token: ${{ inputs.github-token }}
        private-github-key: ${{ inputs.private-github-key }}
    - name: Upgrade ansible-core
      shell: bash
      run: python3 -m pip install --upgrade ansible-core
    - name: Install system deps
      shell: bash
      run: sudo apt-get install -y gettext
    - name: Start AWX
      shell: bash
      run: |
        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
        COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
        DJANGO_COLORS=nocolor \
        SUPERVISOR_ARGS="-n -t" \
        COMPOSE_UP_OPTS="-d --no-color" \
        make docker-compose
    - name: Update default AWX password
      shell: bash
      run: |
        SECONDS=0
        while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]; do
          if [[ $SECONDS -gt 600 ]]; then
            echo "Timing out, AWX never came up"
            exit 1
          fi
          echo "Waiting for AWX..."
          sleep 5
        done
        echo "AWX is up, updating the password..."
        docker exec -i tools_awx_1 sh <<-EOSH
          awx-manage update_password --username=admin --password=password
        EOSH
    - name: Get instance data
      id: data
      shell: bash
      run: |
        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
        echo "ip=$AWX_IP" >> $GITHUB_OUTPUT

View File

@@ -1,27 +0,0 @@
name: 'Setup Python from Makefile'
description: 'Extract and set up Python version from Makefile'
inputs:
  python-version:
    description: 'Override Python version (optional)'
    required: false
    default: ''
  working-directory:
    description: 'Directory containing the Makefile'
    required: false
    default: '.'
runs:
  using: composite
  steps:
    - name: Get python version from Makefile
      shell: bash
      run: |
        if [ -n "${{ inputs.python-version }}" ]; then
          echo "py_version=${{ inputs.python-version }}" >> $GITHUB_ENV
        else
          cd ${{ inputs.working-directory }}
          echo "py_version=`make PYTHON_VERSION`" >> $GITHUB_ENV
        fi
    - name: Install python
      uses: actions/setup-python@v5
      with:
        python-version: ${{ env.py_version }}

View File

@@ -1,29 +0,0 @@
name: 'Setup SSH for GitHub'
description: 'Configure SSH for private repository access'
inputs:
  ssh-private-key:
    description: 'SSH private key for repository access'
    required: false
    default: ''
runs:
  using: composite
  steps:
    - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
      id: generate_key
      shell: bash
      run: |
        if [[ -z "${{ inputs.ssh-private-key }}" ]]; then
          ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
          echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
          cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
        else
          echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
          echo "${{ inputs.ssh-private-key }}" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
        fi
    - name: Add private GitHub key to SSH agent
      uses: webfactory/ssh-agent@v0.9.0
      with:
        ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}

View File

@@ -1,19 +0,0 @@
name: Upload logs
description: Upload logs from `make docker-compose` devel environment to GitHub as an artifact
inputs:
  log-filename:
    description: "*Unique* name of the log file"
    required: true
runs:
  using: composite
  steps:
    - name: Get AWX logs
      shell: bash
      run: |
        docker logs tools_awx_1 > ${{ inputs.log-filename }}
    - name: Upload AWX logs as artifact
      uses: actions/upload-artifact@v4
      with:
        name: docker-compose-logs-${{ inputs.log-filename }}
        path: ${{ inputs.log-filename }}

View File

@@ -1,10 +1,19 @@
 version: 2
 updates:
-  - package-ecosystem: "pip"
-    directory: "docs/docsite/"
+  - package-ecosystem: "npm"
+    directory: "/awx/ui"
     schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 2
+      interval: "monthly"
+    open-pull-requests-limit: 5
+    allow:
+      - dependency-type: "production"
+    reviewers:
+      - "AlexSCorey"
+      - "keithjgrant"
+      - "kialam"
+      - "mabashian"
+      - "marshmalien"
     labels:
-      - "docs"
+      - "component:ui"
       - "dependencies"
+    target-branch: "devel"

View File

@@ -1,5 +1,8 @@
 "component:api":
-  - any: ["awx/**/*"]
+  - any: ["awx/**/*", "!awx/ui/**"]
+"component:ui":
+  - any: ["awx/ui/**/*"]
 "component:docs":
   - any: ["docs/**/*"]
@@ -11,4 +14,6 @@
   - any: ["awx_collection/**/*"]
 "dependencies":
-  - any: ["requirements/*"]
+  - any: ["awx/ui/package.json"]
+  - any: ["awx/requirements/*.txt"]
+  - any: ["awx/requirements/requirements.in"]

View File

@@ -1,13 +1,14 @@
 ## General
+- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
 - Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
-- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
+- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
 ## PRs/Issues
-### Visit the Forum or Matrix
-- Hello, this appears to be less of a bug report or feature request and more of a question. Could you please ask this on either the [Ansible AWX channel on Matrix](https://matrix.to/#/#awx:ansible.com) or the [Ansible Community Forum](https://forum.ansible.com/tag/awx)?
+### Visit our mailing list
+- Hello, this appears to be less of a bug report or feature request and more of a question. Could you please ask this on our mailing list? See https://github.com/ansible/awx/#get-involved for information for ways to connect with us.
 ### Denied Submission
@@ -82,7 +83,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 ## Mailing List Triage
 ### Create an issue
-- Hello, thanks for reaching out on list. We think this merits an issue on our GitHub, https://github.com/ansible/awx/issues. If you could open an issue up on GitHub it will get tagged and integrated into our planning and workflow. All future work will be tracked there. Issues should include as much information as possible, including screenshots, log outputs, or any reproducers.
+- Hello, thanks for reaching out on list. We think this merits an issue on our Github, https://github.com/ansible/awx/issues. If you could open an issue up on Github it will get tagged and integrated into our planning and workflow. All future work will be tracked there. Issues should include as much information as possible, including screenshots, log outputs, or any reproducers.
 ### Create a Pull Request
 - Hello, we think your idea is good! Please consider contributing a PR for this following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
@@ -92,8 +93,8 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 - Hello, your issue seems related to receptor. Could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!
 ### Ansible Engine not AWX
-- Hello, your question seems to be about Ansible development, not about AWX. Try asking on in the Forum https://forum.ansible.com/tag/development
-- Hello, your question seems to be about using Ansible Core, not about AWX. https://forum.ansible.com/tag/ansible-core is the best place to visit for user questions about Ansible. Thanks!
+- Hello, your question seems to be about Ansible development, not about AWX. Try asking on the Ansible-devel specific mailing list: https://groups.google.com/g/ansible-devel
+- Hello, your question seems to be about using Ansible, not about AWX. https://groups.google.com/g/ansible-project is the best place to visit for user questions about Ansible. Thanks!
 ### Ansible Galaxy not AWX
 - Hey there. That sounds like an FAQ question. Did this: https://www.ansible.com/products/awx-project/faq cover your question?
@@ -103,7 +104,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 - AWX-Operator: https://github.com/ansible/awx-operator/blob/devel/CONTRIBUTING.md
 ### Oracle AWX
 We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support.
 ### Community Resolved
 Hi,

View File

@@ -3,19 +3,14 @@ name: CI
 env:
   LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
   CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  DEV_DOCKER_OWNER: ${{ github.repository_owner }}
+  DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
   COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
-  UPSTREAM_REPOSITORY_ID: 91594105
 on:
   pull_request:
-  push:
-    branches:
-      - devel  # needed to publish code coverage post-merge
 jobs:
   common-tests:
     name: ${{ matrix.tests.name }}
     runs-on: ubuntu-latest
-    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
@@ -24,157 +19,57 @@ jobs:
       matrix:
         tests:
           - name: api-test
-            command: /start_tests.sh test_coverage
-            coverage-upload-name: ""
-          - name: api-migrations
-            command: /start_tests.sh test_migrations
-            coverage-upload-name: ""
+            command: /start_tests.sh
           - name: api-lint
             command: /var/lib/awx/venv/awx/bin/tox -e linters
-            coverage-upload-name: ""
           - name: api-swagger
             command: /start_tests.sh swagger
-            coverage-upload-name: ""
           - name: awx-collection
             command: /start_tests.sh test_collection_all
-            coverage-upload-name: "awx-collection"
           - name: api-schema
-            command: >-
-              /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
-                github.event.pull_request.base.ref || github.ref_name
-              }}
-            coverage-upload-name: ""
+            command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
+          - name: ui-lint
+            command: make ui-lint
+          - name: ui-test-screens
+            command: make ui-test-screens
+          - name: ui-test-general
+            command: make ui-test-general
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - name: Build awx_devel image for running checks
-        uses: ./.github/actions/awx_devel_image
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
+      - uses: actions/checkout@v2
       - name: Run check ${{ matrix.tests.name }}
-        id: make-run
-        run: >-
-          AWX_DOCKER_ARGS='-e GITHUB_ACTIONS -e GITHUB_OUTPUT -v "${GITHUB_OUTPUT}:${GITHUB_OUTPUT}:rw,Z"'
-          AWX_DOCKER_CMD='${{ matrix.tests.command }}'
-          make docker-runner
-      - name: Upload test coverage to Codecov
-        if: >-
-          !cancelled()
-          && steps.make-run.outputs.cov-report-files != ''
-        uses: codecov/codecov-action@v4
-        with:
-          fail_ci_if_error: >-
-            ${{
-              toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
-            }}
-          files: >-
-            ${{ steps.make-run.outputs.cov-report-files }}
-          flags: >-
-            CI-GHA,
-            pytest,
-            OS-${{
-              runner.os
-            }}
-          token: ${{ secrets.CODECOV_TOKEN }}
-      - name: Upload test results to Codecov
-        if: >-
-          !cancelled()
-          && steps.make-run.outputs.test-result-files != ''
-        uses: codecov/test-results-action@v1
-        with:
-          fail_ci_if_error: >-
-            ${{
-              toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
-            }}
-          files: >-
-            ${{ steps.make-run.outputs.test-result-files }}
-          flags: >-
-            CI-GHA,
-            pytest,
-            OS-${{
-              runner.os
-            }}
-          token: ${{ secrets.CODECOV_TOKEN }}
-      - name: Upload awx jUnit test reports
-        if: >-
-          !cancelled()
-          && steps.make-run.outputs.test-result-files != ''
-          && github.event_name == 'push'
-          && env.UPSTREAM_REPOSITORY_ID == github.repository_id
-          && github.ref_name == github.event.repository.default_branch
-        run: |
-          for junit_file in $(echo '${{ steps.make-run.outputs.test-result-files }}' | sed 's/,/ /')
-          do
-            curl \
-              -v \
-              --user "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_USER }}:${{ secrets.PDE_ORG_RESULTS_UPLOAD_PASSWORD }}" \
-              --form "xunit_xml=@${junit_file}" \
-              --form "component_name=${{ matrix.tests.coverage-upload-name || 'awx' }}" \
-              --form "git_commit_sha=${{ github.sha }}" \
-              --form "git_repository_url=https://github.com/${{ github.repository }}" \
-              "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_URL }}/api/results/upload/"
-          done
+        run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make github_ci_runner
   dev-env:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
-        with:
-          python-version: '3.x'
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: false
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
-      - name: Run live dev env tests
-        run: docker exec tools_awx_1 /bin/bash -c "make live_test"
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: live-tests.log
+      - uses: actions/checkout@v2
+      - name: Run smoke test
+        run: make github_ci_setup && ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
   awx-operator:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
-    env:
-      DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
       - name: Checkout awx
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
-          show-progress: false
           path: awx
-      - uses: ./awx/.github/actions/setup-ssh-agent
-        with:
-          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
       - name: Checkout awx-operator
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
-          show-progress: false
           repository: ansible/awx-operator
           path: awx-operator
-      - uses: ./awx/.github/actions/setup-python
-        with:
-          working-directory: awx
+      - name: Get python version from Makefile
+        working-directory: awx
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
       - name: Install playbook dependencies
         run: |
@@ -183,260 +78,39 @@ jobs:
       - name: Build AWX image
         working-directory: awx
         run: |
-          VERSION=`make version-for-buildyml` make awx-kube-build
-        env:
-          COMPOSE_TAG: ci
-          DEV_DOCKER_TAG_BASE: local
-          HEADLESS: yes
+          ansible-playbook -v tools/ansible/build.yml \
+            -e headless=yes \
+            -e awx_image=awx \
+            -e awx_image_tag=ci \
+            -e ansible_python_interpreter=$(which python3)
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
         run: |
           python3 -m pip install -r molecule/requirements.txt
-          python3 -m pip install PyYAML  # for awx/tools/scripts/rewrite-awx-operator-requirements.py
-          $(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)
           make kustomize
-          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
         env:
-          AWX_TEST_IMAGE: local/awx
+          AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
-          AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
-          STORE_DEBUG_OUTPUT: true
-      - name: Upload debug output
-        if: failure()
-        uses: actions/upload-artifact@v4
-        with:
-          name: awx-operator-debug-output
-          path: ${{ env.DEBUG_OUTPUT_DIR }}
   collection-sanity:
     name: awx_collection sanity
     runs-on: ubuntu-latest
-    timeout-minutes: 30
-    strategy:
-      fail-fast: false
-      matrix:
-        ansible:
-          - stable-2.17
-          # - devel
-    steps:
-      - name: Perform sanity testing
-        uses: ansible-community/ansible-test-gh-action@release/v1
-        with:
-          ansible-core-version: ${{ matrix.ansible }}
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-          collection-root: awx_collection
-          pre-test-cmd: >-
-            ansible-playbook
-            -i localhost,
-            tools/template_galaxy.yml
-            -e collection_package=awx
-            -e collection_namespace=awx
-            -e collection_version=1.0.0
-            -e '{"awx_template_version": false}'
-          testing-type: sanity
-      - name: Upload awx jUnit test reports to the unified dashboard
-        if: >-
-          !cancelled()
-          && steps.make-run.outputs.test-result-files != ''
-          && github.event_name == 'push'
-          && env.UPSTREAM_REPOSITORY_ID == github.repository_id
-          && github.ref_name == github.event.repository.default_branch
-        run: |
-          for junit_file in $(echo '${{ steps.make-run.outputs.test-result-files }}' | sed 's/,/ /')
-          do
-            curl \
-              -v \
-              --user "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_USER }}:${{ secrets.PDE_ORG_RESULTS_UPLOAD_PASSWORD }}" \
-              --form "xunit_xml=@${junit_file}" \
-              --form "component_name=awx" \
-              --form "git_commit_sha=${{ github.sha }}" \
-              --form "git_repository_url=https://github.com/${{ github.repository }}" \
-              "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_URL }}/api/results/upload/"
-          done
-  collection-integration:
-    name: awx_collection integration
-    runs-on: ubuntu-latest
-    timeout-minutes: 60
-    strategy:
-      fail-fast: false
-      matrix:
-        target-regex:
-          - name: a-h
-            regex: ^[a-h]
-          - name: i-p
-            regex: ^[i-p]
-          - name: r-z0-9
-            regex: ^[r-z0-9]
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
-        with:
-          python-version: '3.x'
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: false
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
-      - name: Install dependencies for running tests
-        run: |
-          python3 -m pip install -e ./awxkit/
-          python3 -m pip install -r awx_collection/requirements.txt
-      - name: Run integration tests
-        id: make-run
-        run: |
-          echo "::remove-matcher owner=python::"  # Disable annoying annotations from setup-python
-          echo '[general]' > ~/.tower_cli.cfg
-          echo 'host = https://${{ steps.awx.outputs.ip }}:8043' >> ~/.tower_cli.cfg
-          echo 'username = admin' >> ~/.tower_cli.cfg
-          echo 'password = password' >> ~/.tower_cli.cfg
-          echo 'verify_ssl = false' >> ~/.tower_cli.cfg
-          TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
-          make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
-        env:
-          ANSIBLE_TEST_PREFER_PODMAN: 1
-      - name: Upload test coverage to Codecov
-        if: >-
-          !cancelled()
-          && steps.make-run.outputs.cov-report-files != ''
-        uses: codecov/codecov-action@v4
-        with:
-          fail_ci_if_error: >-
-            ${{
-              toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
-            }}
-          files: >-
-            ${{ steps.make-run.outputs.cov-report-files }}
-          flags: >-
-            CI-GHA,
-            ansible-test,
-            integration,
-            OS-${{
-              runner.os
-            }}
-          token: ${{ secrets.CODECOV_TOKEN }}
-      # Upload coverage report as artifact
-      - uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: coverage-${{ matrix.target-regex.name }}
-          path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: collection-integration-${{ matrix.target-regex.name }}.log
-  collection-integration-coverage-combine:
-    name: combine awx_collection integration coverage
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    needs:
-      - collection-integration
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
-        with:
-          python-version: '3.x'
+      - uses: actions/checkout@v2
+      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
       - name: Upgrade ansible-core
        run: python3 -m pip install --upgrade ansible-core
-      - name: Download coverage artifacts A to H
-        uses: actions/download-artifact@v4
-        with:
-          name: coverage-a-h
-          path: coverage
-      - name: Download coverage artifacts I to P
-        uses: actions/download-artifact@v4
-        with:
-          name: coverage-i-p
-          path: coverage
-      - name: Download coverage artifacts Z to Z
-        uses: actions/download-artifact@v4
-        with:
-          name: coverage-r-z0-9
-          path: coverage
-      - name: Combine coverage
-        run: |
-          make COLLECTION_VERSION=100.100.100-git install_collection
-          mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
-          cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
-          cd ~/.ansible/collections/ansible_collections/awx/awx
-          ansible-test coverage combine --requirements
-          ansible-test coverage html
-          echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
-          echo '```' >> $GITHUB_STEP_SUMMARY
-          ansible-test coverage report >> $GITHUB_STEP_SUMMARY
-          echo '```' >> $GITHUB_STEP_SUMMARY
-          echo >> $GITHUB_STEP_SUMMARY
-          echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
-          echo 'Download the HTML artifacts to view the coverage report.' >> $GITHUB_STEP_SUMMARY
-      # This is a huge hack, there's no official action for removing artifacts currently.
-      # Also ACTIONS_RUNTIME_URL and ACTIONS_RUNTIME_TOKEN aren't available in normal run
-      # steps, so we have to use github-script to get them.
-      #
-      # The advantage of doing this, though, is that we save on artifact storage space.
-      - name: Get secret artifact runtime URL
-        uses: actions/github-script@v6
-        id: get-runtime-url
-        with:
-          result-encoding: string
-          script: |
-            const { ACTIONS_RUNTIME_URL } = process.env;
-            return ACTIONS_RUNTIME_URL;
-      - name: Get secret artifact runtime token
-        uses: actions/github-script@v6
-        id: get-runtime-token
-        with:
-          result-encoding: string
-          script: |
-            const { ACTIONS_RUNTIME_TOKEN } = process.env;
-            return ACTIONS_RUNTIME_TOKEN;
-      - name: Remove intermediary artifacts
+      - name: Run sanity tests
+        run: make test_collection_sanity
         env:
-          ACTIONS_RUNTIME_URL: ${{ steps.get-runtime-url.outputs.result }}
-          ACTIONS_RUNTIME_TOKEN: ${{ steps.get-runtime-token.outputs.result }}
-        run: |
-          echo "::add-mask::${ACTIONS_RUNTIME_TOKEN}"
-          artifacts=$(
-            curl -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
-              ${ACTIONS_RUNTIME_URL}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview \
-              | jq -r '.value | .[] | select(.name | startswith("coverage-")) | .url'
-          )
-          for artifact in $artifacts; do
-            curl -i -X DELETE -H "Accept: application/json;api-version=6.0-preview" -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" "$artifact"
-          done
-      - name: Upload coverage report as artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: awx-collection-integration-coverage-html
-          path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/reports/coverage
+          # needed due to cgroupsv2. This is fixed, but a stable release
+          # with the fix has not been made yet.
+          ANSIBLE_TEST_PREFER_PODMAN: 1

View File

@@ -1,57 +0,0 @@
---
name: django-ansible-base requirements update
on:
  workflow_dispatch:
  schedule:
    - cron: '0 6 * * *'  # once an day @ 6 AM
permissions:
  pull-requests: write
  contents: write
jobs:
  dab-pin-newest:
    if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
    runs-on: ubuntu-latest
    steps:
      - id: dab-release
        name: Get current django-ansible-base release version
        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c  # v0.8.0
        with:
          owner: ansible
          repo: django-ansible-base
          excludes: prerelease, draft
      - name: Check out respository code
        uses: actions/checkout@v4
      - id: dab-pinned
        name: Get current django-ansible-base pinned version
        run:
          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
      - name: Update django-ansible-base pinned version to upstream release
        run:
          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c  # v6
        with:
          base: devel
          branch: bump-django-ansible-base
          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
          body: |
            ##### SUMMARY
            Automated .github/workflows/dab-release.yml
            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
            ##### ISSUE TYPE
            - Bug, Docs Fix or other nominal change
            ##### COMPONENT NAME
            - API
          commit-message: |
            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
          add-paths:
            requirements/requirements_git.txt

View File

@@ -2,77 +2,54 @@
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-  DOCKER_CACHE: "--no-cache"  # using the cache will not rebuild git requirements and other things
 on:
-  workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
       - feature_*
 jobs:
-  push-development-images:
-    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
+  push:
     runs-on: ubuntu-latest
-    timeout-minutes: 120
     permissions:
       packages: write
       contents: read
-    strategy:
-      fail-fast: false
-      matrix:
-        build-targets:
-          - image-name: awx_devel
-            make-target: docker-compose-buildx
-          - image-name: awx_kube_devel
-            make-target: awx-kube-dev-buildx
-          - image-name: awx
-            make-target: awx-kube-buildx
     steps:
-      - name: Skipping build of awx image for non-awx repository
+      - uses: actions/checkout@v2
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Set lower case owner name
         run: |
-          echo "Skipping build of awx image for non-awx repository"
-          exit 0
-        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: Set GITHUB_ENV variables
-        run: |
-          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
-          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
+          echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
         env:
           OWNER: '${{ github.repository_owner }}'
-      - uses: ./.github/actions/setup-python
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
       - name: Log in to registry
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-      - name: Setup node and npm for the new UI build
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-        if: matrix.build-targets.image-name == 'awx'
-      - name: Prebuild new UI for awx image (to speed up build process)
+      - name: Pre-pull image to warm build cache
         run: |
-          make ui
-        if: matrix.build-targets.image-name == 'awx'
-      - uses: ./.github/actions/setup-ssh-agent
-        with:
-          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
-      - name: Build and push AWX devel images
+          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
+      - name: Build images
         run: |
-          make ${{ matrix.build-targets.make-target }}
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
+      - name: Push image
+        run: |
+          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}

View File

@@ -1,23 +0,0 @@
---
name: Docsite CI
on:
  pull_request:
jobs:
  docsite-build:
    name: docsite test build
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4
        with:
          show-progress: false
      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.x'
      - name: install tox
        run: pip install tox
      - name: Assure docs can be built
        run: tox -e docs

.github/workflows/e2e_test.yml
View File

@@ -0,0 +1,109 @@
---
name: E2E Tests
env:
  LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
on:
  pull_request_target:
    types: [labeled]
jobs:
  e2e-test:
    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
    runs-on: ubuntu-latest
    timeout-minutes: 40
    permissions:
      packages: write
      contents: read
    strategy:
      matrix:
        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
    steps:
      - uses: actions/checkout@v2
      - name: Get python version from Makefile
        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
      - name: Install python ${{ env.py_version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ env.py_version }}
      - name: Install system deps
        run: sudo apt-get install -y gettext
      - name: Log in to registry
        run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
      - name: Pre-pull image to warm build cache
        run: |
          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
      - name: Build UI
        run: |
          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make ui-devel
      - name: Start AWX
        run: |
          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make docker-compose &> make-docker-compose-output.log &
      - name: Pull awx_cypress_base image
        run: |
          docker pull quay.io/awx/awx_cypress_base:latest
      - name: Checkout test project
        uses: actions/checkout@v2
        with:
          repository: ${{ github.repository_owner }}/tower-qa
          ssh-key: ${{ secrets.QA_REPO_KEY }}
          path: tower-qa
          ref: devel
      - name: Build cypress
        run: |
          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
          docker build -t awx-pf-tests .
      - name: Update default AWX password
        run: |
          while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]
          do
            echo "Waiting for AWX..."
            sleep 5;
          done
          echo "AWX is up, updating the password..."
          docker exec -i tools_awx_1 sh <<-EOSH
            awx-manage update_password --username=admin --password=password
          EOSH
      - name: Run E2E tests
        env:
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
        run: |
          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
          export COMMIT_INFO_SHA=$GITHUB_SHA
          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
          AWX_IP=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tools_awx_1)
          printenv > .env
          echo "Executing tests:"
          docker run \
            --network '_sources_default' \
            --ipc=host \
            --env-file=.env \
            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
            -e CYPRESS_AWX_E2E_USERNAME=admin \
            -e CYPRESS_AWX_E2E_PASSWORD='password' \
            -e COMMAND="npm run cypress-concurrently-gha" \
            -v /dev/shm:/dev/shm \
            -v $PWD:/e2e \
            -w /e2e \
            awx-pf-tests run --project .
      - name: Save AWX logs
        uses: actions/upload-artifact@v2
        with:
          name: AWX-logs-${{ matrix.job }}
          path: make-docker-compose-output.log

View File

@@ -2,12 +2,13 @@
 name: Feature branch deletion cleanup
 env:
   LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-on: delete
+on:
+  delete:
+    branches:
+      - feature_**
 jobs:
-  branch_delete:
-    if: ${{ github.event.ref_type == 'branch' && startsWith(github.event.ref, 'feature_') }}
+  push:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
@@ -20,4 +21,6 @@ jobs:
       run: |
         ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
         ansible localhost -c local -m aws_s3 \
-          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
+          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"

View File

@@ -6,19 +6,14 @@ on:
     - opened
     - reopened
-permissions:
-  contents: write  # to fetch code
-  issues: write  # to label issues
 jobs:
   triage:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label Issue
     steps:
       - name: Label Issue
-        uses: github/issue-labeler@v3.1
+        uses: github/issue-labeler@v2.4.1
         with:
           repo-token: "${{ secrets.GITHUB_TOKEN }}"
           not-before: 2021-12-07T07:00:00Z
@@ -27,18 +22,12 @@ jobs:
   community:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label Issue - Community
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
       - name: Check if user is a member of Ansible org
         uses: jannekem/run-python-script-action@v1
         id: check_user

View File

@@ -7,14 +7,9 @@ on:
     - reopened
     - synchronize
-permissions:
-  contents: write  # to determine modified files (actions/labeler)
-  pull-requests: write  # to add labels to PRs (actions/labeler)
 jobs:
   triage:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label PR
     steps:
@@ -26,17 +21,10 @@ jobs:
   community:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label PR - Community
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
-        with:
-          python-version: '3.x'
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
       - name: Check if user is a member of Ansible org

View File

@@ -7,10 +7,8 @@
   types: [opened, edited, reopened, synchronize]
 jobs:
   pr-check:
-    if: github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')
     name: Scan PR description for semantic versioning keywords
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     permissions:
       packages: write
       contents: read

View File

@@ -7,36 +7,22 @@ env:
 on:
   release:
     types: [published]
-  workflow_dispatch:
-    inputs:
-      tag_name:
-        description: 'Name for the tag of the release.'
-        required: true
-permissions:
-  contents: read  # to fetch code (actions/checkout)
 jobs:
   promote:
     if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
-    timeout-minutes: 90
     steps:
-      - name: Set GitHub Env vars for workflow_dispatch event
-        if: ${{ github.event_name == 'workflow_dispatch' }}
-        run: |
-          echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
-      - name: Set GitHub Env vars if release event
-        if: ${{ github.event_name == 'release' }}
-        run: |
-          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
       - name: Checkout awx
-        uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
+        uses: actions/checkout@v2
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
       - name: Install dependencies
         run: |
@@ -51,23 +37,14 @@ jobs:
         if: ${{ github.repository_owner != 'ansible' }}
       - name: Build collection and publish to galaxy
-        env:
-          COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
-          COLLECTION_VERSION: ${{ env.TAG_NAME }}
-          COLLECTION_TEMPLATE_VERSION: true
         run: |
-          sudo apt-get install jq
-          make build_collection
-          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
-          if [[ "$count" == "1" ]]; then
-            echo "Galaxy release already done";
-          elif [[ "$count" == "0" ]]; then
+          COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
+          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+            echo "Galaxy release already done"; \
+          else \
             ansible-galaxy collection publish \
               --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
-          else
-            echo "Unexpected count from galaxy search: $count";
-            exit 1;
+              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
           fi
       - name: Set official pypi info
@@ -79,11 +56,9 @@ jobs:
         if: ${{ github.repository_owner != 'ansible' }}
       - name: Build awxkit and upload to pypi
-        env:
-          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
         run: |
           git reset --hard
-          cd awxkit && python3 setup.py sdist bdist_wheel
+          cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
             -u ${{ secrets.PYPI_USERNAME }} \
@@ -100,15 +75,11 @@ jobs:
       - name: Re-tag and promote awx image
         run: |
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository }}:latest
-      - name: Re-tag and promote awx-ee image
-        run: |
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
+          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
+          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository }}:latest
+          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}

View File

@@ -23,7 +23,6 @@ jobs:
   stage:
     if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
-    timeout-minutes: 90
     permissions:
       packages: write
       contents: write
@@ -45,95 +44,68 @@ jobs:
           exit 0
       - name: Checkout awx
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
-          show-progress: false
           path: awx
-      - name: Checkout awx-operator
-        uses: actions/checkout@v4
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
         with:
-          show-progress: false
-          repository: ${{ github.repository_owner }}/awx-operator
-          path: awx-operator
+          python-version: ${{ env.py_version }}
       - name: Checkout awx-logos
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
-          show-progress: false
           repository: ansible/awx-logos
           path: awx-logos
-      - uses: ./awx/.github/actions/setup-python
+      - name: Checkout awx-operator
+        uses: actions/checkout@v2
         with:
-          working-directory: awx
+          repository: ${{ github.repository_owner }}/awx-operator
+          path: awx-operator
       - name: Install playbook dependencies
         run: |
           python3 -m pip install docker
-      - name: Log into registry ghcr.io
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Copy logos for inclusion in sdist for official build
-        working-directory: awx
-        run: |
-          cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
-      - name: Setup node and npm for new UI build
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-      - name: Prebuild new UI for awx image (to speed up build process)
-        working-directory: awx
-        run: make ui
-      - name: Set build env variables
-        run: |
-          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
-          echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
-          echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
-        env:
-          OWNER: ${{ github.repository_owner }}
       - name: Build and stage AWX
         working-directory: awx
-        env:
-          DOCKER_BUILDX_PUSH: true
-          HEADLESS: false
-          PLATFORMS: linux/amd64,linux/arm64
         run: |
-          make awx-kube-buildx
+          ansible-playbook -v tools/ansible/build.yml \
+            -e registry=ghcr.io \
+            -e registry_username=${{ github.actor }} \
+            -e registry_password=${{ secrets.GITHUB_TOKEN }} \
+            -e awx_image=${{ github.repository }} \
+            -e awx_version=${{ github.event.inputs.version }} \
+            -e ansible_python_interpreter=$(which python3) \
+            -e push=yes \
+            -e awx_official=yes
+      - name: Log in to GHCR
+        run: |
+          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+      - name: Log in to Quay
+        run: |
+          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
       - name: tag awx-ee:latest with version input
         run: |
-          docker buildx imagetools create \
-            quay.io/ansible/awx-ee:latest \
-            --tag ${AWX_EE_TEST_IMAGE}
-      - name: Stage awx-operator image
+          docker pull quay.io/ansible/awx-ee:latest
+          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+      - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
-          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
+          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \
            --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
-          IMG=${AWX_OPERATOR_TEST_IMAGE} \
-          make docker-buildx
-      - name: Pulling images for test deployment with awx-operator
-        # awx operator molecue test expect to kind load image and buildx exports image to registry and not local
-        run: |
-          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
-          docker pull -q ${AWX_EE_TEST_IMAGE}
-          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
+          IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \
+          VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -143,6 +115,10 @@ jobs:
           sudo rm -f $(which kustomize)
           make kustomize
           KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
+        env:
+          AWX_TEST_IMAGE: ${{ github.repository }}
+          AWX_TEST_VERSION: ${{ github.event.inputs.version }}
+          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
       - name: Create draft release for AWX
         working-directory: awx

View File

@@ -9,13 +9,10 @@
     name: Update Dependabot Prs
     if: contains(github.event.pull_request.labels.*.name, 'dependencies') && contains(github.event.pull_request.labels.*.name, 'component:ui')
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v4
-        with:
-          show-progress: false
+        uses: actions/checkout@v3
      - name: Update PR Body
        env:

View File

@@ -5,7 +5,6 @@ env:
   LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
-  workflow_dispatch:
   push:
     branches:
       - devel
@@ -14,28 +13,27 @@ on:
 jobs:
   push:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
     steps:
-      - uses: actions/checkout@v4
-        with:
-          show-progress: false
-      - uses: ./.github/actions/setup-python
+      - uses: actions/checkout@v2
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
       - name: Log in to registry
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-      - uses: ./.github/actions/setup-ssh-agent
-        with:
-          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
       - name: Pre-pull image to warm build cache
         run: |
-          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
       - name: Build image
         run: |
@@ -55,3 +53,5 @@ jobs:
         ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
         ansible localhost -c local -m aws_s3 \
           -a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"

.gitignore
View File

@@ -20,10 +20,23 @@ awx/projects
 awx/job_output
 awx/public/media
 awx/public/static
+awx/ui/tests/test-results.xml
+awx/ui/client/src/local_settings.json
 awx/main/fixtures
 awx/*.log
 tower/tower_warnings.log
 celerybeat-schedule
+awx/ui/static
+awx/ui/build_test
+awx/ui/client/languages
+awx/ui/templates/ui/index.html
+awx/ui/templates/ui/installing.html
+awx/ui/node_modules/
+awx/ui/src/locales/*/messages.js
+awx/ui/coverage/
+awx/ui/build
+awx/ui/.env.local
+awx/ui/instrumented
 rsyslog.pid
 tools/docker-compose/ansible/awx_dump.sql
 tools/docker-compose/Dockerfile
@@ -31,11 +44,7 @@ tools/docker-compose/_build
 tools/docker-compose/_sources
 tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
-tools/docker-compose/keycloak.awx.realm.json
-!tools/docker-compose/editable_dependencies
-tools/docker-compose/editable_dependencies/*
-!tools/docker-compose/editable_dependencies/README.md
-!tools/docker-compose/editable_dependencies/install.sh
 # Tower setup playbook testing
 setup/test/roles/postgresql
@@ -65,6 +74,11 @@ __pycache__
 /tmp
 **/npm-debug.log*
+# UI build flag files
+awx/ui/.deps_built
+awx/ui/.release_built
+awx/ui/.release_deps_built
 # Testing
 .cache
 .coverage
@@ -142,18 +156,8 @@ use_dev_supervisor.txt
 .idea/*
 *.unison.tmp
 *.#
-/awx/ui/.ui-built
-/Dockerfile
 /_build/
 /_build_kube_dev/
+/Dockerfile
+/Dockerfile.dev
 /Dockerfile.kube-dev
-# Docs build stuff
-docs/docsite/build/
-_readthedocs/
-# Pyenv
-.python-version
+awx/ui/src
+awx/ui/build

View File

@@ -1,5 +0,0 @@
[allowlist]
description = "Documentation contains example secrets and passwords"
paths = [
    "docs/docsite/rst/administration/oauth2_token_auth.rst",
]

View File

@@ -1,5 +0,0 @@
[tool.pip-tools]
resolver = "backtracking"
allow-unsafe = true
strip-extras = true
quiet = true

View File

@@ -1,16 +0,0 @@
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
version: 2
build:
  os: ubuntu-22.04
  tools:
    python: >-
      3.11
  commands:
    - pip install --user tox
    - python3 -m tox -e docs --notest -v
    - python3 -m tox -e docs --skip-pkg-install -q
    - mkdir -p _readthedocs/html/
    - mv docs/docsite/build/html/* _readthedocs/html/

.vscode/launch.json
View File

@@ -1,113 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "run_ws_heartbeat",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_ws_heartbeat"],
"django": true,
"preLaunchTask": "stop awx-ws-heartbeat",
"postDebugTask": "start awx-ws-heartbeat"
},
{
"name": "run_cache_clear",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_cache_clear"],
"django": true,
"preLaunchTask": "stop awx-cache-clear",
"postDebugTask": "start awx-cache-clear"
},
{
"name": "run_callback_receiver",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_callback_receiver"],
"django": true,
"preLaunchTask": "stop awx-receiver",
"postDebugTask": "start awx-receiver"
},
{
"name": "run_dispatcher",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_dispatcher"],
"django": true,
"preLaunchTask": "stop awx-dispatcher",
"postDebugTask": "start awx-dispatcher"
},
{
"name": "run_rsyslog_configurer",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_rsyslog_configurer"],
"django": true,
"preLaunchTask": "stop awx-rsyslog-configurer",
"postDebugTask": "start awx-rsyslog-configurer"
},
{
"name": "run_cache_clear",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_cache_clear"],
"django": true,
"preLaunchTask": "stop awx-cache-clear",
"postDebugTask": "start awx-cache-clear"
},
{
"name": "run_wsrelay",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_wsrelay"],
"django": true,
"preLaunchTask": "stop awx-wsrelay",
"postDebugTask": "start awx-wsrelay"
},
{
"name": "daphne",
"type": "debugpy",
"request": "launch",
"program": "/var/lib/awx/venv/awx/bin/daphne",
"args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
"django": true,
"preLaunchTask": "stop awx-daphne",
"postDebugTask": "start awx-daphne"
},
{
"name": "runserver(uwsgi alternative)",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["runserver", "127.0.0.1:8052"],
"django": true,
"preLaunchTask": "stop awx-uwsgi",
"postDebugTask": "start awx-uwsgi"
},
{
"name": "runserver_plus(uwsgi alternative)",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["runserver_plus", "127.0.0.1:8052"],
"django": true,
"preLaunchTask": "stop awx-uwsgi and install Werkzeug",
"postDebugTask": "start awx-uwsgi"
},
{
"name": "shell_plus",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["shell_plus"],
"django": true,
},
]
}

.vscode/tasks.json
View File

@@ -1,100 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "start awx-cache-clear",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-cache-clear"
},
{
"label": "stop awx-cache-clear",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-cache-clear"
},
{
"label": "start awx-daphne",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-daphne"
},
{
"label": "stop awx-daphne",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-daphne"
},
{
"label": "start awx-dispatcher",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-dispatcher"
},
{
"label": "stop awx-dispatcher",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-dispatcher"
},
{
"label": "start awx-receiver",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-receiver"
},
{
"label": "stop awx-receiver",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-receiver"
},
{
"label": "start awx-rsyslog-configurer",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
},
{
"label": "stop awx-rsyslog-configurer",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
},
{
"label": "start awx-rsyslogd",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-rsyslogd"
},
{
"label": "stop awx-rsyslogd",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-rsyslogd"
},
{
"label": "start awx-uwsgi",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-uwsgi"
},
{
"label": "stop awx-uwsgi",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-uwsgi"
},
{
"label": "stop awx-uwsgi and install Werkzeug",
"type": "shell",
"command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
},
{
"label": "start awx-ws-heartbeat",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-ws-heartbeat"
},
{
"label": "stop awx-ws-heartbeat",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
},
{
"label": "start awx-wsrelay",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-wsrelay"
},
{
"label": "stop awx-wsrelay",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-wsrelay"
}
]
}

View File

@@ -5,12 +5,11 @@ ignore: |
awx/main/tests/data/inventory/plugins/**
# vault files
awx/main/tests/data/ansible_utils/playbooks/valid/vault.yml
awx/ui/test/e2e/tests/smoke-vars.yml
awx/ui/node_modules
tools/docker-compose/_sources
# django template files
awx/api/templates/instance_install_bundle/**
.readthedocs.yaml
tools/loki
tools/otel
extends: default

View File

@@ -2,7 +2,7 @@
Hi there! We're excited to have you as a contributor.
Have questions about this document or anything not covered here? Create a topic using the [AWX tag on the Ansible Forum](https://forum.ansible.com/tag/awx).
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
## Table of contents
@@ -30,7 +30,7 @@ Have questions about this document or anything not covered here? Create a topic
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see [git push docs](https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt). A minimal sketch follows this list.
- If submitting a large code change, it's a good idea to create a [forum topic tagged with 'awx'](https://forum.ansible.com/tag/awx), and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.libera.chat, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
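A minimal sketch of that safer force-push (the branch name `my-feature` is illustrative):

    # Rebase on the latest devel, then push without clobbering
    # commits a collaborator may have pushed in the meantime.
    git fetch origin
    git rebase origin/devel
    git push --force-with-lease origin my-feature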
## Setting up your development environment
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
#### Frontend Development
See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
See [the ui development documentation](awx/ui/CONTRIBUTING.md).
#### Fork and clone the AWX repo
@@ -121,18 +121,18 @@ If it has someone assigned to it then that person is the person responsible for
**NOTES**
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the [Ansible Forum](https://forum.ansible.com/tag/awx).
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](./awx/ui/README.md).
### Translations
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as it's an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional languages.
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
## Submitting Pull Requests
@@ -143,13 +143,15 @@ Here are a few things you can do to help the visibility of your change, and incr
- No issues when running linters/code checkers (a combined example follows this list)
- Python: black: `(container)/awx_devel$ make black`
- Javascript: `(container)/awx_devel$ make ui-lint`
- No issues from unit tests
- Python: py.test: `(container)/awx_devel$ make test`
- JavaScript: `(container)/awx_devel$ make ui-test`
- Write tests for new functionality, update/add tests for bug fixes
- Make the smallest change possible
- Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
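Taken together, a hypothetical pre-submission pass inside the development container could look like:

    make black     # auto-format the Python code
    make api-lint  # black --check, flake8, and yamllint
    make test      # run the API unit tests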
It's generally a good idea to discuss features with us first by engaging on the [Ansible Forum](https://forum.ansible.com/tag/awx).
It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
`git pull`, and `git rebase`, rather than `git merge`.
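For example, updating a feature branch against `devel` without creating a merge commit (the branch name is illustrative):

    git checkout my-feature
    git pull --rebase origin devel   # replay local commits on top of devel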
@@ -159,11 +161,11 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
## Reporting Issues
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issue, we ask that you please view our [Issues guide](./ISSUES.md).
## Getting Help
If you require additional assistance, please submit your question to the [Ansible Forum](https://forum.ansible.com/tag/awx).
If you require additional assistance, please reach out to us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
For extra information on debugging tools, see [Debugging](./docs/debugging/).

View File

@@ -4,6 +4,6 @@
Early versions of AWX did not support seamless upgrades between major versions and required the use of a backup and restore tool to perform upgrades.
As of version 18.0, `awx-operator` is the preferred install/upgrade method. Users who wish to upgrade modern AWX installations should follow the instructions at:
https://github.com/ansible/awx-operator/blob/devel/docs/upgrade/upgrading.md
Users who wish to upgrade modern AWX installations should follow the instructions at:
https://github.com/ansible/awx/blob/devel/INSTALL.md#upgrading-from-previous-versions

View File

@@ -1,11 +1,11 @@
# Issues
## Reporting
Use the GitHub [issue tracker](https://github.com/ansible/awx/issues) for filing bugs. In order to save time, and help us respond to issues quickly, make sure to fill out as much of the issue template
as possible. Version information, and an accurate reproducing scenario are critical to helping us identify the problem.
Please don't use the issue tracker as a way to ask how to do something. Instead, use the [Ansible Forum](https://forum.ansible.com/tag/awx).
Please don't use the issue tracker as a way to ask how to do something. Instead, use the [mailing list](https://groups.google.com/forum/#!forum/awx-project), and the `#ansible-awx` channel on irc.libera.chat to get help.
Before opening a new issue, please use the issue search feature to see if what you're experiencing has already been reported. If you have any extra detail to provide, please comment. Otherwise, rather than posting a "me too" comment, please consider giving it a ["thumbs up"](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comment) to give us an indication of the severity of the problem.
@@ -14,7 +14,7 @@ Before opening a new issue, please use the issue search feature to see if what y
When reporting issues for the UI, we also appreciate having screen shots and any error messages from the web browser's console. It's not unusual for browser extensions
and plugins to cause problems. Reporting those will also help speed up analyzing and resolving UI bugs.
### API and backend issues
For the API and backend services, please capture all of the logs that you can from the time the problem occurred.
@@ -31,7 +31,7 @@ If your issue isn't considered high priority, then please be patient as it may t
`state:needs_info` The issue needs more information. This could be more debug output, more specifics about the system such as version information. Any detail that is currently preventing this issue from moving forward. This should be considered a blocked state.
`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer or when a person is less familiar with an area of the code base the issue is for.
`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer or when a person is less familar with an area of the code base the issue is for.
`state:needs_revision` More commonly used on pull requests, this state represents that there are changes that are being waited on.
@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_tr
Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
The `state:needs_triage` label will remain on your pull request until a person has looked at it.
The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
The comment will look something like `CC @matburt @wwitzel3 ...`.

View File

@@ -4,6 +4,7 @@ recursive-include awx *.mo
recursive-include awx/static *
recursive-include awx/templates *.html
recursive-include awx/api/templates *.md *.html *.yml
recursive-include awx/ui/build *.html
recursive-include awx/ui/build *
recursive-include awx/playbooks *.yml
recursive-include awx/lib/site-packages *
@@ -15,11 +16,12 @@ recursive-include licenses *
recursive-exclude awx devonly.py*
recursive-exclude awx/api/tests *
recursive-exclude awx/main/tests *
recursive-exclude awx/ui/client *
recursive-exclude awx/settings local_settings.py*
include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1
include tools/scripts/automation-controller-service
include tools/scripts/rsyslog-4xx-recovery
include tools/scripts/failure-event-handler
include tools/scripts/awx-python
include awx/playbooks/library/mkfifo.py
include tools/sosreport/*

Makefile
View File

@@ -1,17 +1,12 @@
-include awx/ui/Makefile
PYTHON ?= python3.9
DOCKER_COMPOSE ?= docker-compose
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
SHELL := bash
DOCKER_COMPOSE ?= docker compose
OFFICIAL ?= no
NODE ?= node
NPM_BIN ?= npm
KIND_BIN ?= $(shell which kind)
CHROMIUM_BIN=/tmp/chrome-linux/chrome
GIT_REPO_NAME ?= $(shell basename `git rev-parse --show-toplevel`)
GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
MANAGEMENT_COMMAND ?= awx-manage
VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
# ansible-test requires semver compatible version, so we allow overrides to hack it
COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
@@ -24,54 +19,36 @@ COLLECTION_TEST_TARGET ?=
# args for collection install
COLLECTION_PACKAGE ?= awx
COLLECTION_NAMESPACE ?= awx
COLLECTION_INSTALL = $(HOME)/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
COLLECTION_TEMPLATE_VERSION ?= false
# NOTE: This defaults the container image version to the branch that's active
COMPOSE_TAG ?= $(GIT_BRANCH)
MAIN_NODE_TYPE ?= hybrid
# If set to true docker-compose will also start a pgbouncer instance and use it
PGBOUNCER ?= false
# If set to true docker-compose will also start a keycloak instance
KEYCLOAK ?= false
# If set to true docker-compose will also start an ldap instance
LDAP ?= false
# If set to true docker-compose will also start a splunk instance
SPLUNK ?= false
# If set to true docker-compose will also start a prometheus instance
PROMETHEUS ?= false
# If set to true docker-compose will also start a grafana instance
GRAFANA ?= false
# If set to true docker-compose will also start a hashicorp vault instance
VAULT ?= false
# If set to true docker-compose will also start a hashicorp vault instance with TLS enabled
VAULT_TLS ?= false
# If set to true docker-compose will also start an OpenTelemetry Collector instance
OTEL ?= false
# If set to true docker-compose will also start a Loki instance
LOKI ?= false
# If set to true docker-compose will install editable dependencies
EDITABLE_DEPENDENCIES ?= false
# If set to true, use tls for postgres connection
PG_TLS ?= false
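# Example (illustrative, editor-added): the switches above are set on the make
# command line, e.g. to bring up the dev environment with extra side containers:
#   VAULT=true PROMETHEUS=true GRAFANA=true make docker-compose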
VENV_BASE ?= /var/lib/awx/venv
DEV_DOCKER_OWNER ?= ansible
DEV_DOCKER_TAG_BASE ?= ghcr.io/ansible
# Docker will only accept lowercase, so github names like Paul need to be paul
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_devel:$(COMPOSE_TAG)
IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_kube_devel:$(COMPOSE_TAG)
IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME):$(COMPOSE_TAG)
# Common command to use for running ansible-playbook
ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
# Python packages to install only from source (not from binary wheels)
# Comma separated list
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==70.3.0 setuptools_scm[toml]==8.1.0 wheel==0.45.1 cython==3.0.11
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4
NAME ?= awx
@@ -83,27 +60,13 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
I18N_FLAG_FILE = .i18n_built
## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
# DOCKER_CACHE=--no-cache make docker-compose-build
ifeq ($(DOCKER_CACHE),)
DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
else
DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
endif
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
develop refresh adduser migrate dbchange \
receiver test test_unit test_coverage coverage_html \
sdist \
ui-release ui-devel \
VERSION PYTHON_VERSION docker-compose-sources \
.git/hooks/pre-commit
.git/hooks/pre-commit github_ci_setup github_ci_runner
clean-tmp:
rm -rf tmp/
@@ -121,10 +84,10 @@ clean-schema:
clean-languages:
rm -f $(I18N_FLAG_FILE)
find ./awx/locale/ -type f -regex '.*\.mo$$' -delete
find ./awx/locale/ -type f -regex ".*\.mo$" -delete
## Remove temporary build files, compiled Python files.
clean: clean-api clean-awxkit clean-dist
clean: clean-ui clean-api clean-awxkit clean-dist
rm -rf awx/public
rm -rf awx/lib/site-packages
rm -rf awx/job_status
@@ -223,12 +186,20 @@ migrate:
dbchange:
$(MANAGEMENT_COMMAND) makemigrations
supervisor:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
supervisord --pidfile=/tmp/supervisor_pid -n
collectstatic:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
@@ -236,7 +207,7 @@ uwsgi: collectstatic
uwsgi /etc/tower/uwsgi.ini
awx-autoreload:
@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
daphne:
@if [ "$(VENV_BASE)" ]; then \
@@ -244,6 +215,12 @@ daphne:
fi; \
daphne -b 127.0.0.1 -p 8051 awx.asgi:channel_layer
wsbroadcast:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py run_wsbroadcast
## Run to start the background task dispatcher for development.
dispatcher:
@if [ "$(VENV_BASE)" ]; then \
@@ -251,6 +228,7 @@ dispatcher:
fi; \
$(PYTHON) manage.py run_dispatcher
## Run to start the zeromq callback receiver
receiver:
@if [ "$(VENV_BASE)" ]; then \
@@ -267,34 +245,6 @@ jupyter:
fi; \
$(MANAGEMENT_COMMAND) shell_plus --notebook
## Start the rsyslog configurer process in background in development environment.
run-rsyslog-configurer:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py run_rsyslog_configurer
## Start cache_clear process in background in development environment.
run-cache-clear:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py run_cache_clear
## Start the wsrelay process in background in development environment.
run-wsrelay:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py run_wsrelay
## Start the heartbeat process in background in development environment.
run-ws-heartbeat:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py run_ws_heartbeat
reports:
mkdir -p $@
@@ -316,25 +266,20 @@ swagger: reports
@if [ "$(VENV_BASE)" ]; then \ @if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \ . $(VENV_BASE)/awx/bin/activate; \
fi; \ fi; \
(set -o pipefail && py.test --cov --cov-report=xml --junitxml=reports/junit.xml $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report) (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
check: black
api-lint:
BLACK_ARGS="--check" $(MAKE) black
BLACK_ARGS="--check" make black
flake8 awx
yamllint -s .
## Run egg_info_dev to generate awx.egg-info for development.
awx-link:
[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/$(PYTHON)/site-packages/awx.egg-link
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
PYTEST_ARGS ?= -n auto
## Run all API unit tests.
test:
@@ -345,29 +290,20 @@ test:
cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
live_test:
cd awx/main/tests/live && py.test tests/
## Run all API unit tests with coverage enabled.
test_coverage:
$(MAKE) test PYTEST_ARGS="--create-db --cov --cov-report=xml --junitxml=reports/junit.xml"
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=awxkit/coverage.xml,reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=awxkit/report.xml,reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
## Login to Github container image registry, pull image, then build image.
github_ci_setup:
# GITHUB_ACTOR is automatic github actions env var
# CI_GITHUB_TOKEN is defined in .github files
echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
make docker-compose-build
test_migrations:
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db --cov=awx --cov-report=xml --junitxml=reports/junit.xml $(PYTEST_ARGS) $(TEST_DIRS)
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
## Runs AWX_DOCKER_CMD inside a new docker container.
docker-runner:
docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z $(AWX_DOCKER_ARGS) --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
## Builds image and runs AWX_DOCKER_CMD in it, mainly for .github checks.
github_ci_runner: github_ci_setup docker-runner
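# Example (illustrative, editor-added): run the API unit tests in a throwaway container:
#   AWX_DOCKER_CMD='make test' make docker-runner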
test_collection:
rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
@@ -376,12 +312,7 @@ test_collection:
fi && \
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
ansible --version
py.test $(COLLECTION_TEST_DIRS) --cov --cov-report=xml --junitxml=reports/junit.xml -v
py.test $(COLLECTION_TEST_DIRS) -v
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
# The python path needs to be modified so that the tests can find Ansible within the container
# First we will use anything explicitly set as PYTHONPATH
# Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory)
@@ -397,7 +328,7 @@ symlink_collection:
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
awx_collection_build: $(shell find awx_collection -type f)
$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
-e collection_package=$(COLLECTION_PACKAGE) \
-e collection_namespace=$(COLLECTION_NAMESPACE) \
-e collection_version=$(COLLECTION_VERSION) \
@@ -415,30 +346,24 @@ test_collection_sanity:
rm -rf $(COLLECTION_INSTALL)
if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
ansible --version
COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
COLLECTION_VERSION=1.0.0 make install_collection
cd $(COLLECTION_INSTALL) && \
cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
ansible-test sanity $(COLLECTION_SANITY_ARGS) --coverage --junit && \
ansible-test coverage xml --requirements --group-by command --group-by version
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=sanity*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
echo test-result-files="$$(find "$(COLLECTION_INSTALL)/tests/output/junit/" -type f -name '*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
fi
test_collection_integration: install_collection
cd $(COLLECTION_INSTALL) && \
cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
ansible-test coverage xml --requirements --group-by command --group-by version
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
fi
test_unit:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
py.test awx/main/tests/unit awx/conf/tests/unit
py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
## Run all API unit tests with coverage enabled.
test_coverage:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)
## Output test coverage as HTML (into htmlcov directory).
coverage_html:
@@ -457,7 +382,75 @@ bulk_data:
fi; \
$(PYTHON) tools/data_generators/rbac_dummy_data_generator.py --preset=$(DATA_GEN_PRESET)
# UI TASKS
# --------------------------------------
UI_BUILD_FLAG_FILE = awx/ui/.ui-built
clean-ui:
rm -rf node_modules
rm -rf awx/ui/node_modules
rm -rf awx/ui/build
rm -rf awx/ui/src/locales/_build
rm -rf $(UI_BUILD_FLAG_FILE)
# the collectstatic command doesn't like it if this dir doesn't exist.
mkdir -p awx/ui/build/static
awx/ui/node_modules:
NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
$(UI_BUILD_FLAG_FILE):
$(MAKE) awx/ui/node_modules
$(PYTHON) tools/scripts/compilemessages.py
$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
$(NPM_BIN) --prefix awx/ui --loglevel warn run build
touch $@
ui-release: $(UI_BUILD_FLAG_FILE)
ui-devel: awx/ui/node_modules
@$(MAKE) -B $(UI_BUILD_FLAG_FILE)
@if [ -d "/var/lib/awx" ] ; then \
mkdir -p /var/lib/awx/public/static/css; \
mkdir -p /var/lib/awx/public/static/js; \
mkdir -p /var/lib/awx/public/static/media; \
cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css; \
cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js; \
cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media; \
fi
ui-devel-instrumented: awx/ui/node_modules
$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
ui-devel-test: awx/ui/node_modules
$(NPM_BIN) --prefix awx/ui --loglevel warn run start
ui-lint:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui lint
$(NPM_BIN) run --prefix awx/ui prettier-check
ui-test:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui test
ui-test-screens:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui pretest
$(NPM_BIN) run --prefix awx/ui test-screens --runInBand
ui-test-general:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui pretest
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
HEADLESS ?= no
ifeq ($(HEADLESS), yes)
dist/$(SDIST_TAR_FILE):
else
dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE)
endif
$(PYTHON) -m build -s
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -488,41 +481,27 @@ endif
docker-compose-sources: .git/hooks/pre-commit
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
fi;
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
-e awx_image=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_devel \
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
-e awx_image_tag=$(COMPOSE_TAG) \
-e receptor_image=$(RECEPTOR_IMAGE) \
-e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
-e execution_node_count=$(EXECUTION_NODE_COUNT) \
-e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
-e enable_pgbouncer=$(PGBOUNCER) \
-e enable_keycloak=$(KEYCLOAK) \
-e enable_ldap=$(LDAP) \
-e enable_splunk=$(SPLUNK) \
-e enable_prometheus=$(PROMETHEUS) \
-e enable_grafana=$(GRAFANA) \
-e enable_grafana=$(GRAFANA) $(EXTRA_SOURCES_ANSIBLE_OPTS)
-e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS) \
-e enable_otel=$(OTEL) \
-e enable_loki=$(LOKI) \
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
-e pg_tls=$(PG_TLS) \
$(EXTRA_SOURCES_ANSIBLE_OPTS)
docker-compose: awx/projects docker-compose-sources
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS); \
$(MAKE) docker-compose-up
docker-compose-up:
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
docker-compose-down:
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
docker-compose-credential-plugins: awx/projects docker-compose-sources
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
@@ -551,105 +530,55 @@ docker-compose-container-group-clean:
fi
rm -rf tools/docker-compose-minikube/_sources/
.PHONY: Dockerfile.dev
## Generate Dockerfile.dev for awx_devel image
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
-e dockerfile_name=Dockerfile.dev \
-e build_dev=True \
-e receptor_image=$(RECEPTOR_IMAGE)
## Build awx_devel image for docker compose development environment
docker-compose-build: Dockerfile.dev
DOCKER_BUILDKIT=1 docker build \
--ssh default=$(SSH_AUTH_SOCK) \
-f Dockerfile.dev \
-t $(DEVEL_IMAGE_NAME) \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_DEVEL_CACHE_FLAG) .
## Base development image build
docker-compose-build:
ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True -e receptor_image=$(RECEPTOR_IMAGE)
DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
--build-arg BUILDKIT_INLINE_CACHE=1 \
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
.PHONY: docker-compose-buildx
## Build awx_devel image for docker compose development environment for multiple architectures
docker-compose-buildx: Dockerfile.dev
- docker buildx create --name docker-compose-buildx
docker buildx use docker-compose-buildx
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_DEVEL_CACHE_FLAG) \
--platform=$(PLATFORMS) \
--tag $(DEVEL_IMAGE_NAME) \
-f Dockerfile.dev .
- docker buildx rm docker-compose-buildx
docker-clean:
-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
-$(foreach image_id,$(shell docker images --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
docker-refresh: docker-clean docker-compose
## Docker Development Environment with Elastic Stack Connected
docker-compose-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
docker-compose-cluster-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
docker-compose-container-group:
MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose
MINIKUBE_CONTAINER_GROUP=true make docker-compose
clean-elk:
docker stop tools_kibana_1
docker stop tools_logstash_1
docker stop tools_elasticsearch_1
docker rm tools_logstash_1
docker rm tools_elasticsearch_1
docker rm tools_kibana_1
psql-container:
docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
VERSION:
@echo "awx: $(VERSION)"
PYTHON_VERSION:
@echo "$(subst python,,$(PYTHON))"
@echo "$(PYTHON)" | sed 's:python::'
.PHONY: version-for-buildyml
version-for-buildyml:
@echo $(firstword $(subst +, ,$(VERSION)))
# version-for-buildyml prints a special version string for build.yml,
# chopping off the sha after the '+' sign.
# tools/ansible/build.yml was doing this: make print-VERSION | cut -d + -f -1
# This does the same thing in native make without
# the pipe or the extra processes, and now the pb does `make version-for-buildyml`
# Example:
# 22.1.1.dev38+g523c0d9781 becomes 22.1.1.dev38
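# Illustrative run (editor-added), using the example version above:
#   $ make version-for-buildyml
#   22.1.1.dev38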
.PHONY: Dockerfile
## Generate Dockerfile for awx image
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE)
-e receptor_image=$(RECEPTOR_IMAGE) \
-e headless=$(HEADLESS)
## Build awx image for deployment on Kubernetes environment.
awx-kube-build: Dockerfile
DOCKER_BUILDKIT=1 docker build -f Dockerfile \
--ssh default=$(SSH_AUTH_SOCK) \
--build-arg VERSION=$(VERSION) \
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
--build-arg HEADLESS=$(HEADLESS) \
$(DOCKER_KUBE_CACHE_FLAG) \
-t $(IMAGE_KUBE) .
## Build multi-arch awx image for deployment on Kubernetes environment.
awx-kube-buildx: Dockerfile
- docker buildx create --name awx-kube-buildx
docker buildx use awx-kube-buildx
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg VERSION=$(VERSION) \
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
--build-arg HEADLESS=$(HEADLESS) \
--platform=$(PLATFORMS) \
$(DOCKER_KUBE_CACHE_FLAG) \
--tag $(IMAGE_KUBE) \
-f Dockerfile .
- docker buildx rm awx-kube-buildx
.PHONY: Dockerfile.kube-dev
## Generate Dockerfile.kube-dev for awx_kube_devel image
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
ansible-playbook tools/ansible/dockerfile.yml \
-e dockerfile_name=Dockerfile.kube-dev \
-e kube_dev=True \
-e template_dest=_build_kube_dev \
@@ -658,31 +587,29 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
## Build awx_kube_devel image for development on local Kubernetes environment.
awx-kube-dev-build: Dockerfile.kube-dev
DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
--ssh default=$(SSH_AUTH_SOCK) \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
-t $(IMAGE_KUBE_DEV) .
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
awx-kube-dev-buildx: Dockerfile.kube-dev
- docker buildx create --name awx-kube-dev-buildx
docker buildx use awx-kube-dev-buildx
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
--platform=$(PLATFORMS) \
--tag $(IMAGE_KUBE_DEV) \
-f Dockerfile.kube-dev .
- docker buildx rm awx-kube-dev-buildx
## Build awx image for deployment on Kubernetes environment.
awx-kube-build: Dockerfile
DOCKER_BUILDKIT=1 docker build -f Dockerfile \
--build-arg VERSION=$(VERSION) \
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
--build-arg HEADLESS=$(HEADLESS) \
-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
kind-dev-load: awx-kube-dev-build
$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
# Translation TASKS
# --------------------------------------
## generate UI .pot file, an empty template of strings yet to be translated
pot: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
## generate UI .po files for each locale (will update translated strings for `en`)
po: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
## generate API django .pot .po
messages:
@if [ "$(VENV_BASE)" ]; then \
@@ -690,7 +617,6 @@ messages:
fi; \
$(PYTHON) manage.py makemessages -l en_us --keep-pot
.PHONY: print-%
print-%:
@echo $($*)
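# Example (illustrative, editor-added): print any variable's computed value, e.g.:
#   make print-VERSION
#   make print-COMPOSE_TAG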
@@ -702,12 +628,12 @@ HELP_FILTER=.PHONY
## Display help targets
help:
@printf "Available targets:\n"
@$(MAKE) -s help/generate | grep -vE "\w($(HELP_FILTER))"
@make -s help/generate | grep -vE "\w($(HELP_FILTER))"
## Display help for all targets
help/all:
@printf "Available targets:\n"
@$(MAKE) -s help/generate
@make -s help/generate
## Generate help output from MAKEFILE_LIST
help/generate:
@@ -728,7 +654,3 @@ help/generate:
} \
{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
@printf "\n"
## Display help for ui targets
help/ui:
@$(MAKE) -s help MAKEFILE_LIST="awx/ui/Makefile"

View File

@@ -1,24 +1,13 @@
-[![CI](https://github.com/ansible/awx/actions/workflows/ci.yml/badge.svg?branch=devel)](https://github.com/ansible/awx/actions/workflows/ci.yml) [![codecov](https://codecov.io/github/ansible/awx/graph/badge.svg?token=4L4GSP9IAR)](https://codecov.io/github/ansible/awx) [![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-yellow.svg)](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [![Apache v2 License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/ansible/awx/blob/devel/LICENSE.md) [![AWX on the Ansible Forum](https://img.shields.io/badge/mailing%20list-AWX-orange.svg)](https://forum.ansible.com/tag/awx)
-[![Ansible Matrix](https://img.shields.io/badge/matrix-Ansible%20Community-blueviolet.svg?logo=matrix)](https://chat.ansible.im/#/welcome) [![Ansible Discourse](https://img.shields.io/badge/discourse-Ansible%20Community-yellowgreen.svg?logo=discourse)](https://forum.ansible.com)
+[![CI](https://github.com/ansible/awx/actions/workflows/ci.yml/badge.svg?branch=devel)](https://github.com/ansible/awx/actions/workflows/ci.yml) [![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-yellow.svg)](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [![Apache v2 License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/ansible/awx/blob/devel/LICENSE.md) [![AWX Mailing List](https://img.shields.io/badge/mailing%20list-AWX-orange.svg)](https://groups.google.com/g/awx-project)
+[![IRC Chat - #ansible-awx](https://img.shields.io/badge/IRC-%23ansible--awx-blueviolet.svg)](https://libera.chat)
<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
> [!CAUTION]
> The last release of this repository was released on Jul 2, 2024.
> **Releases of this project are now paused during a large scale refactoring.**
> For more information, follow [the Forum](https://forum.ansible.com/) and, more specifically, see the various communications on the matter:
>
> * [Blog: Upcoming Changes to the AWX Project](https://www.ansible.com/blog/upcoming-changes-to-the-awx-project/)
> * [Streamlining AWX Releases](https://forum.ansible.com/t/streamlining-awx-releases/6894)
> * [Refactoring AWX into a Pluggable, Service-Oriented Architecture](https://forum.ansible.com/t/refactoring-awx-into-a-pluggable-service-oriented-architecture/7404)
> * [Upcoming changes to AWX Operator installation methods](https://forum.ansible.com/t/upcoming-changes-to-awx-operator-installation-methods/7598)
> * [AWX UI and credential types transitioning to the new pluggable architecture](https://forum.ansible.com/t/awx-ui-and-credential-types-transitioning-to-the-new-pluggable-architecture/8027)
AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is one of the upstream projects for [Red Hat Ansible Automation Platform](https://www.ansible.com/products/automation-platform).

To install AWX, please view the [Install guide](./INSTALL.md).

-To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).
+To learn more about using AWX, and Tower, view the [Tower docs site](http://docs.ansible.com/ansible-tower/index.html).

The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
@@ -29,9 +18,9 @@ Contributing
- Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
- All code submissions are made through pull requests against the `devel` branch.
-- All contributors must use `git commit --signoff` for any commit to be merged and agree that usage of `--signoff` constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
+- All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
-- If submitting a large code change, it's a good idea to join the discussion via the [Ansible Forum](https://forum.ansible.com/tag/awx). This helps everyone know what's going on, and it also helps save time and effort if the community decides some changes are needed.
+- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on web.libera.chat and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.
Reporting Issues
----------------
@@ -41,11 +30,12 @@ If you're experiencing a problem that you feel is a bug in AWX or have ideas for
Code of Conduct
---------------
-We require all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
+We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
Get Involved
------------
-We welcome your feedback and ideas via the [Ansible Forum](https://forum.ansible.com/tag/awx).
-For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://ansible.readthedocs.io/projects/awx/en/latest/contributor/communication.html).
+We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
+- Join the `#ansible-awx` channel on irc.libera.chat
+- Join the [mailing list](https://groups.google.com/forum/#!forum/awx-project)


@@ -5,7 +5,6 @@ from __future__ import absolute_import, unicode_literals
import os
import sys
import warnings
from importlib.metadata import PackageNotFoundError, version as _get_version
def get_version():
@@ -35,8 +34,10 @@ def version_file():
try:
-    __version__ = _get_version('awx')
-except PackageNotFoundError:
+    import pkg_resources
+    __version__ = pkg_resources.get_distribution('awx').version
+except pkg_resources.DistributionNotFound:
    __version__ = get_version()
__all__ = ['__version__']
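The replacement drops the pkg_resources dependency in favor of the standard library. A minimal, standalone sketch of the same fallback pattern (the file-based get_version() is stubbed with a hypothetical value here, purely for illustration):

```python
# Sketch of the version-resolution fallback above; get_version() is a
# stand-in for AWX's file-based fallback, not the real implementation.
from importlib.metadata import PackageNotFoundError, version as _get_version


def get_version():
    return '0.0.0+unknown'  # hypothetical fallback value


try:
    # Resolve the installed distribution's version from package metadata
    __version__ = _get_version('awx')
except PackageNotFoundError:
    # Source checkout without installed metadata: fall back
    __version__ = get_version()
```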
@@ -51,25 +52,124 @@ try:
except ImportError:  # pragma: no cover
    MODE = 'production'
import hashlib
try:
    import django  # noqa: F401
+    HAS_DJANGO = True
except ImportError:
-    pass
+    HAS_DJANGO = False
else:
-    from django.db.backends.base import schema
-    from django.db.models import indexes
-    from django.db.backends.utils import names_digest
    from django.db import connection
if HAS_DJANGO is True:
# See upgrade blocker note in requirements/README.md
try:
names_digest('foo', 'bar', 'baz', length=8)
except ValueError:
def names_digest(*args, length):
"""
Generate a 32-bit digest of a set of arguments that can be used to shorten
identifying names. Support for use in FIPS environments.
"""
h = hashlib.md5(usedforsecurity=False)
for arg in args:
h.update(arg.encode())
return h.hexdigest()[:length]
schema.names_digest = names_digest
indexes.names_digest = names_digest
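The fallback produces a short, stable identifier from the argument names. A quick standalone check of its behavior (note the `usedforsecurity` flag requires Python 3.9+):

```python
import hashlib


def names_digest(*args, length):
    # Same shape as the FIPS-friendly fallback above; md5 is explicitly
    # flagged as a non-security use so it works in FIPS environments.
    h = hashlib.md5(usedforsecurity=False)
    for arg in args:
        h.update(arg.encode())
    return h.hexdigest()[:length]


# 8 hex characters = 32 bits, matching the docstring's "32-bit digest"
print(names_digest('foo', 'bar', 'baz', length=8))
```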
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
commands.append(f[:-4])
except OSError:
pass
return commands
def oauth2_getattribute(self, attr):
# Custom method to override
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
from django.conf import settings
from oauth2_provider.settings import DEFAULTS
val = None
if (isinstance(attr, str)) and (attr in DEFAULTS) and (not attr.startswith('_')):
# certain Django OAuth Toolkit migrations actually reference
# setting lookups for references to model classes (e.g.,
# oauth2_settings.REFRESH_TOKEN_MODEL)
# If we're doing an OAuth2 setting lookup *while running* a migration,
# don't do our usual database settings lookup
val = settings.OAUTH2_PROVIDER.get(attr)
if val is None:
val = object.__getattribute__(self, attr)
return val
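The override makes every OAuth2 setting lookup consult Django settings first and fall back to the toolkit's own attribute. A self-contained sketch of that lookup order with hypothetical names (this is not AWX or django-oauth-toolkit code):

```python
# Hypothetical stand-ins: 'overrides' plays the role of
# settings.OAUTH2_PROVIDER, DEFAULTS the toolkit's known keys.
class SettingsBackedConfig:
    DEFAULTS = {'ACCESS_TOKEN_EXPIRE_SECONDS': 36000}
    overrides = {'ACCESS_TOKEN_EXPIRE_SECONDS': 600}

    def __getattribute__(self, attr):
        val = None
        if isinstance(attr, str) and not attr.startswith('_'):
            defaults = object.__getattribute__(self, 'DEFAULTS')
            if attr in defaults:
                # Live settings lookup wins whenever it yields a value
                val = object.__getattribute__(self, 'overrides').get(attr)
        if val is None:
            val = object.__getattribute__(self, attr)
        return val


print(SettingsBackedConfig().ACCESS_TOKEN_EXPIRE_SECONDS)  # -> 600
```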
def prepare_env():
    # Update the default settings environment variable based on current mode.
-    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings')
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
os.environ.setdefault('AWX_MODE', MODE)
    # Hide DeprecationWarnings when running in production. Need to first load
    # settings to apply our filter after Django's own warnings filter.
    from django.conf import settings

    if not settings.DEBUG:  # pragma: no cover
        warnings.simplefilter('ignore', DeprecationWarning)
# Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management
django.core.management.find_commands = find_commands
# Monkeypatch Oauth2 toolkit settings class to check for settings
# in django.conf settings each time, not just once during import
import oauth2_provider.settings
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external
# program via unit tests.
for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
# Disable capturing all SQL queries in memory when in DEBUG mode.
if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
# Use the default devserver addr/port defined in settings for runserver.
default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
from django.core.management.commands import runserver as core_runserver
original_handle = core_runserver.Command.handle
def handle(self, *args, **options):
if not options.get('addrport'):
options['addrport'] = '%s:%d' % (default_addr, int(default_port))
elif options.get('addrport').isdigit():
options['addrport'] = '%s:%d' % (default_addr, int(options['addrport']))
return original_handle(self, *args, **options)
core_runserver.Command.handle = handle
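The runserver patch is a plain wrap-and-replace of a method, so bare ports get the default address prepended. Reduced to its shape with a hypothetical command class (only the wrapping pattern mirrors the code above):

```python
# Hypothetical Command class standing in for Django's runserver command.
class Command:
    def handle(self, *args, **options):
        return options['addrport']


original_handle = Command.handle


def handle(self, *args, **options):
    if not options.get('addrport'):
        options['addrport'] = '127.0.0.1:8000'  # default addr and port
    elif options.get('addrport').isdigit():
        # Bare port: prepend the default address
        options['addrport'] = '127.0.0.1:%d' % int(options['addrport'])
    return original_handle(self, *args, **options)


Command.handle = handle
print(Command().handle(addrport='8013'))  # -> 127.0.0.1:8013
```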
def manage():
@@ -79,12 +179,10 @@ def manage():
    from django.conf import settings
    from django.core.management import execute_from_command_line
-    # enforce that the postgres version is a minimum of 12 (we need this for partitioning); if not, terminate the program with exit code 1
-    # In the future, if we require a feature of a postgres version > 12, this should be updated to reflect that.
-    # The return of connection.pg_version is something like 120013, i.e. major * 10000 + minor, so 120013 // 10000 == 12 for PostgreSQL 12.13
+    # enforce that the postgres version is equal to 12; if not, terminate the program with exit code 1
    if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
        if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("At a minimum, postgres version 12 is required\n")
+            sys.stderr.write("Postgres version 12 is required\n")
            sys.exit(1)
    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover


@@ -11,6 +11,9 @@ from django.utils.encoding import smart_str
# Django REST Framework
from rest_framework import authentication
# Django-OAuth-Toolkit
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
logger = logging.getLogger('awx.api.authentication')
@@ -33,3 +36,16 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
class SessionAuthentication(authentication.SessionAuthentication):
    def authenticate_header(self, request):
        return 'Session'
class LoggedOAuth2Authentication(OAuth2Authentication):
def authenticate(self, request):
ret = super(LoggedOAuth2Authentication, self).authenticate(request)
if ret:
user, token = ret
username = user.username if user else '<none>'
logger.info(
smart_str(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
)
setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
return ret


@@ -6,6 +6,9 @@ from rest_framework import serializers
# AWX
from awx.conf import fields, register, register_validate
from awx.api.fields import OAuth2ProviderField
from oauth2_provider.settings import oauth2_settings
from awx.sso.common import is_remote_auth_enabled
register(
@@ -32,7 +35,10 @@ register(
    'DISABLE_LOCAL_AUTH',
    field_class=fields.BooleanField,
    label=_('Disable the built-in authentication system'),
-    help_text=_("Controls whether users are prevented from using the built-in authentication system. "),
+    help_text=_(
+        "Controls whether users are prevented from using the built-in authentication system. "
+        "You probably want to do this if you are using an LDAP or SAML integration."
+    ),
    category=_('Authentication'),
    category_slug='authentication',
)
@@ -44,6 +50,41 @@ register(
    category=_('Authentication'),
    category_slug='authentication',
)
register(
'OAUTH2_PROVIDER',
field_class=OAuth2ProviderField,
default={
'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
},
label=_('OAuth 2 Timeout Settings'),
help_text=_(
'Dictionary for customizing OAuth 2 timeouts, available items are '
'`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
'the duration of refresh tokens, after expired access tokens, '
'in the number of seconds.'
),
category=_('Authentication'),
category_slug='authentication',
unit=_('seconds'),
)
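Per the help text, the value is a dict restricted to the three expiry keys. An illustrative settings value (the durations here are arbitrary examples, not product defaults):

```python
# Example only; these durations are made up for illustration.
OAUTH2_PROVIDER = {
    'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000,     # access tokens: one year
    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,    # auth codes: ten minutes
    'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000,     # refresh tokens: ~one month
}
```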
register(
'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
field_class=fields.BooleanField,
default=False,
label=_('Allow External Users to Create OAuth2 Tokens'),
help_text=_(
'For security reasons, users from external auth providers (LDAP, SAML, '
'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
'To change this behavior, enable this setting. Existing tokens will '
'not be deleted when this setting is toggled off.'
),
category=_('Authentication'),
category_slug='authentication',
)
register(
    'LOGIN_REDIRECT_OVERRIDE',
    field_class=fields.CharField,
@@ -52,7 +93,6 @@ register(
    default='',
    label=_('Login redirect override URL'),
    help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
    category=_('Authentication'),
    category_slug='authentication',
)
@@ -68,7 +108,7 @@ register(
def authentication_validate(serializer, attrs):
-    if attrs.get('DISABLE_LOCAL_AUTH', False):
+    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
        raise serializers.ValidationError(_("There are no remote authentication systems configured."))
    return attrs


@@ -9,6 +9,7 @@ from django.core.exceptions import ObjectDoesNotExist
from rest_framework import serializers

# AWX
from awx.conf import fields
from awx.main.models import Credential

__all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimField']
@@ -78,6 +79,19 @@ class VerbatimField(serializers.Field):
        return value
class OAuth2ProviderField(fields.DictField):
default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
child = fields.IntegerField(min_value=1)
def to_internal_value(self, data):
data = super(OAuth2ProviderField, self).to_internal_value(data)
invalid_flags = set(data.keys()) - self.valid_key_names
if invalid_flags:
self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
return data
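In use, the field accepts only the three whitelisted keys, each with a positive integer value. A sketch of the expected behavior (assumes a configured DRF environment, since fail() raises a DRF ValidationError):

```python
# Behavioral sketch, not a test from the repository.
field = OAuth2ProviderField()
field.to_internal_value({'ACCESS_TOKEN_EXPIRE_SECONDS': 3600})  # accepted
field.to_internal_value({'BOGUS_KEY': 1})  # raises: Invalid key names: BOGUS_KEY
```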
class DeprecatedCredentialField(serializers.IntegerField):
    def __init__(self, **kwargs):
        kwargs['allow_null'] = True

awx/api/filters.py (new file, 450 lines)

@@ -0,0 +1,450 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
# Python
import re
import json
from functools import reduce
# Django
from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist
from django.db import models
from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey
from django.db.models.functions import Cast
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
from rest_framework.filters import BaseFilterBackend
# AWX
from awx.main.utils import get_type_for_model, to_python_boolean
from awx.main.utils.db import get_all_field_names
class TypeFilterBackend(BaseFilterBackend):
"""
Filter on type field now returned with all objects.
"""
def filter_queryset(self, request, queryset, view):
try:
types = None
for key, value in request.query_params.items():
if key == 'type':
if ',' in value:
types = value.split(',')
else:
types = (value,)
if types:
types_map = {}
for ct in ContentType.objects.filter(Q(app_label='main') | Q(app_label='auth', model='user')):
ct_model = ct.model_class()
if not ct_model:
continue
ct_type = get_type_for_model(ct_model)
types_map[ct_type] = ct.pk
model = queryset.model
model_type = get_type_for_model(model)
if 'polymorphic_ctype' in get_all_field_names(model):
types_pks = set([v for k, v in types_map.items() if k in types])
queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
elif model_type in types:
queryset = queryset
else:
queryset = queryset.none()
return queryset
except FieldError as e:
# Return a 400 for invalid field names.
raise ParseError(*e.args)
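So `?type=` accepts either a single value or a comma-separated list. A standalone sketch of just that parsing step (hypothetical helper name, mirroring the backend above):

```python
# Mirrors the ?type= handling in TypeFilterBackend above.
def parse_type_param(value):
    return value.split(',') if ',' in value else (value,)


print(parse_type_param('job'))          # -> ('job',)
print(parse_type_param('job,project'))  # -> ['job', 'project']
```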
def get_fields_from_path(model, path):
"""
Given a Django ORM lookup path (possibly over multiple models)
Returns the fields in the line, and also the revised lookup path
ex., given
model=Organization
path='project__timeout'
returns tuple of fields traversed as well and a corrected path,
for special cases we do substitutions
([<IntegerField for timeout>], 'project__timeout')
"""
# Store of all the fields used to detect repeats
field_list = []
new_parts = []
for name in path.split('__'):
if model is None:
raise ParseError(_('No related model for field {}.').format(name))
# HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
if model._meta.object_name in ('Project', 'InventorySource'):
name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
name, name
)
if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
name = 'polymorphic_ctype'
new_parts.append('polymorphic_ctype__model')
else:
new_parts.append(name)
if name in getattr(model, 'PASSWORD_FIELDS', ()):
raise PermissionDenied(_('Filtering on password fields is not allowed.'))
elif name == 'pk':
field = model._meta.pk
else:
name_alt = name.replace("_", "")
if name_alt in model._meta.fields_map.keys():
field = model._meta.fields_map[name_alt]
new_parts.pop()
new_parts.append(name_alt)
else:
field = model._meta.get_field(name)
if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
elif getattr(field, '__prevent_search__', False):
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
if field in field_list:
# Field traversed twice, could create infinite JOINs, DoSing Tower
raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
field_list.append(field)
model = getattr(field, 'related_model', None)
return field_list, '__'.join(new_parts)
def get_field_from_path(model, path):
"""
Given a Django ORM lookup path (possibly over multiple models)
Returns the last field in the line, and the revised lookup path
ex.
(<IntegerField for timeout>, 'project__timeout')
"""
field_list, new_path = get_fields_from_path(model, path)
return (field_list[-1], new_path)
class FieldLookupBackend(BaseFilterBackend):
"""
Filter using field lookups provided via query string parameters.
"""
RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate', 'limit')
SUPPORTED_LOOKUPS = (
'exact',
'iexact',
'contains',
'icontains',
'startswith',
'istartswith',
'endswith',
'iendswith',
'regex',
'iregex',
'gt',
'gte',
'lt',
'lte',
'in',
'isnull',
'search',
)
# A list of fields that we know can be filtered on without the possibility
# of introducing duplicates
NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)
def get_fields_from_lookup(self, model, lookup):
if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
path, suffix = lookup.rsplit('__', 1)
else:
path = lookup
suffix = 'exact'
if not path:
raise ParseError(_('Query string field name not provided.'))
# FIXME: Could build up a list of models used across relationships, use
# those lookups combined with request.user.get_queryset(Model) to make
# sure user cannot query using objects he could not view.
field_list, new_path = get_fields_from_path(model, path)
new_lookup = new_path
new_lookup = '__'.join([new_path, suffix])
return field_list, new_lookup
def get_field_from_lookup(self, model, lookup):
'''Method to match return type of single field, if needed.'''
field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
return (field_list[-1], new_lookup)
def to_python_related(self, value):
value = force_str(value)
if value.lower() in ('none', 'null'):
return None
else:
return int(value)
def value_to_python_for_field(self, field, value):
if isinstance(field, models.BooleanField):
return to_python_boolean(value)
elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
try:
return self.to_python_related(value)
except ValueError:
raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
else:
return field.to_python(value)
def value_to_python(self, model, lookup, value):
try:
lookup.encode("ascii")
except UnicodeEncodeError:
raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)
field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
field = field_list[-1]
needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)
        # Type names are stored without underscores internally, but are presented
        # and serialized over the API containing underscores, so we remove `_`
        # for polymorphic_ctype__model lookups.
if new_lookup.startswith('polymorphic_ctype__model'):
value = value.replace('_', '')
elif new_lookup.endswith('__isnull'):
value = to_python_boolean(value)
elif new_lookup.endswith('__in'):
items = []
if not value:
raise ValueError('cannot provide empty value for __in')
for item in value.split(','):
items.append(self.value_to_python_for_field(field, item))
value = items
elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
try:
re.compile(value)
except re.error as e:
raise ValueError(e.args[0])
elif new_lookup.endswith('__iexact'):
if not isinstance(field, (CharField, TextField)):
raise ValueError(f'{field.name} is not a text field and cannot be filtered by case-insensitive search')
elif new_lookup.endswith('__search'):
related_model = getattr(field, 'related_model', None)
if not related_model:
raise ValueError('%s is not searchable' % new_lookup[:-8])
new_lookups = []
for rm_field in related_model._meta.fields:
if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description', 'playbook'):
new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name))
return value, new_lookups, needs_distinct
else:
if isinstance(field, JSONField):
new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt')
value = self.value_to_python_for_field(field, value)
return value, new_lookup, needs_distinct
def filter_queryset(self, request, queryset, view):
try:
# Apply filters specified via query_params. Each entry in the lists
# below is (negate, field, value).
and_filters = []
or_filters = []
chain_filters = []
role_filters = []
search_filters = {}
needs_distinct = False
# Can only have two values: 'AND', 'OR'
# If 'AND' is used, an item must satisfy all conditions to show up in the results.
# If 'OR' is used, an item just needs to satisfy one condition to appear in results.
search_filter_relation = 'OR'
for key, values in request.query_params.lists():
if key in self.RESERVED_NAMES:
continue
                # HACK: make `created` available via API for the Django User ORM model
                # so it keeps compatibility with other objects which expose the `created` attr.
if queryset.model._meta.object_name == 'User' and key.startswith('created'):
key = key.replace('created', 'date_joined')
# HACK: Make job event filtering by host name mostly work even
# when not capturing job event hosts M2M.
if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'):
key = key.replace('hosts__name', 'or__host__name')
or_filters.append((False, 'host__name__isnull', True))
# Custom __int filter suffix (internal use only).
q_int = False
if key.endswith('__int'):
key = key[:-5]
q_int = True
# RBAC filtering
if key == 'role_level':
role_filters.append(values[0])
continue
# Search across related objects.
if key.endswith('__search'):
if values and ',' in values[0]:
search_filter_relation = 'AND'
values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values])
for value in values:
search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value))
assert isinstance(new_keys, list)
search_filters[search_value] = new_keys
# by definition, search *only* joins across relations,
# so it _always_ needs a .distinct()
needs_distinct = True
continue
# Custom chain__ and or__ filters, mutually exclusive (both can
# precede not__).
q_chain = False
q_or = False
if key.startswith('chain__'):
key = key[7:]
q_chain = True
elif key.startswith('or__'):
key = key[4:]
q_or = True
# Custom not__ filter prefix.
q_not = False
if key.startswith('not__'):
key = key[5:]
q_not = True
# Convert value(s) to python and add to the appropriate list.
for value in values:
if q_int:
value = int(value)
value, new_key, distinct = self.value_to_python(queryset.model, key, value)
if distinct:
needs_distinct = True
if '_as_txt' in new_key:
fname = next(item for item in new_key.split('__') if item.endswith('_as_txt'))
queryset = queryset.annotate(**{fname: Cast(fname[:-7], output_field=TextField())})
if q_chain:
chain_filters.append((q_not, new_key, value))
elif q_or:
or_filters.append((q_not, new_key, value))
else:
and_filters.append((q_not, new_key, value))
# Now build Q objects for database query filter.
if and_filters or or_filters or chain_filters or role_filters or search_filters:
args = []
for n, k, v in and_filters:
if n:
args.append(~Q(**{k: v}))
else:
args.append(Q(**{k: v}))
for role_name in role_filters:
if not hasattr(queryset.model, 'accessible_pk_qs'):
raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
if or_filters:
q = Q()
for n, k, v in or_filters:
if n:
q |= ~Q(**{k: v})
else:
q |= Q(**{k: v})
args.append(q)
if search_filters and search_filter_relation == 'OR':
q = Q()
for term, constrains in search_filters.items():
for constrain in constrains:
q |= Q(**{constrain: term})
args.append(q)
elif search_filters and search_filter_relation == 'AND':
for term, constrains in search_filters.items():
q_chain = Q()
for constrain in constrains:
q_chain |= Q(**{constrain: term})
queryset = queryset.filter(q_chain)
for n, k, v in chain_filters:
if n:
q = ~Q(**{k: v})
else:
q = Q(**{k: v})
queryset = queryset.filter(q)
queryset = queryset.filter(*args)
if needs_distinct:
queryset = queryset.distinct()
return queryset
except (FieldError, FieldDoesNotExist, ValueError, TypeError) as e:
raise ParseError(e.args[0])
except ValidationError as e:
raise ParseError(json.dumps(e.messages, ensure_ascii=False))
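Taken together: a trailing `__<suffix>` is honored only when it appears in SUPPORTED_LOOKUPS; otherwise the whole key is treated as a field path with an implicit `exact`. A standalone sketch of that split (hypothetical helper, truncated lookup list):

```python
# Subset of SUPPORTED_LOOKUPS, enough to illustrate the split.
SUPPORTED_LOOKUPS = ('exact', 'icontains', 'in', 'isnull', 'gt', 'lt', 'search')


def split_lookup(lookup):
    # Same suffix handling as get_fields_from_lookup above.
    if '__' in lookup and lookup.rsplit('__', 1)[-1] in SUPPORTED_LOOKUPS:
        path, suffix = lookup.rsplit('__', 1)
        return path, suffix
    return lookup, 'exact'


print(split_lookup('name__icontains'))   # -> ('name', 'icontains')
print(split_lookup('project__timeout'))  # -> ('project__timeout', 'exact')
```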
class OrderByBackend(BaseFilterBackend):
"""
Filter to apply ordering based on query string parameters.
"""
def filter_queryset(self, request, queryset, view):
try:
order_by = None
for key, value in request.query_params.items():
if key in ('order', 'order_by'):
order_by = value
if ',' in value:
order_by = value.split(',')
else:
order_by = (value,)
default_order_by = self.get_default_ordering(view)
# glue the order by and default order by together so that the default is the backup option
order_by = list(order_by or []) + list(default_order_by or [])
if order_by:
order_by = self._validate_ordering_fields(queryset.model, order_by)
# Special handling of the type field for ordering. In this
# case, we're not sorting exactly on the type field, but
# given the limited number of views with multiple types,
# sorting on polymorphic_ctype.model is effectively the same.
new_order_by = []
if 'polymorphic_ctype' in get_all_field_names(queryset.model):
for field in order_by:
if field == 'type':
new_order_by.append('polymorphic_ctype__model')
elif field == '-type':
new_order_by.append('-polymorphic_ctype__model')
else:
new_order_by.append(field)
else:
for field in order_by:
if field not in ('type', '-type'):
new_order_by.append(field)
queryset = queryset.order_by(*new_order_by)
return queryset
except FieldError as e:
# Return a 400 for invalid field names.
raise ParseError(*e.args)
def get_default_ordering(self, view):
ordering = getattr(view, 'ordering', None)
if isinstance(ordering, str):
return (ordering,)
return ordering
def _validate_ordering_fields(self, model, order_by):
for field_name in order_by:
# strip off the negation prefix `-` if it exists
prefix = ''
path = field_name
if field_name[0] == '-':
prefix = field_name[0]
path = field_name[1:]
try:
field, new_path = get_field_from_path(model, path)
new_path = '{}{}'.format(prefix, new_path)
except (FieldError, FieldDoesNotExist) as e:
raise ParseError(e.args[0])
yield new_path


@@ -5,16 +5,18 @@
import inspect
import logging
import time
import uuid
# Django
from django.conf import settings
from django.contrib.auth import views as auth_views
from django.contrib.contenttypes.models import ContentType
+from django.core.cache import cache
from django.core.exceptions import FieldDoesNotExist
from django.db import connection, transaction
from django.db.models.fields.related import OneToOneRel
-from django.http import QueryDict, JsonResponse
+from django.http import QueryDict
-from django.shortcuts import get_object_or_404, redirect
+from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.encoding import smart_str
from django.utils.safestring import mark_safe
@@ -30,23 +32,13 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation
# Shared code for the AWX platform
from awx_plugins.interfaces._temporary_private_licensing_api import detect_server_product_name
# django-ansible-base
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
from ansible_base.lib.utils.models import get_all_field_names
from ansible_base.lib.utils.requests import get_remote_host, is_proxied_request
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
from ansible_base.rbac.permission_registry import permission_registry
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
# AWX
from awx.api.filters import FieldLookupBackend
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
-from awx.main.models.rbac import give_creator_permissions
-from awx.main.access import optimize_queryset
+from awx.main.access import access_registry
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
-from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
+from awx.main.utils.db import get_all_field_names
+from awx.main.utils.licensing import server_product_name
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning
@@ -81,14 +73,7 @@ analytics_logger = logging.getLogger('awx.analytics.performance')
class LoggedLoginView(auth_views.LoginView):
    def get(self, request, *args, **kwargs):
if is_proxied_request():
next = request.GET.get('next', "")
if next:
next = f"?next={next}"
return redirect(f"/{next}")
        # The django.auth.contrib login form doesn't perform the content
        # negotiation we've come to expect from DRF; add in code to catch
        # situations where Accept != text/html (or */*) and reply with
@@ -104,46 +89,26 @@ class LoggedLoginView(auth_views.LoginView):
        return super(LoggedLoginView, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
if is_proxied_request():
# Give a message, saying to login via AAP
return JsonResponse(
{
'detail': _('Please log in via Platform Authentication.'),
},
status=status.HTTP_401_UNAUTHORIZED,
)
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
        if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
-            ret.set_cookie(
-                'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
-            )
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
+            ret.set_cookie('userLoggedIn', 'true')
            ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
            return ret
        else:
            if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
            ret.status_code = 401
            return ret
class LoggedLogoutView(auth_views.LogoutView):
success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
    def dispatch(self, request, *args, **kwargs):
if is_proxied_request():
# 1) We intentionally don't obey ?next= here, just always redirect to platform login
# 2) Hack to prevent rewrites of Location header
qs = "?__gateway_no_rewrite__=1&next=/"
return redirect(f"/api/gateway/v1/logout/{qs}")
        original_user = getattr(request, 'user', None)
        ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
-        ret.set_cookie('userLoggedIn', 'false', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
+        ret.set_cookie('userLoggedIn', 'false')
        if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
            logger.info("User {} logged out.".format(original_user.username))
        return ret
@@ -161,10 +126,10 @@ def get_view_description(view, html=False):
def get_default_schema():
-    if settings.DYNACONF.is_development_mode:
-        from awx.api.swagger import schema_view
-        return schema_view
+    if settings.SETTINGS_MODULE == 'awx.settings.development':
+        from awx.api.swagger import AutoSchema
+        return AutoSchema()
    else:
        return views.APIView.schema
@@ -178,23 +143,22 @@ class APIView(views.APIView):
        Store the Django REST Framework Request object as an attribute on the
        normal Django request, store time the request started.
        """
remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
        self.time_started = time.time()
        if getattr(settings, 'SQL_DEBUG', False):
            self.queries_before = len(connection.queries)
if 'HTTP_X_TRUSTED_PROXY' in request.environ:
if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
remote_headers = settings.REMOTE_HOST_HEADERS
else:
logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
        # they respect the allowed proxy list
-        if settings.PROXY_IP_ALLOWED_LIST:
-            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
-                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
+        if all(
+            [
+                settings.PROXY_IP_ALLOWED_LIST,
+                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
+                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
+            ]
+        ):
+            for custom_header in settings.REMOTE_HOST_HEADERS:
+                if custom_header.startswith('HTTP_'):
+                    request.environ.pop(custom_header, None)
        drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
        request.drf_request = drf_request
@@ -207,7 +171,7 @@ class APIView(views.APIView):
            self.__init_request_error__ = exc
        except UnsupportedMediaType as exc:
            exc.detail = _(
-                'You did not use correct Content-Type in your HTTP request. If you are using our REST API, the Content-Type must be application/json'
+                'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
            )
            self.__init_request_error__ = exc
        return drf_request
@@ -239,21 +203,17 @@ class APIView(views.APIView):
            return response

        if response.status_code >= 400:
-            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
            msg_data = {
                'status_code': response.status_code,
                'user_name': request.user,
                'url_path': request.path,
-                'remote_addr': ip,
+                'remote_addr': request.META.get('REMOTE_ADDR', None),
            }
            if type(response.data) is dict:
                msg_data['error'] = response.data.get('error', response.status_text)
            elif type(response.data) is list:
-                if len(response.data) > 0 and isinstance(response.data[0], str):
-                    msg_data['error'] = str(response.data[0])
-                else:
-                    msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+                msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
            else:
                msg_data['error'] = response.status_text
@@ -267,17 +227,15 @@ class APIView(views.APIView):
        if hasattr(self, '__init_request_error__'):
            response = self.handle_exception(self.__init_request_error__)
            if response.status_code == 401:
-                if response.data and 'detail' in response.data:
-                    response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
+                response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
                logger.info(status_msg)
            else:
                logger.warning(status_msg)
        response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
        time_started = getattr(self, 'time_started', None)
-        if request.user.is_authenticated:
-            response['X-API-Product-Version'] = get_awx_version()
-            response['X-API-Product-Name'] = detect_server_product_name()
+        response['X-API-Product-Version'] = get_awx_version()
+        response['X-API-Product-Name'] = server_product_name()
        response['X-API-Node'] = settings.CLUSTER_HOST_ID
        if time_started:
@@ -374,6 +332,12 @@ class APIView(views.APIView):
            kwargs.pop('version')
        return super(APIView, self).dispatch(request, *args, **kwargs)
def check_permissions(self, request):
if request.method not in ('GET', 'OPTIONS', 'HEAD'):
if 'write' not in getattr(request.user, 'oauth_scopes', ['write']):
raise PermissionDenied()
return super(APIView, self).check_permissions(request)
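The gate defaults to permissive: getattr() returns ['write'] when oauth_scopes is absent (the request was not OAuth2-authenticated), so only read-scoped tokens are blocked from write methods. A standalone sketch of the same decision logic, stripped of DRF:

```python
# Same shape as check_permissions above; User is a hypothetical stand-in.
def is_write_allowed(user, method):
    if method not in ('GET', 'OPTIONS', 'HEAD'):
        return 'write' in getattr(user, 'oauth_scopes', ['write'])
    return True


class User:
    pass


u = User()
print(is_write_allowed(u, 'POST'))  # -> True (no OAuth2 token involved)
u.oauth_scopes = ['read']
print(is_write_allowed(u, 'POST'))  # -> False (token lacks write scope)
```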
class GenericAPIView(generics.GenericAPIView, APIView):
    # Base class for all model-based views.
@@ -400,7 +364,12 @@ class GenericAPIView(generics.GenericAPIView, APIView):
            return self.queryset._clone()
        elif self.model is not None:
            qs = self.model._default_manager
-            qs = optimize_queryset(qs)
+            if self.model in access_registry:
+                access_class = access_registry[self.model]
+                if access_class.select_related:
+                    qs = qs.select_related(*access_class.select_related)
+                if access_class.prefetch_related:
+                    qs = qs.prefetch_related(*access_class.prefetch_related)
            return qs
        else:
            return super(GenericAPIView, self).get_queryset()
@@ -508,11 +477,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
    # Base class for a list view that allows creating new objects.
-    def perform_create(self, serializer):
-        super().perform_create(serializer)
-        if serializer.Meta.model in permission_registry.all_registered_models:
-            if self.request and self.request.user:
-                give_creator_permissions(self.request.user, serializer.instance)
+    pass
class ParentMixin(object):
@@ -547,9 +512,6 @@ class SubListAPIView(ParentMixin, ListAPIView):
    # And optionally (user must have given access permission on parent object
    # to view sublist):
    # parent_access = 'read'
# filter_read_permission sets whether or not to override the default intersection behavior
# implemented here
filter_read_permission = True
    def get_description_context(self):
        d = super(SubListAPIView, self).get_description_context()
@@ -564,16 +526,12 @@ class SubListAPIView(ParentMixin, ListAPIView):
    def get_queryset(self):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
-        if not self.filter_read_permission:
-            return optimize_queryset(self.get_sublist_queryset(parent))
-        qs = self.request.user.get_queryset(self.model)
-        if hasattr(self, 'parent_key'):
-            # This is vastly preferable for ReverseForeignKey relationships
-            return qs.filter(**{self.parent_key: parent})
-        return qs.distinct() & self.get_sublist_queryset(parent).distinct()
+        qs = self.request.user.get_queryset(self.model).distinct()
+        sublist_qs = self.get_sublist_queryset(parent)
+        return qs & sublist_qs

    def get_sublist_queryset(self, parent):
-        return getattrd(parent, self.relationship)
+        return getattrd(parent, self.relationship).distinct()
class DestroyAPIView(generics.DestroyAPIView):
@@ -622,6 +580,15 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
        d.update({'parent_key': getattr(self, 'parent_key', None)})
        return d
def get_queryset(self):
if hasattr(self, 'parent_key'):
# Prefer this filtering because ForeignKey allows us more assumptions
parent = self.get_parent_object()
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model)
return qs.filter(**{self.parent_key: parent})
return super(SubListCreateAPIView, self).get_queryset()
    def create(self, request, *args, **kwargs):
        # If the object ID was not specified, it probably doesn't exist in the
        # DB yet. We want to see if we can create it. The URL may choose to
@@ -832,7 +799,6 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
class ResourceAccessList(ParentMixin, ListAPIView):
-    deprecated = True
    serializer_class = ResourceAccessListElementSerializer
    ordering = ('username',)
@@ -840,15 +806,6 @@ class ResourceAccessList(ParentMixin, ListAPIView):
        obj = self.get_parent_object()
        content_type = ContentType.objects.get_for_model(obj)
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first()
if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct()
        roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))
        ancestors = set()
@@ -1008,13 +965,18 @@ class CopyAPIView(GenericAPIView):
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
        )
        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
-            give_creator_permissions(request.user, new_obj)
+            new_obj.admin_role.members.add(request.user)
        if sub_objs:
+            # store the copied object dict into cache, because it's
+            # often too large for postgres' notification bus
+            # (which has a default maximum message size of 8k)
+            key = 'deep-copy-{}'.format(str(uuid.uuid4()))
+            cache.set(key, sub_objs, timeout=3600)
            permission_check_func = None
            if hasattr(type(self), 'deep_copy_permission_check_func'):
                permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
            trigger_delayed_deep_copy(
-                self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, permission_check_func=permission_check_func
+                self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
            )
        serializer = self._get_copy_return_serializer(new_obj)
        headers = {'Location': new_obj.get_absolute_url(request=request)}
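The cache hand-off keeps the large payload out of the task-dispatch message, which travels over pg_notify and its roughly 8k payload ceiling; only the small key is queued. A sketch of the pattern with hypothetical names:

```python
import uuid


def enqueue_deep_copy(cache, trigger_task, sub_objs):
    # Park the large payload in the cache for up to an hour...
    key = 'deep-copy-{}'.format(uuid.uuid4())
    cache.set(key, sub_objs, timeout=3600)
    # ...and send only the small key through the task bus.
    trigger_task(key)


class FakeCache(dict):
    def set(self, k, v, timeout=None):
        self[k] = v


enqueue_deep_copy(FakeCache(), print, {'jobs': [1, 2, 3]})  # prints the key
```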


@@ -36,13 +36,11 @@ class Metadata(metadata.SimpleMetadata):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)
field_info['hidden'] = getattr(field, 'hidden', False)
        text_attrs = [
            'read_only',
            'label',
            'help_text',
'warning_text',
            'min_length',
            'max_length',
            'min_value',
@@ -73,7 +71,7 @@ class Metadata(metadata.SimpleMetadata):
            'url': _('URL for this {}.'),
            'related': _('Data structure with URLs of related resources.'),
            'summary_fields': _(
-                'Data structure with name/description for related resources. The output for some objects may be limited for performance reasons.'
+                'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
            ),
            'created': _('Timestamp when this {} was created.'),
            'modified': _('Timestamp when this {} was last modified.'),
@@ -103,7 +101,7 @@ class Metadata(metadata.SimpleMetadata):
                default = field.get_default()
                if type(default) is UUID:
                    default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
-                if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
+                if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
                    default = '{}://{}'.format(self.request.scheme, self.request.get_host())
                field_info['default'] = default
            except serializers.SkipField:


@@ -25,7 +25,6 @@ __all__ = [
    'UserPermission',
    'IsSystemAdminOrAuditor',
    'WorkflowApprovalPermission',
'AnalyticsPermission',
]
@@ -251,16 +250,3 @@ class IsSystemAdminOrAuditor(permissions.BasePermission):
class WebhookKeyPermission(permissions.BasePermission):
    def has_object_permission(self, request, view, obj):
        return request.user.can_access(view.model, 'admin', obj, request.data)
class AnalyticsPermission(permissions.BasePermission):
"""
Allows GET/POST/OPTIONS to system admins and system auditors.
"""
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated):
return False
if request.method in ["GET", "POST", "OPTIONS"]:
return request.user.is_superuser or request.user.is_system_auditor
return request.user.is_superuser

File diff suppressed because it is too large


@@ -1,55 +1,97 @@
import json
import warnings

-from drf_yasg import openapi
-from drf_yasg.inspectors import SwaggerAutoSchema
-from drf_yasg.views import get_schema_view
+from coreapi.document import Object, Link
+from rest_framework import exceptions
from rest_framework.permissions import AllowAny
+from rest_framework.renderers import CoreJSONRenderer
+from rest_framework.response import Response
+from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
+from rest_framework.views import APIView
+from rest_framework_swagger import renderers
class CustomSwaggerAutoSchema(SwaggerAutoSchema): class SuperUserSchemaGenerator(SchemaGenerator):
"""Custom SwaggerAutoSchema to add swagger_topic to tags.""" def has_view_permissions(self, path, method, view):
#
# Generate the Swagger schema as if you were a superuser and
# permissions didn't matter; this short-circuits the schema path
# discovery to include _all_ potential paths in the API.
#
return True
def get_tags(self, operation_keys=None):
tags = [] class AutoSchema(DRFAuthSchema):
def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url)
try: try:
if hasattr(self.view, 'get_serializer'): serializer = self.view.get_serializer()
serializer = self.view.get_serializer()
else:
serializer = None
except Exception: except Exception:
serializer = None serializer = None
warnings.warn( warnings.warn(
'{}.get_serializer() raised an exception during ' '{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be ' 'schema generation. Serializer fields will not be '
'generated for {}.'.format(self.view.__class__.__name__, operation_keys) 'generated for {} {}.'.format(self.view.__class__.__name__, method, path)
) )
link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)
# auto-generate a topic/tag for the serializer based on its model
if hasattr(self.view, 'swagger_topic'): if hasattr(self.view, 'swagger_topic'):
tags.append(str(self.view.swagger_topic).title()) link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'): elif serializer and hasattr(serializer, 'Meta'):
tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title()) link.__dict__['topic'] = str(serializer.Meta.model._meta.verbose_name_plural).title()
elif hasattr(self.view, 'model'): elif hasattr(self.view, 'model'):
tags.append(str(self.view.model._meta.verbose_name_plural).title()) link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else: else:
tags = ['api'] # Fallback to default value warnings.warn('Could not determine a Swagger tag for path {}'.format(path))
return link
if not tags: def get_description(self, path, method):
warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}') setattr(self.view.request, 'swagger_method', method)
return tags description = super(AutoSchema, self).get_description(path, method)
return description
def is_deprecated(self):
"""Return `True` if this operation is to be marked as deprecated."""
return getattr(self.view, 'deprecated', False)
schema_view = get_schema_view( class SwaggerSchemaView(APIView):
openapi.Info( _ignore_model_permissions = True
title='AWX API', exclude_from_schema = True
default_version='v2', permission_classes = [AllowAny]
description='AWX API Documentation', renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]
terms_of_service='https://www.google.com/policies/terms/',
contact=openapi.Contact(email='contact@snippets.local'), def get(self, request):
license=openapi.License(name='Apache License'), generator = SuperUserSchemaGenerator(title='Ansible Automation Platform controller API', patterns=None, urlconf=None)
), schema = generator.get_schema(request=request)
public=True, # python core-api doesn't support the deprecation yet, so track it
permission_classes=[AllowAny], # ourselves and return it in a response header
) _deprecated = []
# By default, DRF OpenAPI serialization places all endpoints in
# a single node based on their root path (/api). Instead, we want to
# group them by topic/tag so that they're categorized in the rendered
# output
document = schema._data.pop('api')
for path, node in document.items():
if isinstance(node, Object):
for action in node.values():
topic = getattr(action, 'topic', None)
if topic:
schema._data.setdefault(topic, Object())
schema._data[topic]._data[path] = node
if isinstance(action, Object):
for link in action.links.values():
if link.deprecated:
_deprecated.append(link.url)
elif isinstance(node, Link):
topic = getattr(node, 'topic', None)
if topic:
schema._data.setdefault(topic, Object())
schema._data[topic]._data[path] = node
if not schema:
raise exceptions.ValidationError('The schema generator did not return a schema Document')
return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})

View File

@@ -0,0 +1,114 @@
# Token Handling using OAuth2
This page lists the OAuth 2 utility endpoints used for authorization, token refresh, and revocation.
Note that endpoints other than `/api/o/authorize/` are not meant to be used in browsers and do not
support HTTP GET. The endpoints here strictly follow the
[RFC specs for OAuth2](https://tools.ietf.org/html/rfc6749), so please refer to them for detailed
reference. Note that the AWX network location defaults to `http://localhost:8013` in the examples below.
## Create Token for an Application using Authorization code grant type
Given an application "AuthCodeApp" of grant type `authorization-code`,
the user makes a GET request from the client app to the Authorize endpoint with
* `response_type`
* `client_id`
* `redirect_uris`
* `scope`
AWX will respond with the authorization `code` and `state`
to the `redirect_uri` specified in the application. The client application will then make a POST to the
`/api/o/token/` endpoint on AWX with
* `code`
* `client_id`
* `client_secret`
* `grant_type`
* `redirect_uri`
AWX will respond with the `access_token`, `token_type`, `refresh_token`, and `expires_in`. For more
information on testing this flow, refer to [django-oauth-toolkit](http://django-oauth-toolkit.readthedocs.io/en/latest/tutorial/tutorial_01.html#test-your-authorization-server).
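Putting these pieces together, a minimal sketch of the code-for-token exchange might look like the
following (the client ID, client secret, authorization code, and redirect URI are placeholders to be
replaced with your application's actual values):
```bash
# Step 1 (in the user's browser): request an authorization code, e.g.
# http://localhost:8013/api/o/authorize/?response_type=code&client_id=<client_id>&scope=read

# Step 2 (from the client app): exchange the returned code for an access token
curl -X POST \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -d "grant_type=authorization_code&code=<code>&redirect_uri=<redirect_uri>" \
  -u "<client_id>:<client_secret>" \
  http://localhost:8013/api/o/token/ -i
```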
## Create Token for an Application using Password grant type
Logging in is not required for the `password` grant type, so a simple `curl` command can be used to acquire a personal access token
via `/api/o/token/` with
* `grant_type`: Required to be "password"
* `username`
* `password`
* `client_id`: Associated application must have grant_type "password"
* `client_secret`
For example:
```bash
curl -X POST \
-H "Content-Type: application/x-www-form-urlencoded" \
-d "grant_type=password&username=<username>&password=<password>&scope=read" \
-u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569e
IaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
http://localhost:8013/api/o/token/ -i
```
In the above POST request, the `username` and `password` parameters are the username and password of the related
AWX user of the underlying application, and the authentication information is of the format
`<client_id>:<client_secret>`, where `client_id` and `client_secret` are the corresponding fields of the
underlying application.
Upon success, the access token, refresh token, and other information are returned in the response body in JSON
format:
```text
{
"access_token": "9epHOqHhnXUcgYK8QanOmUQPSgX92g",
"token_type": "Bearer",
"expires_in": 31536000000,
"refresh_token": "jMRX6QvzOTf046KHee3TU5mT3nyXsz",
"scope": "read"
}
```
## Refresh an existing access token
The `/api/o/token/` endpoint is used for refreshing the access token:
```bash
curl -X POST \
-H "Content-Type: application/x-www-form-urlencoded" \
-d "grant_type=refresh_token&refresh_token=AL0NK9TTpv0qp54dGbC4VUZtsZ9r8z" \
-u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
http://localhost:8013/api/o/token/ -i
```
In the above POST request, `refresh_token` is provided by the `refresh_token` field of the access token
above. The authentication information is of the format `<client_id>:<client_secret>`, where `client_id`
and `client_secret` are the corresponding fields of the underlying related application of the access token.
Upon success, a new (refreshed) access token with the same scope information as the previous one is
returned in the response body in JSON format:
```text
{
"access_token": "NDInWxGJI4iZgqpsreujjbvzCfJqgR",
"token_type": "Bearer",
"expires_in": 31536000000,
"refresh_token": "DqOrmz8bx3srlHkZNKmDpqA86bnQkT",
"scope": "read write"
}
```
Internally, the refresh operation deletes the existing token, and a new token is created immediately
afterward with scope and related-application information identical to the original. We can
verify this by checking that the new token is present at the `/api/v2/tokens/` endpoint.
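For example (the basic-auth credentials below are placeholders for an AWX user who can view the token),
listing that endpoint after a refresh should show the new token and no longer show the old one:
```bash
curl -u "<username>:<password>" \
  http://localhost:8013/api/v2/tokens/ -s
```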
## Revoke an access token
Revoking an access token is the same as deleting the token resource object.
Revoking is done by POSTing to `/api/o/revoke_token/` with the token to revoke as a parameter:
```bash
curl -X POST -d "token=rQONsve372fQwuc2pn76k3IHDCYpi7" \
-H "Content-Type: application/x-www-form-urlencoded" \
-u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
http://localhost:8013/api/o/revoke_token/ -i
```
A `200 OK` response means the token was successfully deleted.

View File

@@ -1,22 +0,0 @@
# Bulk Host Delete
This endpoint allows the client to delete multiple hosts from inventories
by providing a list of host IDs to be deleted, as shown in the example below.
Example:
```text
{
    "hosts": [1, 2, 3, 4, 5]
}
```
Return data:
```text
{
    "hosts": {
        "1": "The host a1 was deleted",
        "2": "The host a2 was deleted",
        "3": "The host a3 was deleted",
        "4": "The host a4 was deleted",
        "5": "The host a5 was deleted"
    }
}
```
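A request against this endpoint might look like the following sketch (the bearer token is a
placeholder, and the URL assumes the default development net location used elsewhere in these docs):
```bash
curl -X POST \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer <access_token>" \
  -d '{"hosts": [1, 2, 3, 4, 5]}' \
  http://localhost:8013/api/v2/bulk/host_delete/
```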

View File

@@ -1,18 +0,0 @@
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:
Make a DELETE request to this resource to soft-delete this {{ model_verbose_name }}.
A soft deletion will mark the `deleted` field as true and exclude the host
metric from license calculations.
This may be undone later if the same hostname is automated again.
{% endifmeth %}
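As a hypothetical illustration of the soft-delete call (the metric ID and bearer token are
placeholders), a client might issue:
```bash
curl -X DELETE \
  -H "Authorization: Bearer <access_token>" \
  http://localhost:8013/api/v2/host_metrics/42/ -i
```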

View File

@@ -2,35 +2,21 @@ receptor_user: awx
receptor_group: awx receptor_group: awx
receptor_verify: true receptor_verify: true
receptor_tls: true receptor_tls: true
receptor_mintls13: false
{% if instance.node_type == "execution" %}
receptor_work_commands: receptor_work_commands:
ansible-runner: ansible-runner:
command: ansible-runner command: ansible-runner
params: worker params: worker
allowruntimeparams: true allowruntimeparams: true
verifysignature: true verifysignature: true
additional_python_packages: custom_worksign_public_keyfile: receptor/work-public-key.pem
- ansible-runner
{% endif %}
custom_worksign_public_keyfile: receptor/work_public_key.pem
custom_tls_certfile: receptor/tls/receptor.crt custom_tls_certfile: receptor/tls/receptor.crt
custom_tls_keyfile: receptor/tls/receptor.key custom_tls_keyfile: receptor/tls/receptor.key
custom_ca_certfile: receptor/tls/ca/mesh-CA.crt custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
{% if listener_port %} receptor_protocol: 'tcp'
receptor_protocol: {{ listener_protocol }}
receptor_listener: true receptor_listener: true
receptor_port: {{ listener_port }} receptor_port: {{ instance.listener_port }}
{% else %} receptor_dependencies:
receptor_listener: false - python39-pip
{% endif %}
{% if peers %}
receptor_peers:
{% for peer in peers %}
- address: {{ peer.address }}
protocol: {{ peer.protocol }}
{% endfor %}
{% endif %}
{% verbatim %} {% verbatim %}
podman_user: "{{ receptor_user }}" podman_user: "{{ receptor_user }}"
podman_group: "{{ receptor_group }}" podman_group: "{{ receptor_group }}"

View File

@@ -1,22 +1,20 @@
{% verbatim %}
--- ---
- hosts: all - hosts: all
become: yes become: yes
tasks: tasks:
- name: Create the receptor group
group:
{% verbatim %}
name: "{{ receptor_group }}"
{% endverbatim %}
state: present
- name: Create the receptor user - name: Create the receptor user
user: user:
{% verbatim %}
name: "{{ receptor_user }}" name: "{{ receptor_user }}"
{% endverbatim %}
shell: /bin/bash shell: /bin/bash
{% if instance.node_type == "execution" %} - name: Enable Copr repo for Receptor
command: dnf copr enable ansible-awx/receptor -y
- import_role: - import_role:
name: ansible.receptor.podman name: ansible.receptor.podman
{% endif %}
- import_role: - import_role:
name: ansible.receptor.setup name: ansible.receptor.setup
- name: Install ansible-runner
pip:
name: ansible-runner
executable: pip3.9
{% endverbatim %}

View File

@@ -1,4 +1,4 @@
--- ---
collections: collections:
- name: ansible.receptor - name: ansible.receptor
version: 2.0.3 version: 1.1.0

View File

@@ -1,31 +0,0 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
import awx.api.views.analytics as analytics
urls = [
re_path(r'^$', analytics.AnalyticsRootView.as_view(), name='analytics_root_view'),
re_path(r'^authorized/$', analytics.AnalyticsAuthorizedView.as_view(), name='analytics_authorized'),
re_path(r'^reports/$', analytics.AnalyticsReportsList.as_view(), name='analytics_reports_list'),
re_path(r'^report/(?P<slug>[\w-]+)/$', analytics.AnalyticsReportDetail.as_view(), name='analytics_report_detail'),
re_path(r'^report_options/$', analytics.AnalyticsReportOptionsList.as_view(), name='analytics_report_options_list'),
re_path(r'^adoption_rate/$', analytics.AnalyticsAdoptionRateList.as_view(), name='analytics_adoption_rate'),
re_path(r'^adoption_rate_options/$', analytics.AnalyticsAdoptionRateList.as_view(), name='analytics_adoption_rate_options'),
re_path(r'^event_explorer/$', analytics.AnalyticsEventExplorerList.as_view(), name='analytics_event_explorer'),
re_path(r'^event_explorer_options/$', analytics.AnalyticsEventExplorerList.as_view(), name='analytics_event_explorer_options'),
re_path(r'^host_explorer/$', analytics.AnalyticsHostExplorerList.as_view(), name='analytics_host_explorer'),
re_path(r'^host_explorer_options/$', analytics.AnalyticsHostExplorerList.as_view(), name='analytics_host_explorer_options'),
re_path(r'^job_explorer/$', analytics.AnalyticsJobExplorerList.as_view(), name='analytics_job_explorer'),
re_path(r'^job_explorer_options/$', analytics.AnalyticsJobExplorerList.as_view(), name='analytics_job_explorer_options'),
re_path(r'^probe_templates/$', analytics.AnalyticsProbeTemplatesList.as_view(), name='analytics_probe_templates_explorer'),
re_path(r'^probe_templates_options/$', analytics.AnalyticsProbeTemplatesList.as_view(), name='analytics_probe_templates_options'),
re_path(r'^probe_template_for_hosts/$', analytics.AnalyticsProbeTemplateForHostsList.as_view(), name='analytics_probe_template_for_hosts_explorer'),
re_path(r'^probe_template_for_hosts_options/$', analytics.AnalyticsProbeTemplateForHostsList.as_view(), name='analytics_probe_template_for_hosts_options'),
re_path(r'^roi_templates/$', analytics.AnalyticsRoiTemplatesList.as_view(), name='analytics_roi_templates_explorer'),
re_path(r'^roi_templates_options/$', analytics.AnalyticsRoiTemplatesList.as_view(), name='analytics_roi_templates_options'),
]
__all__ = ['urls']

View File

@@ -1,10 +0,0 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import HostMetricList, HostMetricDetail
urls = [re_path(r'^$', HostMetricList.as_view(), name='host_metric_list'), re_path(r'^(?P<pk>[0-9]+)/$', HostMetricDetail.as_view(), name='host_metric_detail')]
__all__ = ['urls']

View File

@@ -10,7 +10,6 @@ from awx.api.views import (
InstanceInstanceGroupsList, InstanceInstanceGroupsList,
InstanceHealthCheck, InstanceHealthCheck,
InstancePeersList, InstancePeersList,
InstanceReceptorAddressesList,
) )
from awx.api.views.instance_install_bundle import InstanceInstallBundle from awx.api.views.instance_install_bundle import InstanceInstallBundle
@@ -22,7 +21,6 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
re_path(r'^(?P<pk>[0-9]+)/peers/$', InstancePeersList.as_view(), name='instance_peers_list'), re_path(r'^(?P<pk>[0-9]+)/peers/$', InstancePeersList.as_view(), name='instance_peers_list'),
re_path(r'^(?P<pk>[0-9]+)/receptor_addresses/$', InstanceReceptorAddressesList.as_view(), name='instance_receptor_addresses_list'),
re_path(r'^(?P<pk>[0-9]+)/install_bundle/$', InstanceInstallBundle.as_view(), name='instance_install_bundle'), re_path(r'^(?P<pk>[0-9]+)/install_bundle/$', InstanceInstallBundle.as_view(), name='instance_install_bundle'),
] ]

View File

@@ -6,10 +6,7 @@ from django.urls import re_path
from awx.api.views.inventory import ( from awx.api.views.inventory import (
InventoryList, InventoryList,
InventoryDetail, InventoryDetail,
ConstructedInventoryDetail,
ConstructedInventoryList,
InventoryActivityStreamList, InventoryActivityStreamList,
InventoryInputInventoriesList,
InventoryJobTemplateList, InventoryJobTemplateList,
InventoryAccessList, InventoryAccessList,
InventoryObjectRolesList, InventoryObjectRolesList,
@@ -40,7 +37,6 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), re_path(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'),
re_path(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), re_path(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'),
re_path(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), re_path(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'),
re_path(r'^(?P<pk>[0-9]+)/input_inventories/$', InventoryInputInventoriesList.as_view(), name='inventory_input_inventories'),
re_path(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), re_path(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), re_path(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'),
@@ -52,10 +48,4 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), re_path(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
] ]
# Constructed inventory special views __all__ = ['urls']
constructed_inventory_urls = [
re_path(r'^$', ConstructedInventoryList.as_view(), name='constructed_inventory_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ConstructedInventoryDetail.as_view(), name='constructed_inventory_detail'),
]
__all__ = ['urls', 'constructed_inventory_urls']

awx/api/urls/oauth2.py Normal file
View File

@@ -0,0 +1,27 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import (
OAuth2ApplicationList,
OAuth2ApplicationDetail,
ApplicationOAuth2TokenList,
OAuth2ApplicationActivityStreamList,
OAuth2TokenList,
OAuth2TokenDetail,
OAuth2TokenActivityStreamList,
)
urls = [
re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
re_path(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
re_path(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
re_path(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
re_path(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
re_path(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
]
__all__ = ['urls']

View File

@@ -0,0 +1,45 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from datetime import timedelta
from django.utils.timezone import now
from django.conf import settings
from django.urls import re_path
from oauthlib import oauth2
from oauth2_provider import views
from awx.main.models import RefreshToken
from awx.api.views.root import ApiOAuthAuthorizationRootView
class TokenView(views.TokenView):
def create_token_response(self, request):
# Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
# properly expired (ugh):
#
# https://github.com/jazzband/django-oauth-toolkit/issues/746
#
# This code detects and auto-expires them on refresh grant
# requests.
if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
if refresh_token:
expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
if refresh_token.created + timedelta(seconds=expire_seconds) < now():
return request.build_absolute_uri(), {}, 'The refresh token has expired.', '403'
try:
return super(TokenView, self).create_token_response(request)
except oauth2.AccessDeniedError as e:
return request.build_absolute_uri(), {}, str(e), '403'
urls = [
re_path(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'),
re_path(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"),
re_path(r"^token/$", TokenView.as_view(), name="token"),
re_path(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"),
]
__all__ = ['urls']

View File

@@ -25,7 +25,7 @@ from awx.api.views.organization import (
OrganizationObjectRolesList, OrganizationObjectRolesList,
OrganizationAccessList, OrganizationAccessList,
) )
from awx.api.views import OrganizationCredentialList from awx.api.views import OrganizationCredentialList, OrganizationApplicationList
urls = [ urls = [
@@ -66,6 +66,7 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), re_path(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
re_path(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), re_path(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
re_path(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), re_path(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
re_path(r'^(?P<pk>[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'),
] ]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -1,17 +0,0 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import (
ReceptorAddressesList,
ReceptorAddressDetail,
)
urls = [
re_path(r'^$', ReceptorAddressesList.as_view(), name='receptor_addresses_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ReceptorAddressDetail.as_view(), name='receptor_address_detail'),
]
__all__ = ['urls']

View File

@@ -15,6 +15,7 @@ from awx.api.views.root import (
ApiV2AttachView, ApiV2AttachView,
) )
from awx.api.views import ( from awx.api.views import (
AuthView,
UserMeList, UserMeList,
DashboardView, DashboardView,
DashboardJobsGraphView, DashboardJobsGraphView,
@@ -25,30 +26,30 @@ from awx.api.views import (
JobTemplateCredentialsList, JobTemplateCredentialsList,
SchedulePreview, SchedulePreview,
ScheduleZoneInfo, ScheduleZoneInfo,
HostMetricSummaryMonthlyList, OAuth2ApplicationList,
OAuth2TokenList,
ApplicationOAuth2TokenList,
OAuth2ApplicationDetail,
) )
from awx.api.views.bulk import ( from awx.api.views.bulk import (
BulkView, BulkView,
BulkHostCreateView, BulkHostCreateView,
BulkHostDeleteView,
BulkJobLaunchView, BulkJobLaunchView,
) )
from awx.api.views.mesh_visualizer import MeshVisualizer from awx.api.views.mesh_visualizer import MeshVisualizer
from awx.api.views.metrics import MetricsView from awx.api.views.metrics import MetricsView
from awx.api.views.analytics import AWX_ANALYTICS_API_PREFIX
from .organization import urls as organization_urls from .organization import urls as organization_urls
from .user import urls as user_urls from .user import urls as user_urls
from .project import urls as project_urls from .project import urls as project_urls
from .project_update import urls as project_update_urls from .project_update import urls as project_update_urls
from .inventory import urls as inventory_urls, constructed_inventory_urls from .inventory import urls as inventory_urls
from .execution_environments import urls as execution_environment_urls from .execution_environments import urls as execution_environment_urls
from .team import urls as team_urls from .team import urls as team_urls
from .host import urls as host_urls from .host import urls as host_urls
from .host_metric import urls as host_metric_urls
from .group import urls as group_urls from .group import urls as group_urls
from .inventory_source import urls as inventory_source_urls from .inventory_source import urls as inventory_source_urls
from .inventory_update import urls as inventory_update_urls from .inventory_update import urls as inventory_update_urls
@@ -75,10 +76,11 @@ from .schedule import urls as schedule_urls
from .activity_stream import urls as activity_stream_urls from .activity_stream import urls as activity_stream_urls
from .instance import urls as instance_urls from .instance import urls as instance_urls
from .instance_group import urls as instance_group_urls from .instance_group import urls as instance_group_urls
from .oauth2 import urls as oauth2_urls
from .oauth2_root import urls as oauth2_root_urls
from .workflow_approval_template import urls as workflow_approval_template_urls from .workflow_approval_template import urls as workflow_approval_template_urls
from .workflow_approval import urls as workflow_approval_urls from .workflow_approval import urls as workflow_approval_urls
from .analytics import urls as analytics_urls
from .receptor_address import urls as receptor_address_urls
v2_urls = [ v2_urls = [
re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
@@ -89,11 +91,17 @@ v2_urls = [
re_path(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), re_path(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
re_path(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
re_path(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'),
re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
re_path(r'^', include(oauth2_urls)),
re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'),
re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'),
re_path(r'^auth/$', AuthView.as_view()),
re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'), re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'),
re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'),
@@ -109,10 +117,7 @@ v2_urls = [
re_path(r'^project_updates/', include(project_update_urls)), re_path(r'^project_updates/', include(project_update_urls)),
re_path(r'^teams/', include(team_urls)), re_path(r'^teams/', include(team_urls)),
re_path(r'^inventories/', include(inventory_urls)), re_path(r'^inventories/', include(inventory_urls)),
re_path(r'^constructed_inventories/', include(constructed_inventory_urls)),
re_path(r'^hosts/', include(host_urls)), re_path(r'^hosts/', include(host_urls)),
re_path(r'^host_metrics/', include(host_metric_urls)),
re_path(r'^host_metric_summary_monthly/$', HostMetricSummaryMonthlyList.as_view(), name='host_metric_summary_monthly_list'),
re_path(r'^groups/', include(group_urls)), re_path(r'^groups/', include(group_urls)),
re_path(r'^inventory_sources/', include(inventory_source_urls)), re_path(r'^inventory_sources/', include(inventory_source_urls)),
re_path(r'^inventory_updates/', include(inventory_update_urls)), re_path(r'^inventory_updates/', include(inventory_update_urls)),
@@ -136,14 +141,11 @@ v2_urls = [
re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'),
re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'),
re_path(r'^activity_stream/', include(activity_stream_urls)), re_path(r'^activity_stream/', include(activity_stream_urls)),
re_path(rf'^{AWX_ANALYTICS_API_PREFIX}/', include(analytics_urls)),
re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)),
re_path(r'^workflow_approvals/', include(workflow_approval_urls)), re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
re_path(r'^bulk/$', BulkView.as_view(), name='bulk'), re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'), re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'),
re_path(r'^bulk/host_delete/$', BulkHostDeleteView.as_view(), name='bulk_host_delete'),
re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'), re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'),
re_path(r'^receptor_addresses/', include(receptor_address_urls)),
] ]
@@ -153,16 +155,14 @@ urlpatterns = [
re_path(r'^(?P<version>(v2))/', include(v2_urls)), re_path(r'^(?P<version>(v2))/', include(v2_urls)),
re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
re_path(r'^o/', include(oauth2_root_urls)),
] ]
if MODE == 'development': if MODE == 'development':
# Only include these if we are in the development environment # Only include these if we are in the development environment
from awx.api.swagger import schema_view from awx.api.swagger import SwaggerSchemaView
urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]
from awx.api.urls.debug import urls as debug_urls from awx.api.urls.debug import urls as debug_urls
urlpatterns += [re_path(r'^debug/', include(debug_urls))] urlpatterns += [re_path(r'^debug/', include(debug_urls))]
urlpatterns += [
re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
]

View File

@@ -14,6 +14,10 @@ from awx.api.views import (
UserRolesList, UserRolesList,
UserActivityStreamList, UserActivityStreamList,
UserAccessList, UserAccessList,
OAuth2ApplicationList,
OAuth2UserTokenList,
UserPersonalTokenList,
UserAuthorizedTokenList,
) )
urls = [ urls = [
@@ -27,6 +31,10 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), re_path(r'^(?P<pk>[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), re_path(r'^(?P<pk>[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'),
re_path(r'^(?P<pk>[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
re_path(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
re_path(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
re_path(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),
] ]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -1,11 +1,10 @@
from django.urls import re_path from django.urls import re_path
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
urlpatterns = [ urlpatterns = [
re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'),
re_path(r'^bitbucket_dc/$', BitbucketDcWebhookReceiver.as_view(), name='webhook_receiver_bitbucket_dc'),
] ]

View File

@@ -2,21 +2,28 @@
# All Rights Reserved. # All Rights Reserved.
from django.conf import settings from django.conf import settings
from django.urls import NoReverseMatch
from rest_framework.reverse import reverse as drf_reverse from rest_framework.reverse import _reverse
from rest_framework.versioning import URLPathVersioning as BaseVersioning from rest_framework.versioning import URLPathVersioning as BaseVersioning
def is_optional_api_urlpattern_prefix_request(request): def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
if settings.OPTIONAL_API_URLPATTERN_PREFIX and request: """
if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"): Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
return True query string parameters.
return False """
scheme = getattr(request, 'versioning_scheme', None)
if scheme is not None:
try:
url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
except NoReverseMatch:
# In case the versioning scheme reversal fails, fallback to the
# default implementation
url = _reverse(viewname, args, kwargs, request, format, **extra)
else:
url = _reverse(viewname, args, kwargs, request, format, **extra)
def transform_optional_api_urlpattern_prefix_url(request, url):
if is_optional_api_urlpattern_prefix_request(request):
url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
return url return url

View File

@@ -17,6 +17,7 @@ from collections import OrderedDict
from urllib3.exceptions import ConnectTimeoutError from urllib3.exceptions import ConnectTimeoutError
# Django # Django
from django.conf import settings from django.conf import settings
from django.core.exceptions import FieldError, ObjectDoesNotExist from django.core.exceptions import FieldError, ObjectDoesNotExist
@@ -29,14 +30,15 @@ from django.utils.safestring import mark_safe
from django.utils.timezone import now from django.utils.timezone import now
from django.views.decorators.csrf import csrf_exempt from django.views.decorators.csrf import csrf_exempt
from django.template.loader import render_to_string from django.template.loader import render_to_string
from django.http import HttpResponse, HttpResponseRedirect from django.http import HttpResponse
from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.models import ContentType
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
# Django REST Framework # Django REST Framework
from rest_framework.exceptions import APIException, PermissionDenied, ParseError, NotFound from rest_framework.exceptions import APIException, PermissionDenied, ParseError, NotFound
from rest_framework.parsers import FormParser from rest_framework.parsers import FormParser
from rest_framework.permissions import IsAuthenticated from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.renderers import JSONRenderer, StaticHTMLRenderer from rest_framework.renderers import JSONRenderer, StaticHTMLRenderer
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.settings import api_settings from rest_framework.settings import api_settings
@@ -47,20 +49,21 @@ from rest_framework import status
from rest_framework_yaml.parsers import YAMLParser from rest_framework_yaml.parsers import YAMLParser
from rest_framework_yaml.renderers import YAMLRenderer from rest_framework_yaml.renderers import YAMLRenderer
# ansi2html # ANSIConv
from ansi2html import Ansi2HTMLConverter import ansiconv
# Python Social Auth
from social_core.backends.utils import load_backends
# Django OAuth Toolkit
from oauth2_provider.models import get_access_token_model
import pytz import pytz
from wsgiref.util import FileWrapper from wsgiref.util import FileWrapper
# django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
# AWX # AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
from awx.main.access import get_user_queryset from awx.main.access import get_user_queryset, HostAccess
from awx.api.generics import ( from awx.api.generics import (
APIView, APIView,
BaseUsersList, BaseUsersList,
@@ -85,7 +88,6 @@ from awx.api.generics import (
from awx.api.views.labels import LabelSubListCreateAttachDetachView from awx.api.views.labels import LabelSubListCreateAttachDetachView
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.main import models from awx.main import models
from awx.main.models.rbac import get_role_definition
from awx.main.utils import ( from awx.main.utils import (
camelcase_to_underscore, camelcase_to_underscore,
extract_ansible_vars, extract_ansible_vars,
@@ -97,7 +99,6 @@ from awx.main.utils import (
) )
from awx.main.utils.encryption import encrypt_value from awx.main.utils.encryption import encrypt_value
from awx.main.utils.filters import SmartFilter from awx.main.utils.filters import SmartFilter
from awx.main.utils.plugins import compute_cloud_inventory_sources
from awx.main.redact import UriCleaner from awx.main.redact import UriCleaner
from awx.api.permissions import ( from awx.api.permissions import (
JobTemplateCallbackPermission, JobTemplateCallbackPermission,
@@ -128,10 +129,6 @@ logger = logging.getLogger('awx.api.views')
def unpartitioned_event_horizon(cls): def unpartitioned_event_horizon(cls):
with connection.cursor() as cursor:
cursor.execute(f"SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE table_name = '_unpartitioned_{cls._meta.db_table}';")
if not cursor.fetchone():
return 0
with connection.cursor() as cursor: with connection.cursor() as cursor:
try: try:
cursor.execute(f'SELECT MAX(id) FROM _unpartitioned_{cls._meta.db_table}') cursor.execute(f'SELECT MAX(id) FROM _unpartitioned_{cls._meta.db_table}')
@@ -272,24 +269,16 @@ class DashboardJobsGraphView(APIView):
success_query = user_unified_jobs.filter(status='successful') success_query = user_unified_jobs.filter(status='successful')
failed_query = user_unified_jobs.filter(status='failed') failed_query = user_unified_jobs.filter(status='failed')
canceled_query = user_unified_jobs.filter(status='canceled')
error_query = user_unified_jobs.filter(status='error')
if job_type == 'inv_sync': if job_type == 'inv_sync':
success_query = success_query.filter(instance_of=models.InventoryUpdate) success_query = success_query.filter(instance_of=models.InventoryUpdate)
failed_query = failed_query.filter(instance_of=models.InventoryUpdate) failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
error_query = error_query.filter(instance_of=models.InventoryUpdate)
elif job_type == 'playbook_run': elif job_type == 'playbook_run':
success_query = success_query.filter(instance_of=models.Job) success_query = success_query.filter(instance_of=models.Job)
failed_query = failed_query.filter(instance_of=models.Job) failed_query = failed_query.filter(instance_of=models.Job)
canceled_query = canceled_query.filter(instance_of=models.Job)
error_query = error_query.filter(instance_of=models.Job)
elif job_type == 'scm_update': elif job_type == 'scm_update':
success_query = success_query.filter(instance_of=models.ProjectUpdate) success_query = success_query.filter(instance_of=models.ProjectUpdate)
failed_query = failed_query.filter(instance_of=models.ProjectUpdate) failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
error_query = error_query.filter(instance_of=models.ProjectUpdate)
end = now() end = now()
interval = 'day' interval = 'day'
@@ -305,12 +294,10 @@ class DashboardJobsGraphView(APIView):
else: else:
return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST) return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}} dashboard_data = {"jobs": {"successful": [], "failed": []}}
succ_list = dashboard_data['jobs']['successful'] succ_list = dashboard_data['jobs']['successful']
fail_list = dashboard_data['jobs']['failed'] fail_list = dashboard_data['jobs']['failed']
canceled_list = dashboard_data['jobs']['canceled']
error_list = dashboard_data['jobs']['error']
qs_s = ( qs_s = (
success_query.filter(finished__range=(start, end)) success_query.filter(finished__range=(start, end))
@@ -328,22 +315,6 @@ class DashboardJobsGraphView(APIView):
.annotate(agg=Count('id', distinct=True)) .annotate(agg=Count('id', distinct=True))
) )
data_f = {item['d']: item['agg'] for item in qs_f} data_f = {item['d']: item['agg'] for item in qs_f}
qs_c = (
canceled_query.filter(finished__range=(start, end))
.annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
.order_by()
.values('d')
.annotate(agg=Count('id', distinct=True))
)
data_c = {item['d']: item['agg'] for item in qs_c}
qs_e = (
error_query.filter(finished__range=(start, end))
.annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
.order_by()
.values('d')
.annotate(agg=Count('id', distinct=True))
)
data_e = {item['d']: item['agg'] for item in qs_e}
start_date = start.replace(hour=0, minute=0, second=0, microsecond=0) start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
for d in itertools.count(): for d in itertools.count():
@@ -352,8 +323,6 @@ class DashboardJobsGraphView(APIView):
break break
succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)]) succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)]) fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])
return Response(dashboard_data) return Response(dashboard_data)
@@ -365,34 +334,25 @@ class InstanceList(ListCreateAPIView):
search_fields = ('hostname',) search_fields = ('hostname',)
ordering = ('id',) ordering = ('id',)
def get_queryset(self):
qs = super().get_queryset().prefetch_related('receptor_addresses')
return qs
class InstanceDetail(RetrieveUpdateAPIView): class InstanceDetail(RetrieveUpdateAPIView):
name = _("Instance Detail") name = _("Instance Detail")
model = models.Instance model = models.Instance
serializer_class = serializers.InstanceSerializer serializer_class = serializers.InstanceSerializer
def get_queryset(self):
qs = super().get_queryset().prefetch_related('receptor_addresses')
return qs
def update_raw_data(self, data): def update_raw_data(self, data):
# these fields are only valid on creation of an instance, so they unwanted on detail view # these fields are only valid on creation of an instance, so they unwanted on detail view
data.pop('listener_port', None)
data.pop('node_type', None) data.pop('node_type', None)
data.pop('hostname', None) data.pop('hostname', None)
data.pop('ip_address', None)
return super(InstanceDetail, self).update_raw_data(data) return super(InstanceDetail, self).update_raw_data(data)
def update(self, request, *args, **kwargs): def update(self, request, *args, **kwargs):
r = super(InstanceDetail, self).update(request, *args, **kwargs) r = super(InstanceDetail, self).update(request, *args, **kwargs)
if status.is_success(r.status_code): if status.is_success(r.status_code):
obj = self.get_object() obj = self.get_object()
capacity_changed = obj.set_capacity_value() obj.set_capacity_value()
if capacity_changed: obj.save(update_fields=['capacity'])
obj.save(update_fields=['capacity'])
r.data = serializers.InstanceSerializer(obj, context=self.get_serializer_context()).to_representation(obj) r.data = serializers.InstanceSerializer(obj, context=self.get_serializer_context()).to_representation(obj)
return r return r
@@ -411,37 +371,13 @@ class InstanceUnifiedJobsList(SubListAPIView):
class InstancePeersList(SubListAPIView): class InstancePeersList(SubListAPIView):
name = _("Peers") name = _("Instance Peers")
model = models.ReceptorAddress
serializer_class = serializers.ReceptorAddressSerializer
parent_model = models.Instance parent_model = models.Instance
model = models.Instance
serializer_class = serializers.InstanceSerializer
parent_access = 'read' parent_access = 'read'
search_fields = {'hostname'}
relationship = 'peers' relationship = 'peers'
search_fields = ('address',)
class InstanceReceptorAddressesList(SubListAPIView):
name = _("Receptor Addresses")
model = models.ReceptorAddress
parent_key = 'instance'
parent_model = models.Instance
serializer_class = serializers.ReceptorAddressSerializer
search_fields = ('address',)
class ReceptorAddressesList(ListAPIView):
name = _("Receptor Addresses")
model = models.ReceptorAddress
serializer_class = serializers.ReceptorAddressSerializer
search_fields = ('address',)
class ReceptorAddressDetail(RetrieveAPIView):
name = _("Receptor Address Detail")
model = models.ReceptorAddress
serializer_class = serializers.ReceptorAddressSerializer
parent_model = models.Instance
relationship = 'receptor_addresses'
class InstanceInstanceGroupsList(InstanceGroupMembershipMixin, SubListCreateAttachDetachAPIView): class InstanceInstanceGroupsList(InstanceGroupMembershipMixin, SubListCreateAttachDetachAPIView):
@@ -536,7 +472,6 @@ class InstanceGroupAccessList(ResourceAccessList):
class InstanceGroupObjectRolesList(SubListAPIView): class InstanceGroupObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.InstanceGroup parent_model = models.InstanceGroup
@@ -631,7 +566,7 @@ class LaunchConfigCredentialsBase(SubListAttachDetachAPIView):
if self.relationship not in ask_mapping: if self.relationship not in ask_mapping:
return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)} return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)}
elif sub.passwords_needed: elif sub.passwords_needed:
return {"msg": _("Credential that requires user input on launch cannot be used in saved launch configuration.")} return {"msg": _("Credential that requires user input on launch " "cannot be used in saved launch configuration.")}
ask_field_name = ask_mapping[self.relationship] ask_field_name = ask_mapping[self.relationship]
@@ -671,81 +606,51 @@ class ScheduleUnifiedJobsList(SubListAPIView):
name = _('Schedule Jobs List') name = _('Schedule Jobs List')
def immutablesharedfields(cls): class AuthView(APIView):
''' '''List enabled single-sign-on endpoints'''
Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
Works by overriding these view methods: authentication_classes = []
- create permission_classes = (AllowAny,)
- delete swagger_topic = 'System Configuration'
- perform_update
create and delete are overridden to raise a PermissionDenied exception.
perform_update is overridden to check if any shared fields are being modified,
and raise a PermissionDenied exception if so.
'''
# create instead of perform_create because some of our views
# override create instead of perform_create
if hasattr(cls, 'create'):
cls.original_create = cls.create
@functools.wraps(cls.create) def get(self, request):
def create_wrapper(*args, **kwargs): from rest_framework.reverse import reverse
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
return cls.original_create(*args, **kwargs)
raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
cls.create = create_wrapper data = OrderedDict()
err_backend, err_message = request.session.get('social_auth_error', (None, None))
if hasattr(cls, 'delete'): auth_backends = list(load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items())
cls.original_delete = cls.delete # Return auth backends in consistent order: Google, GitHub, SAML.
auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])
@functools.wraps(cls.delete) for name, backend in auth_backends:
def delete_wrapper(*args, **kwargs): login_url = reverse('social:begin', args=(name,))
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT: complete_url = request.build_absolute_uri(reverse('social:complete', args=(name,)))
return cls.original_delete(*args, **kwargs) backend_data = {'login_url': login_url, 'complete_url': complete_url}
raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')}) if name == 'saml':
backend_data['metadata_url'] = reverse('sso:saml_metadata')
cls.delete = delete_wrapper for idp in sorted(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.keys()):
saml_backend_data = dict(backend_data.items())
if hasattr(cls, 'perform_update'): saml_backend_data['login_url'] = '%s?idp=%s' % (login_url, idp)
cls.original_perform_update = cls.perform_update full_backend_name = '%s:%s' % (name, idp)
if (err_backend == full_backend_name or err_backend == name) and err_message:
@functools.wraps(cls.perform_update) saml_backend_data['error'] = err_message
def update_wrapper(*args, **kwargs): data[full_backend_name] = saml_backend_data
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT: else:
view, serializer = args if err_backend == name and err_message:
instance = view.get_object() backend_data['error'] = err_message
if instance: data[name] = backend_data
if isinstance(instance, models.Organization): return Response(data)
shared_fields = OrganizationType._declared_fields.keys()
elif isinstance(instance, models.User):
shared_fields = UserType._declared_fields.keys()
elif isinstance(instance, models.Team):
shared_fields = TeamType._declared_fields.keys()
attrs = serializer.validated_data
for field in shared_fields:
if field in attrs and getattr(instance, field) != attrs[field]:
raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
return cls.original_perform_update(*args, **kwargs)
cls.perform_update = update_wrapper
return cls
@immutablesharedfields
class TeamList(ListCreateAPIView): class TeamList(ListCreateAPIView):
model = models.Team model = models.Team
serializer_class = serializers.TeamSerializer serializer_class = serializers.TeamSerializer
@immutablesharedfields
class TeamDetail(RetrieveUpdateDestroyAPIView): class TeamDetail(RetrieveUpdateDestroyAPIView):
model = models.Team model = models.Team
serializer_class = serializers.TeamSerializer serializer_class = serializers.TeamSerializer
@immutablesharedfields
class TeamUsersList(BaseUsersList): class TeamUsersList(BaseUsersList):
model = models.User model = models.User
serializer_class = serializers.UserSerializer serializer_class = serializers.UserSerializer
@@ -755,7 +660,6 @@ class TeamUsersList(BaseUsersList):
class TeamRolesList(SubListAttachDetachAPIView): class TeamRolesList(SubListAttachDetachAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializerWithParentAccess serializer_class = serializers.RoleSerializerWithParentAccess
metadata_class = RoleMetadata metadata_class = RoleMetadata
@@ -795,12 +699,10 @@ class TeamRolesList(SubListAttachDetachAPIView):
class TeamObjectRolesList(SubListAPIView): class TeamObjectRolesList(SubListAPIView):
deprecated = True
model = models.Role model = models.Role
serializer_class = serializers.RoleSerializer serializer_class = serializers.RoleSerializer
parent_model = models.Team parent_model = models.Team
search_fields = ('role_field', 'content_type__model') search_fields = ('role_field', 'content_type__model')
deprecated = True
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -818,15 +720,8 @@ class TeamProjectsList(SubListAPIView):
        self.check_parent_access(team)
        model_ct = ContentType.objects.get_for_model(self.model)
        parent_ct = ContentType.objects.get_for_model(self.parent_model)
-        rd = get_role_definition(team.member_role)
-        role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
-        if role is None:
-            # Team has no permissions, therefore team has no projects
-            return self.model.objects.none()
-        else:
-            project_qs = self.model.accessible_objects(self.request.user, 'read_role')
-            return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
+        proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
+        return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
class TeamActivityStreamList(SubListAPIView):
@@ -841,23 +736,10 @@ class TeamActivityStreamList(SubListAPIView):
        self.check_parent_access(parent)
        qs = self.request.user.get_queryset(self.model)
        return qs.filter(
            Q(team=parent)
-            | Q(
-                project__in=RoleEvaluation.objects.filter(
-                    role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Project).id, codename='view_project'
-                )
-                .values_list('object_id')
-                .distinct()
-            )
-            | Q(
-                credential__in=RoleEvaluation.objects.filter(
-                    role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Credential).id, codename='view_credential'
-                )
-                .values_list('object_id')
-                .distinct()
-            )
+            | Q(project__in=models.Project.accessible_objects(parent, 'read_role'))
+            | Q(credential__in=models.Credential.accessible_objects(parent, 'read_role'))
        )
@@ -913,7 +795,13 @@ class ExecutionEnvironmentActivityStreamList(SubListAPIView):
    parent_model = models.ExecutionEnvironment
    relationship = 'activitystream_set'
    search_fields = ('changes',)
-    filter_read_permission = False
+
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        qs = self.request.user.get_queryset(self.model)
+        return qs.filter(execution_environment=parent)
class ProjectList(ListCreateAPIView):
@@ -1109,12 +997,10 @@ class ProjectAccessList(ResourceAccessList):

class ProjectObjectRolesList(SubListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Project
    search_fields = ('role_field', 'content_type__model')
-    deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()
@@ -1127,7 +1013,6 @@ class ProjectCopy(CopyAPIView):
    copy_return_serializer_class = serializers.ProjectSerializer

-@immutablesharedfields
class UserList(ListCreateAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer
@@ -1145,6 +1030,121 @@ class UserMeList(ListAPIView):
        return self.model.objects.filter(pk=self.request.user.pk)
+class OAuth2ApplicationList(ListCreateAPIView):
+    name = _("OAuth 2 Applications")
+
+    model = models.OAuth2Application
+    serializer_class = serializers.OAuth2ApplicationSerializer
+    swagger_topic = 'Authentication'
+
+
+class OAuth2ApplicationDetail(RetrieveUpdateDestroyAPIView):
+    name = _("OAuth 2 Application Detail")
+
+    model = models.OAuth2Application
+    serializer_class = serializers.OAuth2ApplicationSerializer
+    swagger_topic = 'Authentication'
+
+    def update_raw_data(self, data):
+        data.pop('client_secret', None)
+        return super(OAuth2ApplicationDetail, self).update_raw_data(data)
+
+
+class ApplicationOAuth2TokenList(SubListCreateAPIView):
+    name = _("OAuth 2 Application Tokens")
+
+    model = models.OAuth2AccessToken
+    serializer_class = serializers.OAuth2TokenSerializer
+    parent_model = models.OAuth2Application
+    relationship = 'oauth2accesstoken_set'
+    parent_key = 'application'
+    swagger_topic = 'Authentication'
+
+
+class OAuth2ApplicationActivityStreamList(SubListAPIView):
+    model = models.ActivityStream
+    serializer_class = serializers.ActivityStreamSerializer
+    parent_model = models.OAuth2Application
+    relationship = 'activitystream_set'
+    swagger_topic = 'Authentication'
+    search_fields = ('changes',)
+
+
+class OAuth2TokenList(ListCreateAPIView):
+    name = _("OAuth2 Tokens")
+
+    model = models.OAuth2AccessToken
+    serializer_class = serializers.OAuth2TokenSerializer
+    swagger_topic = 'Authentication'
+
+
+class OAuth2UserTokenList(SubListCreateAPIView):
+    name = _("OAuth2 User Tokens")
+
+    model = models.OAuth2AccessToken
+    serializer_class = serializers.OAuth2TokenSerializer
+    parent_model = models.User
+    relationship = 'main_oauth2accesstoken'
+    parent_key = 'user'
+    swagger_topic = 'Authentication'
+
+
+class UserAuthorizedTokenList(SubListCreateAPIView):
+    name = _("OAuth2 User Authorized Access Tokens")
+
+    model = models.OAuth2AccessToken
+    serializer_class = serializers.UserAuthorizedTokenSerializer
+    parent_model = models.User
+    relationship = 'oauth2accesstoken_set'
+    parent_key = 'user'
+    swagger_topic = 'Authentication'
+
+    def get_queryset(self):
+        return get_access_token_model().objects.filter(application__isnull=False, user=self.request.user)
+
+
+class OrganizationApplicationList(SubListCreateAPIView):
+    name = _("Organization OAuth2 Applications")
+
+    model = models.OAuth2Application
+    serializer_class = serializers.OAuth2ApplicationSerializer
+    parent_model = models.Organization
+    relationship = 'applications'
+    parent_key = 'organization'
+    swagger_topic = 'Authentication'
+
+
+class UserPersonalTokenList(SubListCreateAPIView):
+    name = _("OAuth2 Personal Access Tokens")
+
+    model = models.OAuth2AccessToken
+    serializer_class = serializers.UserPersonalTokenSerializer
+    parent_model = models.User
+    relationship = 'main_oauth2accesstoken'
+    parent_key = 'user'
+    swagger_topic = 'Authentication'
+
+    def get_queryset(self):
+        return get_access_token_model().objects.filter(application__isnull=True, user=self.request.user)
+
+
+class OAuth2TokenDetail(RetrieveUpdateDestroyAPIView):
+    name = _("OAuth Token Detail")
+
+    model = models.OAuth2AccessToken
+    serializer_class = serializers.OAuth2TokenDetailSerializer
+    swagger_topic = 'Authentication'
+
+
+class OAuth2TokenActivityStreamList(SubListAPIView):
+    model = models.ActivityStream
+    serializer_class = serializers.ActivityStreamSerializer
+    parent_model = models.OAuth2AccessToken
+    relationship = 'activitystream_set'
+    swagger_topic = 'Authentication'
+    search_fields = ('changes',)
class UserTeamsList(SubListAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer
@@ -1158,7 +1158,6 @@ class UserTeamsList(SubListAPIView):

class UserRolesList(SubListAttachDetachAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializerWithParentAccess
    metadata_class = RoleMetadata
@@ -1183,16 +1182,7 @@ class UserRolesList(SubListAttachDetachAPIView):
        user = get_object_or_400(models.User, pk=self.kwargs['pk'])
        role = get_object_or_400(models.Role, pk=sub_id)

-        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
-        # Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
-        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
-            for model in [models.Organization, models.Team]:
-                ct = content_types[model]
-                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
-                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
-                    return Response(data, status=status.HTTP_403_FORBIDDEN)
-
-        credential_content_type = content_types[models.Credential]
+        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1264,7 +1254,6 @@ class UserActivityStreamList(SubListAPIView):
        return qs.filter(Q(actor=parent) | Q(user__in=[parent]))

-@immutablesharedfields
class UserDetail(RetrieveUpdateDestroyAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer
@@ -1410,7 +1399,7 @@ class OrganizationCredentialList(SubListCreateAPIView):
        self.check_parent_access(organization)

        user_visible = models.Credential.accessible_objects(self.request.user, 'read_role').all()
-        org_set = models.Credential.objects.filter(organization=organization)
+        org_set = models.Credential.accessible_objects(organization.admin_role, 'read_role').all()

        if self.request.user.is_superuser or self.request.user.is_system_auditor:
            return org_set
@@ -1443,12 +1432,10 @@ class CredentialAccessList(ResourceAccessList):

class CredentialObjectRolesList(SubListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Credential
    search_fields = ('role_field', 'content_type__model')
-    deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()
@@ -1561,40 +1548,6 @@ class HostRelatedSearchMixin(object):
        return ret

-class HostMetricList(ListAPIView):
-    name = _("Host Metrics List")
-
-    model = models.HostMetric
-    serializer_class = serializers.HostMetricSerializer
-    permission_classes = (IsSystemAdminOrAuditor,)
-    search_fields = ('hostname', 'deleted')
-
-    def get_queryset(self):
-        return self.model.objects.all()
-
-
-class HostMetricDetail(RetrieveDestroyAPIView):
-    name = _("Host Metric Detail")
-
-    model = models.HostMetric
-    serializer_class = serializers.HostMetricSerializer
-    permission_classes = (IsSystemAdminOrAuditor,)
-
-    def delete(self, request, *args, **kwargs):
-        self.get_object().soft_delete()
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
-
-class HostMetricSummaryMonthlyList(ListAPIView):
-    name = _("Host Metrics Summary Monthly")
-
-    model = models.HostMetricSummaryMonthly
-    serializer_class = serializers.HostMetricSummaryMonthlySerializer
-    permission_classes = (IsSystemAdminOrAuditor,)
-    search_fields = ('date',)
-
-    def get_queryset(self):
-        return self.model.objects.all()
class HostList(HostRelatedSearchMixin, ListCreateAPIView):
    always_allow_superuser = False
    model = models.Host
@@ -1623,8 +1576,6 @@ class HostDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    def delete(self, request, *args, **kwargs):
        if self.get_object().inventory.pending_deletion:
            return Response({"error": _("The inventory for this host is already being deleted.")}, status=status.HTTP_400_BAD_REQUEST)
-        if self.get_object().inventory.kind == 'constructed':
-            return Response({"error": _("Delete constructed inventory hosts from input inventory.")}, status=status.HTTP_400_BAD_REQUEST)
        return super(HostDetail, self).delete(request, *args, **kwargs)
@@ -1632,14 +1583,6 @@ class HostAnsibleFactsDetail(RetrieveAPIView):
    model = models.Host
    serializer_class = serializers.AnsibleFactsSerializer

-    def get(self, request, *args, **kwargs):
-        obj = self.get_object()
-        if obj.inventory.kind == 'constructed':
-            # If this is a constructed inventory host, it is not the source of truth about facts
-            # redirect to the original input inventory host instead
-            return HttpResponseRedirect(reverse('api:host_ansible_facts_detail', kwargs={'pk': obj.instance_id}, request=self.request))
-        return super().get(request, *args, **kwargs)
class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIView):
    model = models.Host
@@ -1647,7 +1590,13 @@ class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIVie
    parent_model = models.Inventory
    relationship = 'hosts'
    parent_key = 'inventory'
-    filter_read_permission = False
+
+    def get_queryset(self):
+        inventory = self.get_parent_object()
+        qs = getattrd(inventory, self.relationship).all()
+        # Apply queryset optimizations
+        qs = qs.select_related(*HostAccess.select_related).prefetch_related(*HostAccess.prefetch_related)
+        return qs
class HostGroupsList(SubListCreateAttachDetachAPIView):
@@ -2079,9 +2028,9 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi
    def post(self, request, *args, **kwargs):
        parent = self.get_parent_object()
-        if parent.source not in compute_cloud_inventory_sources():
+        if parent.source not in models.CLOUD_INVENTORY_SOURCES:
            return Response(
-                dict(msg=_("Notification Templates can only be assigned when source is one of {}.").format(compute_cloud_inventory_sources(), parent.source)),
+                dict(msg=_("Notification Templates can only be assigned when source is one of {}.").format(models.CLOUD_INVENTORY_SOURCES, parent.source)),
                status=status.HTTP_400_BAD_REQUEST,
            )
        return super(InventorySourceNotificationTemplatesAnyList, self).post(request, *args, **kwargs)
@@ -2235,16 +2184,12 @@ class JobTemplateList(ListCreateAPIView):
    serializer_class = serializers.JobTemplateSerializer
    always_allow_superuser = False

-    def check_permissions(self, request):
-        if request.method == 'POST':
-            if request.user.is_anonymous:
-                self.permission_denied(request)
-            else:
-                can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
-                if not can_access:
-                    self.permission_denied(request, message=messages)
-
-        super(JobTemplateList, self).check_permissions(request)
+    def post(self, request, *args, **kwargs):
+        ret = super(JobTemplateList, self).post(request, *args, **kwargs)
+        if ret.status_code == 201:
+            job_template = models.JobTemplate.objects.get(id=ret.data['id'])
+            job_template.admin_role.members.add(request.user)
+        return ret
class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
@@ -2524,7 +2469,7 @@ class JobTemplateSurveySpec(GenericAPIView):
                return Response(
                    dict(
                        error=_(
-                            "$encrypted$ is a reserved keyword for password question defaults, survey question {idx} is type {survey_item[type]}."
+                            "$encrypted$ is a reserved keyword for password question defaults, " "survey question {idx} is type {survey_item[type]}."
                        ).format(**context)
                    ),
                    status=status.HTTP_400_BAD_REQUEST,
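The only change in this hunk is cosmetic: the message is reverted to two adjacent string literals, which the Python parser joins at compile time, so the rendered error text is identical. A minimal demonstration:

one = "$encrypted$ is a reserved keyword for password question defaults, survey question {idx} is type {survey_item[type]}."
two = "$encrypted$ is a reserved keyword for password question defaults, " "survey question {idx} is type {survey_item[type]}."
assert one == two  # adjacent literals concatenate into the same string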
@@ -2592,7 +2537,16 @@ class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView):
    serializer_class = serializers.CredentialSerializer
    parent_model = models.JobTemplate
    relationship = 'credentials'
-    filter_read_permission = False
+
+    def get_queryset(self):
+        # Return the full list of credentials
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        sublist_qs = getattrd(parent, self.relationship)
+        sublist_qs = sublist_qs.prefetch_related(
+            'created_by', 'modified_by', 'admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members'
+        )
+        return sublist_qs

    def is_valid_relation(self, parent, sub, created=False):
        if sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
@@ -2625,7 +2579,12 @@ class JobTemplateCallback(GenericAPIView):
        host for the current request.
        """
        # Find the list of remote host names/IPs to check.
-        remote_hosts = set(get_remote_hosts(self.request))
+        remote_hosts = set()
+        for header in settings.REMOTE_HOST_HEADERS:
+            for value in self.request.META.get(header, '').split(','):
+                value = value.strip()
+                if value:
+                    remote_hosts.add(value)
        # Add the reverse lookup of IP addresses.
        for rh in list(remote_hosts):
            try:
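For context, the restored loop builds the candidate host set from comma-separated proxy headers. A self-contained sketch of the same parsing, with hypothetical values standing in for settings.REMOTE_HOST_HEADERS and request.META:

# Hypothetical values; the real ones come from Django settings and the WSGI request.
REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR']
meta = {'HTTP_X_FORWARDED_FOR': '10.0.0.5, 10.0.0.6', 'REMOTE_ADDR': '10.0.0.7'}

remote_hosts = set()
for header in REMOTE_HOST_HEADERS:
    for value in meta.get(header, '').split(','):
        value = value.strip()
        if value:
            remote_hosts.add(value)

assert remote_hosts == {'10.0.0.5', '10.0.0.6', '10.0.0.7'}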
@@ -2689,10 +2648,7 @@ class JobTemplateCallback(GenericAPIView):
        # Permission class should have already validated host_config_key.
        job_template = self.get_object()
        # Attempt to find matching hosts based on remote address.
-        if job_template.inventory:
-            matching_hosts = self.find_matching_hosts()
-        else:
-            return Response({"msg": _("Cannot start automatically, an inventory is required.")}, status=status.HTTP_400_BAD_REQUEST)
+        matching_hosts = self.find_matching_hosts()
        # If the host is not found, update the inventory before trying to
        # match again.
        inventory_sources_already_updated = []
@@ -2777,7 +2733,6 @@ class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView):
    serializer_class = serializers.InstanceGroupSerializer
    parent_model = models.JobTemplate
    relationship = 'instance_groups'
-    filter_read_permission = False

class JobTemplateAccessList(ResourceAccessList):
@@ -2786,12 +2741,10 @@ class JobTemplateAccessList(ResourceAccessList):

class JobTemplateObjectRolesList(SubListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.JobTemplate
    search_fields = ('role_field', 'content_type__model')
-    deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()
@@ -2870,7 +2823,16 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, Su
    relationship = ''
    enforce_parent_relationship = 'workflow_job_template'
    search_fields = ('unified_job_template__name', 'unified_job_template__description')
-    filter_read_permission = False
+
+    '''
+    Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by
+    'relationship'
+    '''
+
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).all()

    def is_valid_relation(self, parent, sub, created=False):
        if created:
@@ -2945,7 +2907,14 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
    parent_model = models.WorkflowJobNode
    relationship = ''
    search_fields = ('unified_job_template__name', 'unified_job_template__description')
-    filter_read_permission = False
+
+    #
+    # Limit the set of WorkflowJobNodes to the related nodes of specified by self.relationship
+    #
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).all()
class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList):
@@ -2965,17 +2934,6 @@ class WorkflowJobTemplateList(ListCreateAPIView):
    serializer_class = serializers.WorkflowJobTemplateSerializer
    always_allow_superuser = False

-    def check_permissions(self, request):
-        if request.method == 'POST':
-            if request.user.is_anonymous:
-                self.permission_denied(request)
-            else:
-                can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
-                if not can_access:
-                    self.permission_denied(request, message=messages)
-
-        super(WorkflowJobTemplateList, self).check_permissions(request)
class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    model = models.WorkflowJobTemplate
@@ -3135,8 +3093,11 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
    relationship = 'workflow_job_template_nodes'
    parent_key = 'workflow_job_template'
    search_fields = ('unified_job_template__name', 'unified_job_template__description')
-    ordering = ('id',)  # assure ordering by id for consistency
-    filter_read_permission = False
+
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).order_by('id')
class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -3185,12 +3146,10 @@ class WorkflowJobTemplateAccessList(ResourceAccessList):

class WorkflowJobTemplateObjectRolesList(SubListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.WorkflowJobTemplate
    search_fields = ('role_field', 'content_type__model')
-    deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()
@@ -3230,8 +3189,11 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
    relationship = 'workflow_job_nodes'
    parent_key = 'workflow_job'
    search_fields = ('unified_job_template__name', 'unified_job_template__description')
-    ordering = ('id',)  # assure ordering by id for consistency
-    filter_read_permission = False
+
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).order_by('id')
class WorkflowJobCancel(GenericCancelView):
@@ -3366,6 +3328,7 @@ class JobLabelList(SubListAPIView):
    serializer_class = serializers.LabelSerializer
    parent_model = models.Job
    relationship = 'labels'
+    parent_key = 'job'
class WorkflowJobLabelList(JobLabelList):
@@ -3435,7 +3398,6 @@ class JobRelaunch(RetrieveAPIView):
        copy_kwargs = {}
        retry_hosts = serializer.validated_data.get('hosts', None)
-        job_type = serializer.validated_data.get('job_type', None)
        if retry_hosts and retry_hosts != 'all':
            if obj.status in ACTIVE_STATES:
                return Response(
@@ -3456,8 +3418,6 @@ class JobRelaunch(RetrieveAPIView):
                )
                copy_kwargs['limit'] = ','.join(retry_host_list)

-        if job_type:
-            copy_kwargs['job_type'] = job_type
-
        new_job = obj.copy_unified_job(**copy_kwargs)
        result = new_job.signal_start(**serializer.validated_data['credential_passwords'])
        if not result:
@@ -3547,7 +3507,11 @@ class BaseJobHostSummariesList(SubListAPIView):
    relationship = 'job_host_summaries'
    name = _('Job Host Summaries List')
    search_fields = ('host_name',)
-    filter_read_permission = False
+
+    def get_queryset(self):
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).select_related('job', 'job__job_template', 'host')
class HostJobHostSummariesList(BaseJobHostSummariesList):
@@ -4057,8 +4021,7 @@ class UnifiedJobStdout(RetrieveAPIView):
                    # Remove any ANSI escape sequences containing job event data.
                    content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content)

-                    conv = Ansi2HTMLConverter()
-                    body = conv.convert(html.escape(content))
+                    body = ansiconv.to_html(html.escape(content))

                    context = {'title': get_view_name(self.__class__), 'body': mark_safe(body), 'dark': dark_bg, 'content_only': content_only}
                    data = render_to_string('api/stdout.html', context).strip()
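Both sides of this hunk escape the output first and then convert ANSI sequences to HTML; only the converter library differs (ansi2html on the removed side, ansiconv on the restored side). A rough sketch of the two call shapes, assuming both third-party packages are installed:

import html

raw = '\x1b[31mfailed!\x1b[0m'

# Removed side: ansi2html's converter object.
from ansi2html import Ansi2HTMLConverter
body_new = Ansi2HTMLConverter().convert(html.escape(raw))

# Restored side: ansiconv's module-level helper.
import ansiconv
body_old = ansiconv.to_html(html.escape(raw))

Both return an HTML rendering of the colored text, though the generated markup and CSS hooks differ between the two libraries.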
@@ -4092,7 +4055,7 @@ class UnifiedJobStdout(RetrieveAPIView):
            return super(UnifiedJobStdout, self).retrieve(request, *args, **kwargs)
        except models.StdoutMaxBytesExceeded as e:
            response_message = _(
-                "Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes."
+                "Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes."
            ).format(text_size=e.total, supported_size=e.supported)
            if request.accepted_renderer.format == 'json':
                return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})
@@ -4203,7 +4166,6 @@ class ActivityStreamDetail(RetrieveAPIView):

class RoleList(ListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    permission_classes = (IsAuthenticated,)
@@ -4211,13 +4173,11 @@ class RoleList(ListAPIView):

class RoleDetail(RetrieveAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer

class RoleUsersList(SubListAttachDetachAPIView):
-    deprecated = True
    model = models.User
    serializer_class = serializers.UserSerializer
    parent_model = models.Role
@@ -4238,15 +4198,7 @@ class RoleUsersList(SubListAttachDetachAPIView):
        user = get_object_or_400(models.User, pk=sub_id)
        role = self.get_parent_object()

-        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
-        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
-            for model in [models.Organization, models.Team]:
-                ct = content_types[model]
-                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
-                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
-                    return Response(data, status=status.HTTP_403_FORBIDDEN)
-
-        credential_content_type = content_types[models.Credential]
+        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -4260,7 +4212,6 @@ class RoleUsersList(SubListAttachDetachAPIView):

class RoleTeamsList(SubListAttachDetachAPIView):
-    deprecated = True
    model = models.Team
    serializer_class = serializers.TeamSerializer
    parent_model = models.Role
@@ -4305,12 +4256,10 @@ class RoleTeamsList(SubListAttachDetachAPIView):
                team.member_role.children.remove(role)
            else:
                team.member_role.children.add(role)

        return Response(status=status.HTTP_204_NO_CONTENT)

class RoleParentsList(SubListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Role
@@ -4324,7 +4273,6 @@ class RoleParentsList(SubListAPIView):

class RoleChildrenList(SubListAPIView):
-    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Role

View File

@@ -1,317 +0,0 @@
import requests
import logging
import urllib.parse as urlparse

from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.utils import translation

from awx.api.generics import APIView, Response
from awx.api.permissions import AnalyticsPermission
from awx.api.versioning import reverse
from awx.main.utils import get_awx_version
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT

from rest_framework import status

from collections import OrderedDict

AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
AWX_ANALYTICS_API_PREFIX = 'analytics'

ERROR_UPLOAD_NOT_ENABLED = "analytics-upload-not-enabled"
ERROR_MISSING_URL = "missing-url"
ERROR_MISSING_USER = "missing-user"
ERROR_MISSING_PASSWORD = "missing-password"
ERROR_NO_DATA_OR_ENTITLEMENT = "no-data-or-entitlement"
ERROR_NOT_FOUND = "not-found"
ERROR_UNAUTHORIZED = "unauthorized"
ERROR_UNKNOWN = "unknown"
ERROR_UNSUPPORTED_METHOD = "unsupported-method"

logger = logging.getLogger('awx.api.views.analytics')


class MissingSettings(Exception):
    """Settings are not correct Exception"""

    pass


class GetNotAllowedMixin(object):
    def get(self, request, format=None):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)


class AnalyticsRootView(APIView):
    permission_classes = (AnalyticsPermission,)
    name = _('Automation Analytics')
    swagger_topic = 'Automation Analytics'

    def get(self, request, format=None):
        data = OrderedDict()
        data['authorized'] = reverse('api:analytics_authorized', request=request)
        data['reports'] = reverse('api:analytics_reports_list', request=request)
        data['report_options'] = reverse('api:analytics_report_options_list', request=request)
        data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
        data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
        data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
        data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
        return Response(data)


class AnalyticsGenericView(APIView):
    """
    Example:
        headers = {
            'Content-Type': 'application/json',
        }

        params = {
            'limit': '20',
            'offset': '0',
            'sort_by': 'name:asc',
        }

        json_data = {
            'limit': '20',
            'offset': '0',
            'sort_options': 'name',
            'sort_order': 'asc',
            'tags': [],
            'slug': [],
            'name': [],
            'description': '',
        }

        response = requests.post(f'{AUTOMATION_ANALYTICS_API_URL}/reports/', params=params,
                                 headers=headers, json=json_data)
        return Response(response.json(), status=response.status_code)
    """

    permission_classes = (AnalyticsPermission,)

    @staticmethod
    def _request_headers(request):
        headers = {}
        for header in ['Content-Type', 'Content-Length', 'Accept-Encoding', 'User-Agent', 'Accept']:
            if request.headers.get(header, None):
                headers[header] = request.headers.get(header)
        headers['X-Rh-Analytics-Source'] = 'controller'
        headers['X-Rh-Analytics-Source-Version'] = get_awx_version()
        headers['Accept-Language'] = translation.get_language()
        return headers

    @staticmethod
    def _get_analytics_path(request_path):
        parts = request_path.split(f'{AWX_ANALYTICS_API_PREFIX}/')
        path_specific = parts[-1]
        return f"{AUTOMATION_ANALYTICS_API_URL_PATH}/{path_specific}"

    def _get_analytics_url(self, request_path):
        analytics_path = self._get_analytics_path(request_path)
        url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
        if not url:
            raise MissingSettings(ERROR_MISSING_URL)
        url_parts = urlparse.urlsplit(url)
        analytics_url = urlparse.urlunsplit([url_parts.scheme, url_parts.netloc, analytics_path, url_parts.query, url_parts.fragment])
        return analytics_url

    @staticmethod
    def _get_setting(setting_name, default, error_message):
        setting = getattr(settings, setting_name, default)
        if not setting:
            raise MissingSettings(error_message)
        return setting

    @staticmethod
    def _error_response(keyword, message=None, remote=True, remote_status_code=None, status_code=status.HTTP_403_FORBIDDEN):
        text = {"error": {"remote": remote, "remote_status": remote_status_code, "keyword": keyword}}
        if message:
            text["error"]["message"] = message
        return Response(text, status=status_code)

    def _error_response_404(self, response):
        try:
            json_response = response.json()
            # Subscription/entitlement problem or missing tenant data in AA db => HTTP 403
            message = json_response.get('error', None)
            if message:
                return self._error_response(ERROR_NO_DATA_OR_ENTITLEMENT, message, remote=True, remote_status_code=response.status_code)
            # Standard 404 problem => HTTP 404
            message = json_response.get('detail', None) or response.text
        except requests.exceptions.JSONDecodeError:
            # Unexpected text => still HTTP 404
            message = response.text
        return self._error_response(ERROR_NOT_FOUND, message, remote=True, remote_status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND)

    @staticmethod
    def _update_response_links(json_response):
        if not json_response.get('links', None):
            return
        for key, value in json_response['links'].items():
            if value:
                json_response['links'][key] = value.replace(AUTOMATION_ANALYTICS_API_URL_PATH, f"/api/v2/{AWX_ANALYTICS_API_PREFIX}")

    def _forward_response(self, response):
        try:
            content_type = response.headers.get('content-type', '')
            if content_type.find('application/json') != -1:
                json_response = response.json()
                self._update_response_links(json_response)
                return Response(json_response, status=response.status_code)
        except Exception as e:
            logger.error(f"Analytics API: Response error: {e}")
        return Response(response.content, status=response.status_code)

    @staticmethod
    def _base_auth_request(request: requests.Request, method: str, url: str, user: str, pw: str, headers: dict[str, str]) -> requests.Response:
        response = requests.request(
            method,
            url,
            auth=(user, pw),
            verify=settings.INSIGHTS_CERT_PATH,
            params=getattr(request, 'query_params', {}),
            headers=headers,
            json=getattr(request, 'data', {}),
            timeout=(31, 31),
        )
        return response

    def _send_to_analytics(self, request, method):
        try:
            headers = self._request_headers(request)

            self._get_setting('INSIGHTS_TRACKING_STATE', False, ERROR_UPLOAD_NOT_ENABLED)

            if method not in ["GET", "POST", "OPTIONS"]:
                return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)

            url = self._get_analytics_url(request.path)

            try:
                rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
                rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
                client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
                response = client.make_request(
                    method,
                    url,
                    headers=headers,
                    verify=settings.INSIGHTS_CERT_PATH,
                    params=getattr(request, 'query_params', {}),
                    json=getattr(request, 'data', {}),
                    timeout=(31, 31),
                )
            except requests.RequestException:
                logger.error("Automation Analytics API request failed, trying base auth method")
                response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
            except MissingSettings:
                rh_user = self._get_setting('SUBSCRIPTIONS_USERNAME', None, ERROR_MISSING_USER)
                rh_password = self._get_setting('SUBSCRIPTIONS_PASSWORD', None, ERROR_MISSING_PASSWORD)
                response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)

            #
            # Missing or wrong user/pass
            #
            if response.status_code == status.HTTP_401_UNAUTHORIZED:
                text = (response.text or '').rstrip("\n")
                return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
            #
            # Not found, No entitlement or No data in Analytics
            #
            elif response.status_code == status.HTTP_404_NOT_FOUND:
                return self._error_response_404(response)
            #
            # Success or not a 401/404 errors are just forwarded
            #
            else:
                return self._forward_response(response)

        except MissingSettings as e:
            logger.warning(f"Analytics API: Setting missing: {e.args[0]}")
            return self._error_response(e.args[0], remote=False)
        except requests.exceptions.RequestException as e:
            logger.error(f"Analytics API: Request error: {e}")
            return self._error_response(ERROR_UNKNOWN, str(e), remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
        except Exception as e:
            logger.error(f"Analytics API: Error: {e}")
            return self._error_response(ERROR_UNKNOWN, str(e), remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)


class AnalyticsGenericListView(AnalyticsGenericView):
    def get(self, request, format=None):
        return self._send_to_analytics(request, method="GET")

    def post(self, request, format=None):
        return self._send_to_analytics(request, method="POST")

    def options(self, request, format=None):
        return self._send_to_analytics(request, method="OPTIONS")


class AnalyticsGenericDetailView(AnalyticsGenericView):
    def get(self, request, slug, format=None):
        return self._send_to_analytics(request, method="GET")

    def post(self, request, slug, format=None):
        return self._send_to_analytics(request, method="POST")

    def options(self, request, slug, format=None):
        return self._send_to_analytics(request, method="OPTIONS")


class AnalyticsAuthorizedView(AnalyticsGenericListView):
    name = _("Authorized")


class AnalyticsReportsList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Reports")
    swagger_topic = "Automation Analytics"


class AnalyticsReportDetail(AnalyticsGenericDetailView):
    name = _("Report")


class AnalyticsReportOptionsList(AnalyticsGenericListView):
    name = _("Report Options")


class AnalyticsAdoptionRateList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Adoption Rate")


class AnalyticsEventExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Event Explorer")


class AnalyticsHostExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Host Explorer")


class AnalyticsJobExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Job Explorer")


class AnalyticsProbeTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Probe Templates")


class AnalyticsProbeTemplateForHostsList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Probe Template For Hosts")


class AnalyticsRoiTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("ROI Templates")
View File

@@ -1,7 +1,5 @@
from collections import OrderedDict from collections import OrderedDict
from django.utils.translation import gettext_lazy as _
from rest_framework.permissions import IsAuthenticated from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer from rest_framework.renderers import JSONRenderer
from rest_framework.reverse import reverse from rest_framework.reverse import reverse
@@ -20,9 +18,6 @@ from awx.api import (
class BulkView(APIView): class BulkView(APIView):
name = _('Bulk')
swagger_topic = 'Bulk'
permission_classes = [IsAuthenticated] permission_classes = [IsAuthenticated]
renderer_classes = [ renderer_classes = [
renderers.BrowsableAPIRenderer, renderers.BrowsableAPIRenderer,
@@ -34,7 +29,6 @@ class BulkView(APIView):
'''List top level resources''' '''List top level resources'''
data = OrderedDict() data = OrderedDict()
data['host_create'] = reverse('api:bulk_host_create', request=request) data['host_create'] = reverse('api:bulk_host_create', request=request)
data['host_delete'] = reverse('api:bulk_host_delete', request=request)
data['job_launch'] = reverse('api:bulk_job_launch', request=request) data['job_launch'] = reverse('api:bulk_job_launch', request=request)
return Response(data) return Response(data)
@@ -73,20 +67,3 @@ class BulkHostCreateView(GenericAPIView):
result = serializer.create(serializer.validated_data) result = serializer.create(serializer.validated_data)
return Response(result, status=status.HTTP_201_CREATED) return Response(result, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class BulkHostDeleteView(GenericAPIView):
permission_classes = [IsAuthenticated]
model = Host
serializer_class = serializers.BulkHostDeleteSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
def get(self, request):
return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
def post(self, request):
serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
result = serializer.delete(serializer.validated_data)
return Response(result, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
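For reference, the surviving BulkHostCreateView accepts a single request that creates many hosts at once. A hedged usage sketch (the endpoint path is inferred from the 'api:bulk_host_create' reverse above; the server URL, inventory id, host names, and token are made up):

import requests

resp = requests.post(
    'https://awx.example.com/api/v2/bulk/host_create/',
    headers={'Authorization': 'Bearer <token>'},
    json={'inventory': 1, 'hosts': [{'name': 'host1.example.com'}, {'name': 'host2.example.com'}]},
)
print(resp.status_code, resp.json())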

View File

@@ -6,8 +6,6 @@ import io
import ipaddress
import os
import tarfile
-import time
-import re

import asn1
from awx.api import serializers
@@ -42,8 +40,6 @@ RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"
# │   │   └── receptor.key
# │   └── work-public-key.pem
# └── requirements.yml

class InstanceInstallBundle(GenericAPIView):
    name = _('Install Bundle')
    model = models.Instance
@@ -53,54 +49,56 @@ class InstanceInstallBundle(GenericAPIView):
    def get(self, request, *args, **kwargs):
        instance_obj = self.get_object()

-        if instance_obj.node_type not in ('execution', 'hop'):
+        if instance_obj.node_type not in ('execution',):
            return Response(
-                data=dict(msg=_('Install bundle can only be generated for execution or hop nodes.')),
+                data=dict(msg=_('Install bundle can only be generated for execution nodes.')),
                status=status.HTTP_400_BAD_REQUEST,
            )

        with io.BytesIO() as f:
            with tarfile.open(fileobj=f, mode='w:gz') as tar:
-                # copy /etc/receptor/tls/ca/mesh-CA.crt to receptor/tls/ca in the tar file
-                tar.add(os.path.realpath('/etc/receptor/tls/ca/mesh-CA.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/mesh-CA.crt")
+                # copy /etc/receptor/tls/ca/receptor-ca.crt to receptor/tls/ca in the tar file
+                tar.add(
+                    os.path.realpath('/etc/receptor/tls/ca/receptor-ca.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/receptor-ca.crt"
+                )

-                # copy /etc/receptor/work_public_key.pem to receptor/work_public_key.pem
-                tar.add('/etc/receptor/work_public_key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work_public_key.pem")
+                # copy /etc/receptor/signing/work-public-key.pem to receptor/work-public-key.pem
+                tar.add('/etc/receptor/signing/work-public-key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work-public-key.pem")

                # generate and write the receptor key to receptor/tls/receptor.key in the tar file
                key, cert = generate_receptor_tls(instance_obj)

-                def tar_addfile(tarinfo, filecontent):
-                    tarinfo.mtime = time.time()
-                    tarinfo.size = len(filecontent)
-                    tar.addfile(tarinfo, io.BytesIO(filecontent))
-
                key_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.key")
-                tar_addfile(key_tarinfo, key)
+                key_tarinfo.size = len(key)
+                tar.addfile(key_tarinfo, io.BytesIO(key))

                cert_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.crt")
                cert_tarinfo.size = len(cert)
-                tar_addfile(cert_tarinfo, cert)
+                tar.addfile(cert_tarinfo, io.BytesIO(cert))

                # generate and write install_receptor.yml to the tar file
-                playbook = generate_playbook(instance_obj).encode('utf-8')
+                playbook = generate_playbook().encode('utf-8')
                playbook_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/install_receptor.yml")
-                tar_addfile(playbook_tarinfo, playbook)
+                playbook_tarinfo.size = len(playbook)
+                tar.addfile(playbook_tarinfo, io.BytesIO(playbook))

                # generate and write inventory.yml to the tar file
                inventory_yml = generate_inventory_yml(instance_obj).encode('utf-8')
                inventory_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/inventory.yml")
-                tar_addfile(inventory_yml_tarinfo, inventory_yml)
+                inventory_yml_tarinfo.size = len(inventory_yml)
+                tar.addfile(inventory_yml_tarinfo, io.BytesIO(inventory_yml))

                # generate and write group_vars/all.yml to the tar file
                group_vars = generate_group_vars_all_yml(instance_obj).encode('utf-8')
                group_vars_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/group_vars/all.yml")
-                tar_addfile(group_vars_tarinfo, group_vars)
+                group_vars_tarinfo.size = len(group_vars)
+                tar.addfile(group_vars_tarinfo, io.BytesIO(group_vars))

                # generate and write requirements.yml to the tar file
                requirements_yml = generate_requirements_yml().encode('utf-8')
                requirements_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/requirements.yml")
-                tar_addfile(requirements_yml_tarinfo, requirements_yml)
+                requirements_yml_tarinfo.size = len(requirements_yml)
+                tar.addfile(requirements_yml_tarinfo, io.BytesIO(requirements_yml))

            # respond with the tarfile
            f.seek(0)
@@ -109,10 +107,8 @@ class InstanceInstallBundle(GenericAPIView):
        return response

-def generate_playbook(instance_obj):
-    playbook_yaml = render_to_string("instance_install_bundle/install_receptor.yml", context=dict(instance=instance_obj))
-    # convert consecutive newlines with a single newline
-    return re.sub(r'\n+', '\n', playbook_yaml)
+def generate_playbook():
+    return render_to_string("instance_install_bundle/install_receptor.yml")

def generate_requirements_yml():
@@ -124,21 +120,7 @@ def generate_inventory_yml(instance_obj):

def generate_group_vars_all_yml(instance_obj):
-    # get peers
-    peers = []
-    for addr in instance_obj.peers.select_related('instance'):
-        peers.append(dict(address=addr.get_full_address(), protocol=addr.protocol))
-    context = dict(instance=instance_obj, peers=peers)
-    canonical_addr = instance_obj.canonical_address
-    if canonical_addr:
-        context['listener_port'] = canonical_addr.port
-        protocol = canonical_addr.protocol if canonical_addr.protocol != 'wss' else 'ws'
-        context['listener_protocol'] = protocol
-    all_yaml = render_to_string("instance_install_bundle/group_vars/all.yml", context=context)
-    # convert consecutive newlines with a single newline
-    return re.sub(r'\n+', '\n', all_yaml)
+    return render_to_string("instance_install_bundle/group_vars/all.yml", context=dict(instance=instance_obj))

def generate_receptor_tls(instance_obj):
@@ -179,14 +161,14 @@ def generate_receptor_tls(instance_obj):
        .sign(key, hashes.SHA256())
    )

-    # sign csr with the receptor ca key from /etc/receptor/ca/mesh-CA.key
-    with open('/etc/receptor/tls/ca/mesh-CA.key', 'rb') as f:
+    # sign csr with the receptor ca key from /etc/receptor/ca/receptor-ca.key
+    with open('/etc/receptor/tls/ca/receptor-ca.key', 'rb') as f:
        ca_key = serialization.load_pem_private_key(
            f.read(),
            password=None,
        )

-    with open('/etc/receptor/tls/ca/mesh-CA.crt', 'rb') as f:
+    with open('/etc/receptor/tls/ca/receptor-ca.crt', 'rb') as f:
        ca_cert = x509.load_pem_x509_certificate(f.read())

    cert = (
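On both sides of this diff the install bundle is a gzipped tar whose top-level directory is named after the instance. A small stdlib sketch for inspecting a downloaded bundle (the file name is hypothetical):

import tarfile

# List the members of a downloaded install bundle.
with tarfile.open('node1_install_bundle.tar.gz', mode='r:gz') as tar:
    for member in tar.getmembers():
        print(member.name, member.size)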

View File

@@ -14,7 +14,6 @@ from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from rest_framework import status
-from rest_framework import serializers

# AWX
from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
@@ -32,7 +31,6 @@ from awx.api.views.labels import LabelSubListCreateAttachDetachView
from awx.api.serializers import (
    InventorySerializer,
-    ConstructedInventorySerializer,
    ActivityStreamSerializer,
    RoleSerializer,
    InstanceGroupSerializer,
@@ -81,9 +79,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
        # Do not allow changes to an Inventory kind.
        if kind is not None and obj.kind != kind:
-            return Response(
-                dict(error=_('You cannot turn a regular inventory into a "smart" or "constructed" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED
-            )
+            return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
        return super(InventoryDetail, self).update(request, *args, **kwargs)

    def destroy(self, request, *args, **kwargs):
@@ -98,29 +94,6 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
        return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)

-class ConstructedInventoryDetail(InventoryDetail):
-    serializer_class = ConstructedInventorySerializer
-
-
-class ConstructedInventoryList(InventoryList):
-    serializer_class = ConstructedInventorySerializer
-
-    def get_queryset(self):
-        r = super().get_queryset()
-        return r.filter(kind='constructed')
-
-
-class InventoryInputInventoriesList(SubListAttachDetachAPIView):
-    model = Inventory
-    serializer_class = InventorySerializer
-    parent_model = Inventory
-    relationship = 'input_inventories'
-
-    def is_valid_relation(self, parent, sub, created=False):
-        if sub.kind == 'constructed':
-            raise serializers.ValidationError({'error': 'You cannot add a constructed inventory to another constructed inventory.'})

class InventoryActivityStreamList(SubListAPIView):
    model = ActivityStream
    serializer_class = ActivityStreamSerializer
@@ -152,7 +125,6 @@ class InventoryObjectRolesList(SubListAPIView):
    serializer_class = RoleSerializer
    parent_model = Inventory
    search_fields = ('role_field', 'content_type__model')
-    deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()

View File

@@ -17,7 +17,7 @@ class MeshVisualizer(APIView):
    def get(self, request, format=None):
        data = {
            'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
-            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target__instance', 'source'), many=True).data,
+            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
        }

        return Response(data)

View File

@@ -15,7 +15,6 @@ from rest_framework.response import Response
from rest_framework import status

from awx.main.constants import ACTIVE_STATES
-from awx.main.models import Organization
from awx.main.utils import get_object_or_400
from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
from awx.main.models.organization import Team
@@ -51,7 +50,7 @@ class UnifiedJobDeletionMixin(object):
                return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
            else:
                # if it has been > 1 minute, events are probably lost
-                logger.warning('Allowing deletion of {} through the API without all events processed.'.format(obj.log_format))
+                logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))

        # Manually cascade delete events if unpartitioned job
        if obj.has_unpartitioned_events:
@@ -61,21 +60,6 @@ class UnifiedJobDeletionMixin(object):
        return Response(status=status.HTTP_204_NO_CONTENT)


-class OrganizationInstanceGroupMembershipMixin(object):
-    """
-    This mixin overloads attach/detach so that it calls Organization.save(),
-    to ensure instance group updates are persisted
-    """
-
-    def unattach(self, request, *args, **kwargs):
-        with transaction.atomic():
-            organization_queryset = Organization.objects.select_for_update()
-            organization = organization_queryset.get(pk=self.get_parent_object().id)
-            response = super(OrganizationInstanceGroupMembershipMixin, self).unattach(request, *args, **kwargs)
-            organization.save()
-        return response
-
-
class InstanceGroupMembershipMixin(object):
    """
    This mixin overloads attach/detach so that it calls InstanceGroup.save(),
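
The OrganizationInstanceGroupMembershipMixin removed above pairs select_for_update() with an explicit save() so that concurrent attach/detach requests serialize on the parent row. A minimal sketch of that locking pattern, with illustrative helper and argument names:

from django.db import transaction


def unattach_and_persist(model, parent_pk, do_unattach):
    # Row-level lock: a concurrent request cannot interleave between the
    # membership change and the save() that persists derived state.
    with transaction.atomic():
        parent = model.objects.select_for_update().get(pk=parent_pk)
        response = do_unattach()
        parent.save()
    return response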

View File

@@ -52,19 +52,22 @@ from awx.api.serializers import (
    WorkflowJobTemplateSerializer,
    CredentialSerializer,
)
-from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin, OrganizationInstanceGroupMembershipMixin
-from awx.api.views import immutablesharedfields
+from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin

logger = logging.getLogger('awx.api.views.organization')


-@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
    model = Organization
    serializer_class = OrganizationSerializer

+    def get_queryset(self):
+        qs = Organization.accessible_objects(self.request.user, 'read_role')
+        qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
+        qs = qs.prefetch_related('created_by', 'modified_by')
+        return qs

-@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    model = Organization
    serializer_class = OrganizationSerializer
@@ -107,7 +110,6 @@ class OrganizationInventoriesList(SubListAPIView):
    relationship = 'inventories'


-@immutablesharedfields
class OrganizationUsersList(BaseUsersList):
    model = User
    serializer_class = UserSerializer
@@ -116,7 +118,6 @@ class OrganizationUsersList(BaseUsersList):
    ordering = ('username',)


-@immutablesharedfields
class OrganizationAdminsList(BaseUsersList):
    model = User
    serializer_class = UserSerializer
@@ -155,7 +156,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
    parent_key = 'organization'


-@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
    model = Team
    serializer_class = TeamSerializer
@@ -202,12 +202,11 @@ class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemp
    relationship = 'notification_templates_approvals'


-class OrganizationInstanceGroupsList(OrganizationInstanceGroupMembershipMixin, SubListAttachDetachAPIView):
+class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
    model = InstanceGroup
    serializer_class = InstanceGroupSerializer
    parent_model = Organization
    relationship = 'instance_groups'
-    filter_read_permission = False


class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
@@ -215,7 +214,6 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
    serializer_class = CredentialSerializer
    parent_model = Organization
    relationship = 'galaxy_credentials'
-    filter_read_permission = False

    def is_valid_relation(self, parent, sub, created=False):
        if sub.kind != 'galaxy_api_token':
@@ -232,7 +230,6 @@ class OrganizationObjectRolesList(SubListAPIView):
    serializer_class = RoleSerializer
    parent_model = Organization
    search_fields = ('role_field', 'content_type__model')
-    deprecated = True

    def get_queryset(self):
        po = self.get_parent_object()

View File

@@ -13,7 +13,6 @@ from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _
-from django.urls import reverse as django_reverse

from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
@@ -21,14 +20,13 @@ from rest_framework import status

import requests

-from awx import MODE
from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest
-from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
+from awx.api.versioning import reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ
@@ -40,22 +38,36 @@ logger = logging.getLogger('awx.api.views.root')

class ApiRootView(APIView):
    permission_classes = (AllowAny,)
    name = _('REST API')
-    versioning_class = URLPathVersioning
+    versioning_class = None
    swagger_topic = 'Versioning'

    @method_decorator(ensure_csrf_cookie)
    def get(self, request, format=None):
        '''List supported API versions'''
-        v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
+        v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
        data = OrderedDict()
        data['description'] = _('AWX REST API')
        data['current_version'] = v2
        data['available_versions'] = dict(v2=v2)
+        data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
        data['custom_logo'] = settings.CUSTOM_LOGO
        data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
        data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
-        if MODE == 'development':
-            data['swagger'] = drf_reverse('api:schema-swagger-ui')
        return Response(data)


+class ApiOAuthAuthorizationRootView(APIView):
+    permission_classes = (AllowAny,)
+    name = _("API OAuth 2 Authorization Root")
+    versioning_class = None
+    swagger_topic = 'Authentication'
+
+    def get(self, request, format=None):
+        data = OrderedDict()
+        data['authorize'] = drf_reverse('api:authorize')
+        data['token'] = drf_reverse('api:token')
+        data['revoke_token'] = drf_reverse('api:revoke-token')
+        return Response(data)
@@ -69,7 +81,6 @@ class ApiVersionRootView(APIView):
        data['ping'] = reverse('api:api_v2_ping_view', request=request)
        data['instances'] = reverse('api:instance_list', request=request)
        data['instance_groups'] = reverse('api:instance_group_list', request=request)
-        data['receptor_addresses'] = reverse('api:receptor_addresses_list', request=request)
        data['config'] = reverse('api:api_v2_config_view', request=request)
        data['settings'] = reverse('api:setting_category_list', request=request)
        data['me'] = reverse('api:user_me_list', request=request)
@@ -83,15 +94,14 @@ class ApiVersionRootView(APIView):
        data['credentials'] = reverse('api:credential_list', request=request)
        data['credential_types'] = reverse('api:credential_type_list', request=request)
        data['credential_input_sources'] = reverse('api:credential_input_source_list', request=request)
+        data['applications'] = reverse('api:o_auth2_application_list', request=request)
+        data['tokens'] = reverse('api:o_auth2_token_list', request=request)
        data['metrics'] = reverse('api:metrics_view', request=request)
        data['inventory'] = reverse('api:inventory_list', request=request)
-        data['constructed_inventory'] = reverse('api:constructed_inventory_list', request=request)
        data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
        data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
        data['groups'] = reverse('api:group_list', request=request)
        data['hosts'] = reverse('api:host_list', request=request)
-        data['host_metrics'] = reverse('api:host_metric_list', request=request)
-        data['host_metric_summary_monthly'] = reverse('api:host_metric_summary_monthly_list', request=request)
        data['job_templates'] = reverse('api:job_template_list', request=request)
        data['jobs'] = reverse('api:job_list', request=request)
        data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)
@@ -112,11 +122,6 @@ class ApiVersionRootView(APIView):
        data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
        data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
        data['bulk'] = reverse('api:bulk', request=request)
-        data['analytics'] = reverse('api:analytics_root_view', request=request)
-        data['service_index'] = django_reverse('service-index-root')
-        data['role_definitions'] = django_reverse('roledefinition-list')
-        data['role_user_assignments'] = django_reverse('roleuserassignment-list')
-        data['role_team_assignments'] = django_reverse('roleteamassignment-list')
        return Response(data)
@@ -277,6 +282,15 @@ class ApiV2ConfigView(APIView):
            become_methods=PRIVILEGE_ESCALATION_METHODS,
        )

+        # If LDAP is enabled, user_ldap_fields will return a list of field
+        # names that are managed by LDAP and should be read-only for users with
+        # a non-empty ldap_dn attribute.
+        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
+            user_ldap_fields = ['username', 'password']
+            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
+            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
+            data['user_ldap_fields'] = user_ldap_fields
+
        if (
            request.user.is_superuser
            or request.user.is_system_auditor
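
One behavioral detail in the hunks above: DRF's reverse builds a fully qualified URL when it is given request=request and a path-only URL otherwise, which is why the two versions of the v2 link differ. A quick sketch (the view name is taken from the diff; the helper itself is illustrative):

from rest_framework.reverse import reverse


def v2_links(request):
    # With request: something like "http://host/api/v2/"; without: "/api/v2/".
    absolute = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
    relative = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
    return absolute, relative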

View File

@@ -1,4 +1,4 @@
-from hashlib import sha1, sha256
+from hashlib import sha1
import hmac
import logging
import urllib.parse
@@ -99,31 +99,14 @@ class WebhookReceiverBase(APIView):
    def get_signature(self):
        raise NotImplementedError

-    def must_check_signature(self):
-        return True
-
-    def is_ignored_request(self):
-        return False
-
    def check_signature(self, obj):
        if not obj.webhook_key:
            raise PermissionDenied

-        if not self.must_check_signature():
-            logger.debug("skipping signature validation")
-            return
-
-        hash_alg, expected_digest = self.get_signature()
-        if hash_alg == 'sha1':
-            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
-        elif hash_alg == 'sha256':
-            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha256)
-        else:
-            logger.debug("Unsupported signature type, supported: sha1, sha256, received: {}".format(hash_alg))
-            raise PermissionDenied
-
-        logger.debug("header signature: %s", expected_digest)
+        mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
+        logger.debug("header signature: %s", self.get_signature())
        logger.debug("calculated signature: %s", force_bytes(mac.hexdigest()))
-        if not hmac.compare_digest(force_bytes(mac.hexdigest()), expected_digest):
+        if not hmac.compare_digest(force_bytes(mac.hexdigest()), self.get_signature()):
            raise PermissionDenied

    @csrf_exempt
@@ -131,14 +114,10 @@ class WebhookReceiverBase(APIView):
        # Ensure that the full contents of the request are captured for multiple uses.
        request.body
-        logger.debug("headers: {}\ndata: {}\n".format(request.headers, request.data))
+        logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))

        obj = self.get_object()
        self.check_signature(obj)
-        if self.is_ignored_request():
-            # This was an ignored request type (e.g. ping), don't act on it
-            return Response({'message': _("Webhook ignored")}, status=status.HTTP_200_OK)

        event_type = self.get_event_type()
        event_guid = self.get_event_guid()
        event_ref = self.get_event_ref()
@@ -207,7 +186,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
        if hash_alg != 'sha1':
            logger.debug("Unsupported signature type, expected: sha1, received: {}".format(hash_alg))
            raise PermissionDenied
-        return hash_alg, force_bytes(signature)
+        return force_bytes(signature)


class GitlabWebhookReceiver(WebhookReceiverBase):
@@ -235,73 +214,15 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
        return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())

+    def get_signature(self):
+        return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
+
    def check_signature(self, obj):
        if not obj.webhook_key:
            raise PermissionDenied

-        token_from_request = force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
-
        # GitLab only returns the secret token, not an hmac hash. Use
        # the hmac `compare_digest` helper function to prevent timing
        # analysis by attackers.
-        if not hmac.compare_digest(force_bytes(obj.webhook_key), token_from_request):
+        if not hmac.compare_digest(force_bytes(obj.webhook_key), self.get_signature()):
            raise PermissionDenied
-
-
-class BitbucketDcWebhookReceiver(WebhookReceiverBase):
-    service = 'bitbucket_dc'
-
-    ref_keys = {
-        'repo:refs_changed': 'changes.0.toHash',
-        'mirror:repo_synchronized': 'changes.0.toHash',
-        'pr:opened': 'pullRequest.toRef.latestCommit',
-        'pr:from_ref_updated': 'pullRequest.toRef.latestCommit',
-        'pr:modified': 'pullRequest.toRef.latestCommit',
-    }
-
-    def get_event_type(self):
-        return self.request.META.get('HTTP_X_EVENT_KEY')
-
-    def get_event_guid(self):
-        return self.request.META.get('HTTP_X_REQUEST_ID')
-
-    def get_event_status_api(self):
-        # https://<bitbucket-base-url>/rest/build-status/1.0/commits/<commit-hash>
-        if self.get_event_type() not in self.ref_keys.keys():
-            return
-        if self.get_event_ref() is None:
-            return
-        any_url = None
-        if 'actor' in self.request.data:
-            any_url = self.request.data['actor'].get('links', {}).get('self')
-        if any_url is None and 'repository' in self.request.data:
-            any_url = self.request.data['repository'].get('links', {}).get('self')
-        if any_url is None:
-            return
-        any_url = any_url[0].get('href')
-        if any_url is None:
-            return
-        parsed = urllib.parse.urlparse(any_url)
-        return "{}://{}/rest/build-status/1.0/commits/{}".format(parsed.scheme, parsed.netloc, self.get_event_ref())
-
-    def is_ignored_request(self):
-        return self.get_event_type() not in [
-            'repo:refs_changed',
-            'mirror:repo_synchronized',
-            'pr:opened',
-            'pr:from_ref_updated',
-            'pr:modified',
-        ]
-
-    def must_check_signature(self):
-        # Bitbucket does not sign ping requests...
-        return self.get_event_type() != 'diagnostics:ping'
-
-    def get_signature(self):
-        header_sig = self.request.META.get('HTTP_X_HUB_SIGNATURE')
-        if not header_sig:
-            logger.debug("Expected signature missing from header key HTTP_X_HUB_SIGNATURE")
-            raise PermissionDenied
-        hash_alg, signature = header_sig.split('=')
-        return hash_alg, force_bytes(signature)
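
The removed check_signature above accepts a (hash_alg, digest) pair so that GitHub-style X-Hub-Signature headers can carry either a sha1 or a sha256 HMAC, and it compares digests in constant time. A standalone sketch of the same verification; the function name and the header format assumption are illustrative:

import hmac
from hashlib import sha1, sha256


def verify_webhook_signature(secret: bytes, body: bytes, header_sig: str) -> bool:
    # header_sig is assumed to look like "sha256=<hexdigest>"
    hash_alg, _, expected = header_sig.partition('=')
    digestmod = {'sha1': sha1, 'sha256': sha256}.get(hash_alg)
    if digestmod is None:
        return False  # unsupported algorithm
    mac = hmac.new(secret, msg=body, digestmod=digestmod)
    # compare_digest avoids leaking, via timing, where the digests diverge
    return hmac.compare_digest(mac.hexdigest(), expected)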

View File

@@ -14,7 +14,7 @@ class ConfConfig(AppConfig):
    def ready(self):
        self.module.autodiscover()

-        if not set(sys.argv) & {'migrate', 'check_migrations', 'showmigrations'}:
+        if not set(sys.argv) & {'migrate', 'check_migrations'}:
            from .settings import SettingsWrapper

            SettingsWrapper.initialize()

View File

@@ -55,7 +55,6 @@ register(
    # Optional; category_slug will be slugified version of category if not
    # explicitly provided.
    category_slug='cows',
-    hidden=True,
)

View File

@@ -61,10 +61,6 @@ class StringListBooleanField(ListField):
    def to_representation(self, value):
        try:
-            if isinstance(value, str):
-                # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
-                # DRF changed truthy and falsy lists to be capitalized
-                value = value.lower()
            if isinstance(value, (list, tuple)):
                return super(StringListBooleanField, self).to_representation(value)
            elif value in BooleanField.TRUE_VALUES:
@@ -82,8 +78,6 @@ class StringListBooleanField(ListField):
    def to_internal_value(self, data):
        try:
-            if isinstance(data, str):
-                data = data.lower()
            if isinstance(data, (list, tuple)):
                return super(StringListBooleanField, self).to_internal_value(data)
            elif data in BooleanField.TRUE_VALUES:
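
The lower() normalization being removed here compensates for DRF capitalizing entries in its truthy/falsy word lists (see the commit linked in the deleted comment): lowercasing string input first keeps membership tests against BooleanField.TRUE_VALUES case-insensitive. A small illustration, with an assumed helper name:

from rest_framework.fields import BooleanField


def to_bool(value):
    if isinstance(value, str):
        value = value.lower()  # 'True', 'TRUE', and 'true' all normalize to 'true'
    if value in BooleanField.TRUE_VALUES:
        return True
    if value in BooleanField.FALSE_VALUES:
        return False
    raise ValueError('not a recognized boolean value: {!r}'.format(value))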

View File

@@ -1,11 +1,13 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

+# AWX
+from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
+
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [('conf', '0005_v330_rename_two_session_settings')]

-    # this migration is doing nothing, and is here to preserve migrations files integrity
-    operations = []
+    operations = [migrations.RunPython(fill_ldap_group_type_params)]

View File

@@ -1,17 +0,0 @@
# Generated by Django 4.2 on 2023-06-09 19:51

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0009_rename_proot_settings'),
    ]

    operations = [
        migrations.AlterField(
            model_name='setting',
            name='value',
            field=models.JSONField(null=True),
        ),
    ]

View File

@@ -1,115 +0,0 @@
from django.db import migrations

LDAP_AUTH_CONF_KEYS = [
    'AUTH_LDAP_SERVER_URI',
    'AUTH_LDAP_BIND_DN',
    'AUTH_LDAP_BIND_PASSWORD',
    'AUTH_LDAP_START_TLS',
    'AUTH_LDAP_CONNECTION_OPTIONS',
    'AUTH_LDAP_USER_SEARCH',
    'AUTH_LDAP_USER_DN_TEMPLATE',
    'AUTH_LDAP_USER_ATTR_MAP',
    'AUTH_LDAP_GROUP_SEARCH',
    'AUTH_LDAP_GROUP_TYPE',
    'AUTH_LDAP_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_REQUIRE_GROUP',
    'AUTH_LDAP_DENY_GROUP',
    'AUTH_LDAP_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_ORGANIZATION_MAP',
    'AUTH_LDAP_TEAM_MAP',
    'AUTH_LDAP_1_SERVER_URI',
    'AUTH_LDAP_1_BIND_DN',
    'AUTH_LDAP_1_BIND_PASSWORD',
    'AUTH_LDAP_1_START_TLS',
    'AUTH_LDAP_1_CONNECTION_OPTIONS',
    'AUTH_LDAP_1_USER_SEARCH',
    'AUTH_LDAP_1_USER_DN_TEMPLATE',
    'AUTH_LDAP_1_USER_ATTR_MAP',
    'AUTH_LDAP_1_GROUP_SEARCH',
    'AUTH_LDAP_1_GROUP_TYPE',
    'AUTH_LDAP_1_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_1_REQUIRE_GROUP',
    'AUTH_LDAP_1_DENY_GROUP',
    'AUTH_LDAP_1_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_1_ORGANIZATION_MAP',
    'AUTH_LDAP_1_TEAM_MAP',
    'AUTH_LDAP_2_SERVER_URI',
    'AUTH_LDAP_2_BIND_DN',
    'AUTH_LDAP_2_BIND_PASSWORD',
    'AUTH_LDAP_2_START_TLS',
    'AUTH_LDAP_2_CONNECTION_OPTIONS',
    'AUTH_LDAP_2_USER_SEARCH',
    'AUTH_LDAP_2_USER_DN_TEMPLATE',
    'AUTH_LDAP_2_USER_ATTR_MAP',
    'AUTH_LDAP_2_GROUP_SEARCH',
    'AUTH_LDAP_2_GROUP_TYPE',
    'AUTH_LDAP_2_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_2_REQUIRE_GROUP',
    'AUTH_LDAP_2_DENY_GROUP',
    'AUTH_LDAP_2_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_2_ORGANIZATION_MAP',
    'AUTH_LDAP_2_TEAM_MAP',
    'AUTH_LDAP_3_SERVER_URI',
    'AUTH_LDAP_3_BIND_DN',
    'AUTH_LDAP_3_BIND_PASSWORD',
    'AUTH_LDAP_3_START_TLS',
    'AUTH_LDAP_3_CONNECTION_OPTIONS',
    'AUTH_LDAP_3_USER_SEARCH',
    'AUTH_LDAP_3_USER_DN_TEMPLATE',
    'AUTH_LDAP_3_USER_ATTR_MAP',
    'AUTH_LDAP_3_GROUP_SEARCH',
    'AUTH_LDAP_3_GROUP_TYPE',
    'AUTH_LDAP_3_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_3_REQUIRE_GROUP',
    'AUTH_LDAP_3_DENY_GROUP',
    'AUTH_LDAP_3_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_3_ORGANIZATION_MAP',
    'AUTH_LDAP_3_TEAM_MAP',
    'AUTH_LDAP_4_SERVER_URI',
    'AUTH_LDAP_4_BIND_DN',
    'AUTH_LDAP_4_BIND_PASSWORD',
    'AUTH_LDAP_4_START_TLS',
    'AUTH_LDAP_4_CONNECTION_OPTIONS',
    'AUTH_LDAP_4_USER_SEARCH',
    'AUTH_LDAP_4_USER_DN_TEMPLATE',
    'AUTH_LDAP_4_USER_ATTR_MAP',
    'AUTH_LDAP_4_GROUP_SEARCH',
    'AUTH_LDAP_4_GROUP_TYPE',
    'AUTH_LDAP_4_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_4_REQUIRE_GROUP',
    'AUTH_LDAP_4_DENY_GROUP',
    'AUTH_LDAP_4_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_4_ORGANIZATION_MAP',
    'AUTH_LDAP_4_TEAM_MAP',
    'AUTH_LDAP_5_SERVER_URI',
    'AUTH_LDAP_5_BIND_DN',
    'AUTH_LDAP_5_BIND_PASSWORD',
    'AUTH_LDAP_5_START_TLS',
    'AUTH_LDAP_5_CONNECTION_OPTIONS',
    'AUTH_LDAP_5_USER_SEARCH',
    'AUTH_LDAP_5_USER_DN_TEMPLATE',
    'AUTH_LDAP_5_USER_ATTR_MAP',
    'AUTH_LDAP_5_GROUP_SEARCH',
    'AUTH_LDAP_5_GROUP_TYPE',
    'AUTH_LDAP_5_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_5_REQUIRE_GROUP',
    'AUTH_LDAP_5_DENY_GROUP',
    'AUTH_LDAP_5_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_5_ORGANIZATION_MAP',
    'AUTH_LDAP_5_TEAM_MAP',
]


def remove_ldap_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=LDAP_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0010_change_to_JSONField'),
    ]

    operations = [
        migrations.RunPython(remove_ldap_auth_conf),
    ]

View File

@@ -1,20 +0,0 @@
# Generated by Django 4.2.10 on 2024-08-27 19:31

from django.db import migrations

OIDC_AUTH_CONF_KEYS = ['SOCIAL_AUTH_OIDC_KEY', 'SOCIAL_AUTH_OIDC_SECRET', 'SOCIAL_AUTH_OIDC_OIDC_ENDPOINT', 'SOCIAL_AUTH_OIDC_VERIFY_SSL']


def remove_oidc_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=OIDC_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0011_remove_ldap_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_oidc_auth_conf),
    ]

View File

@@ -1,22 +0,0 @@
from django.db import migrations

RADIUS_AUTH_CONF_KEYS = [
    'RADIUS_SERVER',
    'RADIUS_PORT',
    'RADIUS_SECRET',
]


def remove_radius_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=RADIUS_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0012_remove_oidc_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_radius_auth_conf),
    ]

View File

@@ -1,39 +0,0 @@
# Generated by Django 4.2.10 on 2024-08-27 14:20

from django.db import migrations

SAML_AUTH_CONF_KEYS = [
    'SAML_AUTO_CREATE_OBJECTS',
    'SOCIAL_AUTH_SAML_CALLBACK_URL',
    'SOCIAL_AUTH_SAML_METADATA_URL',
    'SOCIAL_AUTH_SAML_SP_ENTITY_ID',
    'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
    'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
    'SOCIAL_AUTH_SAML_ORG_INFO',
    'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
    'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
    'SOCIAL_AUTH_SAML_ENABLED_IDPS',
    'SOCIAL_AUTH_SAML_SECURITY_CONFIG',
    'SOCIAL_AUTH_SAML_SP_EXTRA',
    'SOCIAL_AUTH_SAML_EXTRA_DATA',
    'SOCIAL_AUTH_SAML_ORGANIZATION_MAP',
    'SOCIAL_AUTH_SAML_TEAM_MAP',
    'SOCIAL_AUTH_SAML_ORGANIZATION_ATTR',
    'SOCIAL_AUTH_SAML_TEAM_ATTR',
    'SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR',
]


def remove_saml_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=SAML_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0013_remove_radius_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_saml_auth_conf),
    ]

View File

@@ -1,81 +0,0 @@
# Generated by Django 4.2.10 on 2024-08-13 11:14

from django.db import migrations

SOCIAL_OAUTH_CONF_KEYS = [
    # MICROSOFT AZURE ACTIVE DIRECTORY SETTINGS
    'SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP',
    # GOOGLE OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_AUTH_EXTRA_ARGUMENTS',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP',
    # GITHUB OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_KEY',
    'SOCIAL_AUTH_GITHUB_SECRET',
    'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_TEAM_MAP',
    # GITHUB ORG OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ORG_KEY',
    'SOCIAL_AUTH_GITHUB_ORG_SECRET',
    'SOCIAL_AUTH_GITHUB_ORG_NAME',
    'SOCIAL_AUTH_GITHUB_ORG_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ORG_TEAM_MAP',
    # GITHUB TEAM OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_TEAM_KEY',
    'SOCIAL_AUTH_GITHUB_TEAM_SECRET',
    'SOCIAL_AUTH_GITHUB_TEAM_ID',
    'SOCIAL_AUTH_GITHUB_TEAM_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_TEAM_TEAM_MAP',
    # GITHUB ENTERPRISE OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_MAP',
    # GITHUB ENTERPRISE ORG OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_SECRET',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_NAME',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_TEAM_MAP',
    # GITHUB ENTERPRISE TEAM OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_SECRET',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_ID',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_TEAM_MAP',
]


def remove_social_oauth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=SOCIAL_OAUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0014_remove_saml_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_social_oauth_conf),
    ]

View File

@@ -1,25 +0,0 @@
from django.db import migrations

TACACS_PLUS_AUTH_CONF_KEYS = [
    'TACACSPLUS_HOST',
    'TACACSPLUS_PORT',
    'TACACSPLUS_SECRET',
    'TACACSPLUS_SESSION_TIMEOUT',
    'TACACSPLUS_AUTH_PROTOCOL',
    'TACACSPLUS_REM_ADDR',
]


def remove_tacacs_plus_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=TACACS_PLUS_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0015_remove_social_oauth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_tacacs_plus_auth_conf),
    ]

View File

@@ -0,0 +1,31 @@
import inspect

from django.conf import settings

import logging

logger = logging.getLogger('awx.conf.migrations')


def fill_ldap_group_type_params(apps, schema_editor):
    group_type = getattr(settings, 'AUTH_LDAP_GROUP_TYPE', None)
    Setting = apps.get_model('conf', 'Setting')

    group_type_params = {'name_attr': 'cn', 'member_attr': 'member'}
    qs = Setting.objects.filter(key='AUTH_LDAP_GROUP_TYPE_PARAMS')
    entry = None
    if qs.exists():
        entry = qs[0]
        group_type_params = entry.value
    else:
        return  # for new installs we prefer to use the default value

    init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
    for k in list(group_type_params.keys()):
        if k not in init_attrs:
            del group_type_params[k]

    entry.value = group_type_params
    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
    entry.save()
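
The helper above trims the stored parameters down to the keyword arguments that the configured group-type class actually accepts; inspect.getfullargspec(...).args[1:] skips self and yields the accepted names. Isolated, the trick looks like this (the class and function names below are illustrative, not from the source):

import inspect


class ExampleGroupType:
    # stand-in for a django-auth-ldap group type class
    def __init__(self, name_attr='cn', member_attr='member'):
        self.name_attr = name_attr
        self.member_attr = member_attr


def prune_params(cls, params):
    accepted = set(inspect.getfullargspec(cls.__init__).args[1:])  # drop 'self'
    return {k: v for k, v in params.items() if k in accepted}


print(prune_params(ExampleGroupType, {'name_attr': 'cn', 'stale_attr': 'x'}))
# -> {'name_attr': 'cn'}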

View File

@@ -7,10 +7,9 @@ import json
# Django
from django.db import models

-from ansible_base.lib.utils.models import prevent_search
-
# AWX
-from awx.main.models.base import CreatedModifiedModel
+from awx.main.fields import JSONBlob
+from awx.main.models.base import CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field
from awx.conf import settings_registry
@@ -19,7 +18,7 @@ __all__ = ['Setting']

class Setting(CreatedModifiedModel):
    key = models.CharField(max_length=255)
-    value = models.JSONField(null=True)
+    value = JSONBlob(null=True)
    user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))

    def __str__(self):

View File

@@ -127,8 +127,6 @@ class SettingsRegistry(object):
        encrypted = bool(field_kwargs.pop('encrypted', False))
        defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
        unit = field_kwargs.pop('unit', None)
-        hidden = field_kwargs.pop('hidden', False)
-        warning_text = field_kwargs.pop('warning_text', None)
        if getattr(field_kwargs.get('child', None), 'source', None) is not None:
            field_kwargs['child'].source = None
        field_instance = field_class(**field_kwargs)
@@ -136,14 +134,12 @@ class SettingsRegistry(object):
        field_instance.category = category
        field_instance.depends_on = depends_on
        field_instance.unit = unit
-        field_instance.hidden = hidden
        if placeholder is not empty:
            field_instance.placeholder = placeholder
        field_instance.defined_in_file = defined_in_file
        if field_instance.defined_in_file:
            field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
        field_instance.encrypted = encrypted
-        field_instance.warning_text = warning_text
        original_field_instance = field_instance
        if field_class != original_field_class:
            original_field_instance = original_field_class(**field_kwargs)

View File

@@ -1,20 +1,17 @@
# Python
import contextlib
import logging
-import psycopg
import threading
import time
import os
-from concurrent.futures import ThreadPoolExecutor

# Django
from django.conf import LazySettings
from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
-from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
+from django.core.exceptions import ImproperlyConfigured
from django.db import transaction, connection
-from django.db.utils import DatabaseError, ProgrammingError
+from django.db.utils import Error as DBError, ProgrammingError
from django.utils.functional import cached_property

# Django REST Framework
@@ -81,29 +78,18 @@ def _ctit_db_wrapper(trans_safe=False):
            logger.debug('Obtaining database settings in spite of broken transaction.')
            transaction.set_rollback(False)
        yield
-    except ProgrammingError as e:
-        # Exception raised for programming errors
-        # Examples may be table not found or already exists,
-        # this generally means we can't fetch Tower configuration
-        # because the database hasn't actually finished migrating yet;
-        # this is usually a sign that a service in a container (such as ws_broadcast)
-        # has come up *before* the database has finished migrating, and
-        # especially that the conf.settings table doesn't exist yet
-        # syntax error in the SQL statement, wrong number of parameters specified, etc.
+    except DBError as exc:
        if trans_safe:
-            logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
-        else:
-            logger.exception('Error modifying something related to database settings.')
-    except DatabaseError as e:
-        if trans_safe:
-            cause = e.__cause__
-            sqlstate = getattr(cause, 'sqlstate', None)
-            if cause and sqlstate:
-                sqlstate = cause.sqlstate
-                sqlstate_str = psycopg.errors.lookup(sqlstate)
-                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-            else:
-                logger.error(f'Error reading something related to database settings: {str(e)}.')
+            level = logger.warning
+            if isinstance(exc, ProgrammingError):
+                if 'relation' in str(exc) and 'does not exist' in str(exc):
+                    # this generally means we can't fetch Tower configuration
+                    # because the database hasn't actually finished migrating yet;
+                    # this is usually a sign that a service in a container (such as ws_broadcast)
+                    # has come up *before* the database has finished migrating, and
+                    # especially that the conf.settings table doesn't exist yet
+                    level = logger.debug
+            level(f'Database settings are not available, using defaults. error: {str(exc)}')
        else:
            logger.exception('Error modifying something related to database settings.')
    finally:
@@ -171,7 +157,7 @@ class EncryptedCacheProxy(object):
            obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty)
            if obj_id is empty:
                logger.info('Efficiency notice: Corresponding id not stored in cache %s', Setting.get_cache_id_key(key))
-                obj_id = getattr(_get_setting_from_db(self.registry, key), 'pk', None)
+                obj_id = getattr(self._get_setting_from_db(key), 'pk', None)
            elif obj_id == SETTING_CACHE_NONE:
                obj_id = None
            return method(TransientSetting(pk=obj_id, value=value), 'value')
@@ -180,6 +166,11 @@ class EncryptedCacheProxy(object):
        # a no-op; it just returns the provided value
        return value

+    def _get_setting_from_db(self, key):
+        field = self.registry.get_setting_field(key)
+        if not field.read_only:
+            return Setting.objects.filter(key=key, user__isnull=True).order_by('pk').first()
+
    def __getattr__(self, name):
        return getattr(self.cache, name)
@@ -195,22 +186,6 @@ def get_settings_to_cache(registry):
    return dict([(key, SETTING_CACHE_NOTSET) for key in get_writeable_settings(registry)])


-# Will first attempt to get the setting from the database in synchronous mode.
-# If call from async context, it will attempt to get the setting from the database in a thread.
-def _get_setting_from_db(registry, key):
-    def get_settings_from_db_sync(registry, key):
-        field = registry.get_setting_field(key)
-        if not field.read_only or key == 'INSTALL_UUID':
-            return Setting.objects.filter(key=key, user__isnull=True).order_by('pk').first()
-
-    try:
-        return get_settings_from_db_sync(registry, key)
-    except SynchronousOnlyOperation:
-        with ThreadPoolExecutor(max_workers=1) as executor:
-            future = executor.submit(get_settings_from_db_sync, registry, key)
-            return future.result()
-
-
def get_cache_value(value):
    """Returns the proper special cache setting for a value
    based on instance type.
@@ -370,7 +345,7 @@ class SettingsWrapper(UserSettingsHolder):
        setting_id = None
        # this value is read-only, however we *do* want to fetch its value from the database
        if not field.read_only or name == 'INSTALL_UUID':
-            setting = _get_setting_from_db(self.registry, name)
+            setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
            if setting:
                if getattr(field, 'encrypted', False):
                    value = decrypt_field(setting, 'value')
@@ -430,10 +405,6 @@ class SettingsWrapper(UserSettingsHolder):
        """Get value while accepting the in-memory cache if key is available"""
        with _ctit_db_wrapper(trans_safe=True):
            return self._get_local(name)
-        # If the last line did not return, that means we hit a database error
-        # in that case, we should not have a local cache value
-        # thus, return empty as a signal to use the default
-        return empty

    def __getattr__(self, name):
        value = empty
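
The module-level _get_setting_from_db being removed above wraps a plain ORM lookup so it also works from async contexts: Django raises SynchronousOnlyOperation when the sync ORM is touched from an event loop, and the helper retries the query on a worker thread. The pattern in isolation, with an assumed helper name:

from concurrent.futures import ThreadPoolExecutor

from django.core.exceptions import SynchronousOnlyOperation


def run_orm_query(query_func, *args):
    try:
        return query_func(*args)
    except SynchronousOnlyOperation:
        # Called from an event loop; re-run the blocking query on a thread.
        with ThreadPoolExecutor(max_workers=1) as executor:
            return executor.submit(query_func, *args).result()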

View File

@@ -61,3 +61,18 @@ def on_post_delete_setting(sender, **kwargs):
    key = getattr(instance, '_saved_key_', None)
    if key:
        handle_setting_change(key, True)


+@receiver(setting_changed)
+def disable_local_auth(**kwargs):
+    if (kwargs['setting'], kwargs['value']) == ('DISABLE_LOCAL_AUTH', True):
+        from django.contrib.auth.models import User
+        from oauth2_provider.models import RefreshToken
+        from awx.main.models.oauth import OAuth2AccessToken
+        from awx.main.management.commands.revoke_oauth2_tokens import revoke_tokens
+
+        logger.warning("Triggering token invalidation for local users.")
+
+        qs = User.objects.filter(profile__ldap_dn='', enterprise_auth__isnull=True, social_auth__isnull=True)
+        revoke_tokens(RefreshToken.objects.filter(revoked=None, user__in=qs))
+        revoke_tokens(OAuth2AccessToken.objects.filter(user__in=qs))

View File

@@ -8,6 +8,7 @@ from awx.main.utils.encryption import decrypt_field
from awx.conf import fields
from awx.conf.registry import settings_registry
from awx.conf.models import Setting
+from awx.sso import fields as sso_fields


@pytest.fixture
@@ -93,7 +94,9 @@ def test_setting_singleton_retrieve_readonly(api_request, dummy_setting):
@pytest.mark.django_db
def test_setting_singleton_update(api_request, dummy_setting):
-    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch('awx.conf.views.clear_setting_cache'):
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 3})
        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
        assert response.data['FOO_BAR'] == 3
@@ -102,10 +105,28 @@ def test_setting_singleton_update(api_request, dummy_setting):
        assert response.data['FOO_BAR'] == 4


+@pytest.mark.django_db
+def test_setting_singleton_update_hybriddictfield_with_forbidden(api_request, dummy_setting):
+    # Some HybridDictField subclasses have a child of _Forbidden,
+    # indicating that only the defined fields can be filled in. Make
+    # sure that the _Forbidden validator doesn't get used for the
+    # fields. See also https://github.com/ansible/awx/issues/4099.
+    with dummy_setting('FOO_BAR', field_class=sso_fields.SAMLOrgAttrField, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
+        api_request(
+            'patch',
+            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
+            data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}},
+        )
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
+        assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}
+
+
@pytest.mark.django_db
def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=4, category='FooBar', category_slug='foobar'), mock.patch(
-        'awx.conf.views.clear_setting_cache'
+        'awx.conf.views.handle_setting_changes'
    ):
        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 5})
        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
@@ -115,7 +136,7 @@ def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy
@pytest.mark.django_db
def test_setting_singleton_update_dont_change_encrypted_mark(api_request, dummy_setting):
    with dummy_setting('FOO_BAR', field_class=fields.CharField, encrypted=True, category='FooBar', category_slug='foobar'), mock.patch(
-        'awx.conf.views.clear_setting_cache'
+        'awx.conf.views.handle_setting_changes'
    ):
        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'password'})
        assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$')
@@ -134,14 +155,16 @@ def test_setting_singleton_update_runs_custom_validate(api_request, dummy_settin
    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_validate(
        'foobar', func_raising_exception
-    ), mock.patch('awx.conf.views.clear_setting_cache'):
+    ), mock.patch('awx.conf.views.handle_setting_changes'):
        response = api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 23})
        assert response.status_code == 400


@pytest.mark.django_db
def test_setting_singleton_delete(api_request, dummy_setting):
-    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch('awx.conf.views.clear_setting_cache'):
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
        api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
        assert not response.data['FOO_BAR']
@@ -150,7 +173,7 @@ def test_setting_singleton_delete(api_request, dummy_setting):
@pytest.mark.django_db
def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting):
    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=23, category='FooBar', category_slug='foobar'), mock.patch(
-        'awx.conf.views.clear_setting_cache'
+        'awx.conf.views.handle_setting_changes'
    ):
        api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))

View File

@@ -0,0 +1,25 @@
import pytest

from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
from awx.conf.models import Setting

from django.apps import apps


@pytest.mark.django_db
def test_fill_group_type_params_no_op():
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 0


@pytest.mark.django_db
def test_keep_old_setting_with_default_value():
    Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 1
    s = Setting.objects.first()
    assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}
    # NOTE: would be good to test the removal of attributes by migration
    # but this requires fighting with the validator and is not done here

View File

@@ -35,7 +35,7 @@ class TestStringListBooleanField:
        field = StringListBooleanField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
-        assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))
+        assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_representation_valid(self, value_in, value_known):
@@ -48,7 +48,7 @@ class TestStringListBooleanField:
        field = StringListBooleanField()
        with pytest.raises(ValidationError) as e:
            field.to_representation(value)
-        assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))
+        assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))


class TestListTuplesField:
@@ -67,7 +67,7 @@ class TestListTuplesField:
        field = ListTuplesField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
-        assert e.value.detail[0] == "Expected a list of tuples of max length 2 but got {} instead.".format(t)
+        assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)


class TestStringListPathField:
@@ -111,6 +111,7 @@ class TestURLField:
    @pytest.mark.parametrize(
        "url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",
        [
+            ("ldap://www.example.org42", "ldap", None, True, True),
            ("https://www.example.org42", "https", None, False, False),
            ("https://www.example.org", None, regex, None, True),
            ("https://www.example3.org", None, regex, None, False),

View File

@@ -13,7 +13,6 @@ from unittest import mock
from django.conf import LazySettings
from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
-from django.db.utils import Error as DBError
from django.utils.translation import gettext_lazy as _

import pytest
@@ -130,9 +129,9 @@ def test_default_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
        assert settings.AWX_SOME_SETTING == 'DEFAULT'
        assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -146,9 +145,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
        assert settings.AWX_SOME_SETTING == 'DEFAULT'
        assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False


def test_empty_setting(settings, mocker):
@@ -156,10 +155,10 @@ def test_empty_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        with pytest.raises(AttributeError):
            settings.AWX_SOME_SETTING
        assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET


def test_setting_from_db(settings, mocker):
@@ -168,9 +167,9 @@ def test_setting_from_db(settings, mocker):
    setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        assert settings.AWX_SOME_SETTING == 'FROM_DB'
        assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -205,8 +204,8 @@ def test_db_setting_update(settings, mocker):
    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
        settings.AWX_SOME_SETTING = 'NEW-VALUE'

    assert existing_setting.value == 'NEW-VALUE'
    existing_setting.save.assert_called_with(update_fields=['value'])
@@ -217,8 +216,8 @@ def test_db_setting_deletion(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
        del settings.AWX_SOME_SETTING

    assert existing_setting.delete.call_count == 1
@@ -283,10 +282,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
    # use its primary key as part of the encryption key
    setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-   mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+   with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        cache.set('AWX_ENCRYPTED', 'SECRET!')
        assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
        assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'


def test_readonly_sensitive_cache_data_is_encrypted(settings):
@@ -332,18 +331,3 @@ def test_in_memory_cache_works(settings):
    with mock.patch.object(settings, '_get_local') as mock_get:
        assert settings.AWX_VAR == 'DEFAULT'
        mock_get.assert_not_called()
-
-
-@pytest.mark.defined_in_file(AWX_VAR=[])
-def test_getattr_with_database_error(settings):
-    """
-    If a setting is defined via the registry and has a null-ish default which is not None
-    then referencing that setting during a database outage should give that default
-    this is regression testing for a bug where it would return None
-    """
-    settings.registry.register('AWX_VAR', field_class=fields.StringListField, default=[], category=_('System'), category_slug='system')
-    settings._awx_conf_memoizedcache.clear()
-    with mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection') as mock_ensure:
-        mock_ensure.side_effect = DBError('for test')
-        assert settings.AWX_VAR == []
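The removed regression test pins down a subtle fallback rule: a registry default that is falsy but not None (here []) must be returned, not None, when the database is unreachable. A standalone sketch of that contract (plain Python, not AWX code):

    DEFAULTS = {'AWX_VAR': []}

    def get_setting(key, fetch_from_db):
        try:
            return fetch_from_db(key)
        except OSError:  # stand-in for django.db.utils.Error
            return DEFAULTS[key]  # must yield [], not None

    def broken_db(key):
        raise OSError('db down')

    assert get_setting('AWX_VAR', broken_db) == []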

View File

@@ -26,11 +26,10 @@ from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdat
from awx.api.permissions import IsSystemAdminOrAuditor
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
-from awx.main.tasks.system import clear_setting_cache
+from awx.main.tasks.system import handle_setting_changes
from awx.conf.models import Setting
from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
from awx.conf import settings_registry
-from awx.main.utils.external_logging import reconfigure_rsyslog

SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'name'))
@@ -119,10 +118,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
                setting.save(update_fields=['value'])
                settings_change_list.append(key)
        if settings_change_list:
-           connection.on_commit(lambda: clear_setting_cache.delay(settings_change_list))
-           if any([setting.startswith('LOG_AGGREGATOR') for setting in settings_change_list]):
-               # call notify to rsyslog. no data is need so payload is empty
-               reconfigure_rsyslog.delay()
+           connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))

    def destroy(self, request, *args, **kwargs):
        instance = self.get_object()
@@ -137,10 +133,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
            setting.delete()
            settings_change_list.append(setting.key)
        if settings_change_list:
-           connection.on_commit(lambda: clear_setting_cache.delay(settings_change_list))
-           if any([setting.startswith('LOG_AGGREGATOR') for setting in settings_change_list]):
-               # call notify to rsyslog. no data is need so payload is empty
-               reconfigure_rsyslog.delay()
+           connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))

        # When TOWER_URL_BASE is deleted from the API, reset it to the hostname
        # used to make the request as a default.
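Both sides of these hunks share the same dispatch pattern: the task is queued via connection.on_commit so it fires only if the transaction actually commits. A minimal sketch of the pattern (standard Django API; the task name is taken from the '+' side):

    from django.db import connection, transaction

    def save_settings(changed_keys):
        with transaction.atomic():
            ...  # persist Setting rows
            keys = list(changed_keys)  # capture now; the callback runs after commit
            connection.on_commit(lambda: handle_setting_changes.delay(keys))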

View File

@@ -17,15 +17,14 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied

-# django-ansible-base
-from ansible_base.lib.utils.validation import to_python_boolean
-from ansible_base.rbac.models import RoleEvaluation
-from ansible_base.rbac import permission_registry
+# Django OAuth Toolkit
+from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken

# AWX
from awx.main.utils import (
    get_object_or_400,
    get_pk_from_dict,
+   to_python_boolean,
    get_licenser,
)
from awx.main.models import (
@@ -57,7 +56,6 @@ from awx.main.models import (
    Project,
    ProjectUpdate,
    ProjectUpdateEvent,
-   ReceptorAddress,
    Role,
    Schedule,
    SystemJob,
@@ -72,6 +70,8 @@ from awx.main.models import (
    WorkflowJobTemplateNode,
    WorkflowApproval,
    WorkflowApprovalTemplate,
+   ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
+   ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.models.mixins import ResourceMixin
@@ -79,6 +79,7 @@ __all__ = [
    'get_user_queryset',
    'check_user_access',
    'check_user_access_with_errors',
+   'user_accessible_objects',
    'consumer_access',
]
@@ -135,6 +136,10 @@ def register_access(model_class, access_class):
    access_registry[model_class] = access_class


+def user_accessible_objects(user, role_name):
+    return ResourceMixin._accessible_objects(User, user, role_name)
+
+
def get_user_queryset(user, model_class):
    """
    Return a queryset for the given model_class containing only the instances
@@ -239,10 +244,9 @@ class BaseAccess(object):
        return qs

    def filtered_queryset(self):
-       if permission_registry.is_registered(self.model):
-           return self.model.access_qs(self.user, 'view')
-       else:
-           raise NotImplementedError('Filtered queryset for model is not written')
+       # Override in subclasses
+       # filter objects according to user's read access
+       return self.model.objects.none()

    def can_read(self, obj):
        return bool(obj and self.get_queryset().filter(pk=obj.pk).exists())
@@ -263,11 +267,7 @@ class BaseAccess(object):
        return self.can_change(obj, data)

    def can_delete(self, obj):
-       if self.user.is_superuser:
-           return True
-       if obj._meta.model_name in [cls._meta.model_name for cls in permission_registry.all_registered_models]:
-           return self.user.has_obj_perm(obj, 'delete')
-       return False
+       return self.user.is_superuser

    def can_copy(self, obj):
        return self.can_add({'reference_obj': obj})
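The '+' side turns filtered_queryset into a deny-by-default hook: the base class returns an empty queryset and every access class must opt in by overriding it. A condensed restatement of the pattern (shape only, not the full classes):

    class BaseAccess:
        model = None

        def __init__(self, user):
            self.user = user

        def filtered_queryset(self):
            # deny by default; subclasses return what the user may read
            return self.model.objects.none()

    class InstanceGroupAccess(BaseAccess):
        model = InstanceGroup

        def filtered_queryset(self):
            return self.model.accessible_objects(self.user, 'read_role')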
@@ -366,9 +366,9 @@ class BaseAccess(object):
            report_violation = lambda message: None
        else:
            report_violation = lambda message: logger.warning(message)
-       if validation_info.get('trial', False) is True:
+       if validation_info.get('trial', False) is True or validation_info['instance_count'] == 10:  # basic 10 license

-           def report_violation(message):  # noqa
+           def report_violation(message):
                raise PermissionDenied(message)

        if check_expiration and validation_info.get('time_remaining', None) is None:
@@ -438,7 +438,10 @@ class BaseAccess(object):
            # Actions not possible for reason unrelated to RBAC
            # Cannot copy with validation errors, or update a manual group/project
-           if display_method in ['copy', 'start', 'schedule'] and isinstance(obj, JobTemplate):
+           if 'write' not in getattr(self.user, 'oauth_scopes', ['write']):
+               user_capabilities[display_method] = False  # Read tokens cannot take any actions
+               continue
+           elif display_method in ['copy', 'start', 'schedule'] and isinstance(obj, JobTemplate):
                if obj.validation_errors:
                    user_capabilities[display_method] = False
                    continue
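The getattr default in the '+' branch is what lets plain session users through: only requests authenticated with a token carry oauth_scopes, so the ['write'] fallback treats everyone else as writable. In isolation:

    class SessionUser:
        pass  # no oauth_scopes attribute

    class ReadTokenUser:
        oauth_scopes = ['read']

    def can_write(user):
        return 'write' in getattr(user, 'oauth_scopes', ['write'])

    assert can_write(SessionUser()) is True
    assert can_write(ReadTokenUser()) is False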
@@ -593,7 +596,7 @@ class InstanceGroupAccess(BaseAccess):
    - a superuser
    - admin role on the Instance group
    I can add/delete Instance Groups:
-   - a superuser(system administrator), because these are not org-scoped
+   - a superuser(system administrator)
    I can use Instance Groups when I have:
    - use_role on the instance group
    """
@@ -601,6 +604,9 @@ class InstanceGroupAccess(BaseAccess):
    model = InstanceGroup
    prefetch_related = ('instances',)

+   def filtered_queryset(self):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    @check_superuser
    def can_use(self, obj):
        return self.user in obj.use_role
@@ -619,7 +625,7 @@ class InstanceGroupAccess(BaseAccess):
    def can_delete(self, obj):
        if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
            return False
-       return self.user.has_obj_perm(obj, 'delete')
+       return self.user.is_superuser

class UserAccess(BaseAccess):
@@ -636,16 +642,18 @@ class UserAccess(BaseAccess):
    """

    model = User
-   prefetch_related = ('resource',)
+   prefetch_related = ('profile',)

    def filtered_queryset(self):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
            qs = User.objects.all()
        else:
            qs = (
-               User.objects.filter(pk__in=Organization.access_qs(self.user, 'view').values('member_role__members'))
+               User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
                | User.objects.filter(pk=self.user.id)
-               | User.objects.filter(is_superuser=True)
+               | User.objects.filter(
+                   pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
+               )
            ).distinct()
        return qs
@@ -660,7 +668,7 @@ class UserAccess(BaseAccess):
            return True
        if not settings.MANAGE_ORGANIZATION_AUTH:
            return False
-       return Organization.access_qs(self.user, 'change').exists()
+       return Organization.accessible_objects(self.user, 'admin_role').exists()

    def can_change(self, obj, data):
        if data is not None and ('is_superuser' in data or 'is_system_auditor' in data):
@@ -680,7 +688,7 @@ class UserAccess(BaseAccess):
        """
        Returns all organizations that count `u` as a member
        """
-       return Organization.access_qs(u, 'member')
+       return Organization.accessible_objects(u, 'member_role')

    def is_all_org_admin(self, u):
        """
@@ -703,15 +711,6 @@ class UserAccess(BaseAccess):
            if not allow_orphans:
                # in these cases only superusers can modify orphan users
                return False
-           if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-               # Permission granted if the user has all permissions that the target user has
-               target_perms = set(
-                   RoleEvaluation.objects.filter(role__in=obj.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
-               )
-               user_perms = set(
-                   RoleEvaluation.objects.filter(role__in=self.user.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
-               )
-               return not (target_perms - user_perms)
            return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
        else:
            return self.is_all_org_admin(obj)
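The removed branch implements a superset rule: you may manage a target user only if every (object, content type, codename) permission they hold is also one you hold. The set arithmetic, stripped of the ORM (values made up for illustration):

    target_perms = {(5, 'inventory', 'view_inventory'), (9, 'project', 'update_project')}
    user_perms = {(5, 'inventory', 'view_inventory'), (9, 'project', 'update_project'), (9, 'project', 'delete_project')}
    can_manage = not (target_perms - user_perms)  # empty difference => superset
    assert can_manage is True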
@@ -747,6 +746,82 @@ class UserAccess(BaseAccess):
        return False


+class OAuth2ApplicationAccess(BaseAccess):
+    """
+    I can read, change or delete OAuth 2 applications when:
+     - I am a superuser.
+     - I am the admin of the organization of the user of the application.
+     - I am a user in the organization of the application.
+    I can create OAuth 2 applications when:
+     - I am a superuser.
+     - I am the admin of the organization of the application.
+    """
+
+    model = OAuth2Application
+    select_related = ('user',)
+    prefetch_related = ('organization', 'oauth2accesstoken_set')
+
+    def filtered_queryset(self):
+        org_access_qs = Organization.accessible_objects(self.user, 'member_role')
+        return self.model.objects.filter(organization__in=org_access_qs)
+
+    def can_change(self, obj, data):
+        return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj, role_field='admin_role', mandatory=True)
+
+    def can_delete(self, obj):
+        return self.user.is_superuser or obj.organization in self.user.admin_of_organizations
+
+    def can_add(self, data):
+        if self.user.is_superuser:
+            return True
+        if not data:
+            return Organization.accessible_objects(self.user, 'admin_role').exists()
+        return self.check_related('organization', Organization, data, role_field='admin_role', mandatory=True)
+
+
+class OAuth2TokenAccess(BaseAccess):
+    """
+    I can read, change or delete an app token when:
+     - I am a superuser.
+     - I am the admin of the organization of the application of the token.
+     - I am the user of the token.
+    I can create an OAuth2 app token when:
+     - I have the read permission of the related application.
+    I can read, change or delete a personal token when:
+     - I am the user of the token
+     - I am the superuser
+    I can create an OAuth2 Personal Access Token when:
+     - I am a user.  But I can only create a PAT for myself.
+    """
+
+    model = OAuth2AccessToken
+    select_related = ('user', 'application')
+    prefetch_related = ('refresh_token',)
+
+    def filtered_queryset(self):
+        org_access_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
+        return self.model.objects.filter(application__organization__in=org_access_qs) | self.model.objects.filter(user__id=self.user.pk)
+
+    def can_delete(self, obj):
+        if (self.user.is_superuser) | (obj.user == self.user):
+            return True
+        elif not obj.application:
+            return False
+        return self.user in obj.application.organization.admin_role
+
+    def can_change(self, obj, data):
+        return self.can_delete(obj)
+
+    def can_add(self, data):
+        if 'application' in data:
+            app = get_object_from_data('application', OAuth2Application, data)
+            if app is None:
+                return True
+            return OAuth2ApplicationAccess(self.user).can_read(app)
+        return True
+
+
class OrganizationAccess(NotificationAttachMixin, BaseAccess):
    """
    I can see organizations when:
@@ -763,11 +838,13 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
    prefetch_related = (
        'created_by',
        'modified_by',
-       'resource',  # dab_resource_registry
    )
    # organization admin_role is not a parent of organization auditor_role
    notification_attach_roles = ['admin_role', 'auditor_role']

+   def filtered_queryset(self):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    @check_superuser
    def can_change(self, obj, data):
        if data and data.get('default_environment'):
@@ -835,6 +912,9 @@ class InventoryAccess(BaseAccess):
        Prefetch('labels', queryset=Label.objects.all().order_by('name')),
    )

+   def filtered_queryset(self, allowed=None, ad_hoc=None):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    @check_superuser
    def can_use(self, obj):
        return self.user in obj.use_role
@@ -843,7 +923,7 @@ class InventoryAccess(BaseAccess):
    def can_add(self, data):
        # If no data is specified, just checking for generic add permission?
        if not data:
-           return Organization.access_qs(self.user, 'add_inventory').exists()
+           return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
        return self.check_related('organization', Organization, data, role_field='inventory_admin_role')

    @check_superuser
@@ -868,6 +948,9 @@ class InventoryAccess(BaseAccess):
    def can_update(self, obj):
        return self.user in obj.update_role

+   def can_delete(self, obj):
+       return self.can_admin(obj, None)
+
    def can_run_ad_hoc_commands(self, obj):
        return self.user in obj.adhoc_role
@@ -905,7 +988,7 @@ class HostAccess(BaseAccess):
    def can_add(self, data):
        if not data:  # So the browseable API will work
-           return Inventory.access_qs(self.user, 'change').exists()
+           return Inventory.accessible_objects(self.user, 'admin_role').exists()

        # Checks for admin or change permission on inventory.
        if not self.check_related('inventory', Inventory, data):
@@ -967,7 +1050,7 @@ class GroupAccess(BaseAccess):
    def can_add(self, data):
        if not data:  # So the browseable API will work
-           return Inventory.access_qs(self.user, 'change').exists()
+           return Inventory.accessible_objects(self.user, 'admin_role').exists()
        if 'inventory' not in data:
            return False
        # Checks for admin or change permission on inventory.
@@ -1009,7 +1092,7 @@ class InventorySourceAccess(NotificationAttachMixin, UnifiedCredentialsMixin, Ba
    def can_add(self, data):
        if not data or 'inventory' not in data:
-           return Inventory.access_qs(self.user, 'change').exists()
+           return Inventory.accessible_objects(self.user, 'admin_role').exists()
        if not self.check_related('source_project', Project, data, role_field='use_role'):
            return False
@@ -1123,6 +1206,9 @@ class CredentialAccess(BaseAccess):
    )
    prefetch_related = ('admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members', 'credential_type', 'organization')

+   def filtered_queryset(self):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    @check_superuser
    def can_add(self, data):
        if not data:  # So the browseable API will work
@@ -1220,7 +1306,6 @@ class TeamAccess(BaseAccess):
        'created_by',
        'modified_by',
        'organization',
-       'resource',  # dab_resource_registry
    )

    def filtered_queryset(self):
@@ -1233,7 +1318,7 @@ class TeamAccess(BaseAccess):
    @check_superuser
    def can_add(self, data):
        if not data:  # So the browseable API will work
-           return Organization.access_qs(self.user, 'view').exists()
+           return Organization.accessible_objects(self.user, 'admin_role').exists()
        if not settings.MANAGE_ORGANIZATION_AUTH:
            return False
        return self.check_related('organization', Organization, data)
@@ -1291,11 +1376,12 @@ class ExecutionEnvironmentAccess(BaseAccess):

class ExecutionEnvironmentAccess(BaseAccess):
    """
    I can see an execution environment when:
-   - I can see its organization
-   - It is a global ExecutionEnvironment
+   - I'm a superuser
+   - I'm a member of the same organization
+   - it is a global ExecutionEnvironment
    I can create/change an execution environment when:
    - I'm a superuser
-   - I have an organization or object role that gives access
+   - I'm an admin for the organization(s)
    """

    model = ExecutionEnvironment
@@ -1304,34 +1390,26 @@ class ExecutionEnvironmentAccess(BaseAccess):
    def filtered_queryset(self):
        return ExecutionEnvironment.objects.filter(
-           Q(organization__in=Organization.access_ids_qs(self.user, 'view'))
-           | Q(organization__isnull=True)
-           | Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
+           Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
        ).distinct()

    @check_superuser
    def can_add(self, data):
        if not data:  # So the browseable API will work
-           return Organization.access_qs(self.user, 'add_executionenvironment').exists()
+           return Organization.accessible_objects(self.user, 'execution_environment_admin_role').exists()
        return self.check_related('organization', Organization, data, mandatory=True, role_field='execution_environment_admin_role')

    @check_superuser
    def can_change(self, obj, data):
        if obj and obj.organization_id is None:
            raise PermissionDenied
-       if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-           if not self.user.has_obj_perm(obj, 'change'):
-               return False
-           if data and 'organization' in data:
-               new_org = get_object_from_data('organization', Organization, data, obj=obj)
-               if not new_org or self.user not in new_org.execution_environment_admin_role:
-                   return False
-       else:
-           if self.user not in obj.organization.execution_environment_admin_role:
-               raise PermissionDenied
-           if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
-               return False
-       # Special case that check_related does not catch, org users can not remove the organization from the EE
-       if data and ('organization' in data or 'organization_id' in data):
-           if (not data.get('organization')) and (not data.get('organization_id')):
-               return False
-       return True
+       if self.user not in obj.organization.execution_environment_admin_role:
+           raise PermissionDenied
+       return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')

    def can_delete(self, obj):
        if obj.managed:
@@ -1361,10 +1439,13 @@ class ProjectAccess(NotificationAttachMixin, BaseAccess):
    prefetch_related = ('modified_by', 'created_by', 'organization', 'last_job', 'current_job')
    notification_attach_roles = ['admin_role']

+   def filtered_queryset(self):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    @check_superuser
    def can_add(self, data):
        if not data:  # So the browseable API will work
-           return Organization.access_qs(self.user, 'add_project').exists()
+           return Organization.accessible_objects(self.user, 'project_admin_role').exists()

        if data.get('default_environment'):
            ee = get_object_from_data('default_environment', ExecutionEnvironment, data)
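A recurring shape in these hunks: visibility is a Q-object union of org-scoped rows the user can read plus global rows with no organization, de-duplicated with distinct(). Restated on its own (same model names as the '+' side of the execution environment hunk above):

    from django.db.models import Q

    def visible_execution_environments(user):
        return ExecutionEnvironment.objects.filter(
            Q(organization__in=Organization.accessible_pk_qs(user, 'read_role')) | Q(organization__isnull=True)
        ).distinct()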
@@ -1460,6 +1541,9 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        Prefetch('last_job', queryset=UnifiedJob.objects.non_polymorphic()),
    )

+   def filtered_queryset(self):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    def can_add(self, data):
        """
        a user can create a job template if
@@ -1472,7 +1556,7 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
        """
        if not data:  # So the browseable API will work
-           return Project.access_qs(self.user, 'use_project').exists()
+           return Project.accessible_objects(self.user, 'use_role').exists()

        # if reference_obj is provided, determine if it can be copied
        reference_obj = data.get('reference_obj', None)
@@ -1497,8 +1581,6 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        inventory = get_value(Inventory, 'inventory')
        if inventory:
            if self.user not in inventory.use_role:
-               if self.save_messages:
-                   self.messages['inventory'] = [_('You do not have use permission on Inventory')]
                return False

        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
@@ -1507,16 +1589,11 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        project = get_value(Project, 'project')
        # If the user has admin access to the project (as an org admin), should
        # be able to proceed without additional checks.
-       if not project:
-           return False
-       if self.user not in project.use_role:
-           if self.save_messages:
-               self.messages['project'] = [_('You do not have use permission on Project')]
-           return False
-       return True
+       if project:
+           return self.user in project.use_role
+       else:
+           return False

    @check_superuser
    def can_copy_related(self, obj):
        """
@@ -1663,13 +1740,13 @@ class JobAccess(BaseAccess):
    def filtered_queryset(self):
        qs = self.model.objects

-       qs_jt = qs.filter(job_template__in=JobTemplate.access_qs(self.user, 'view'))
+       qs_jt = qs.filter(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role'))

        org_access_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
        if not org_access_qs.exists():
            return qs_jt

-       return qs.filter(Q(job_template__in=JobTemplate.access_qs(self.user, 'view')) | Q(organization__in=org_access_qs)).distinct()
+       return qs.filter(Q(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')) | Q(organization__in=org_access_qs)).distinct()

    def can_add(self, data, validate_license=True):
        raise NotImplementedError('Direct job creation not possible in v2 API')
@@ -1758,11 +1835,6 @@ class SystemJobTemplateAccess(BaseAccess):
    model = SystemJobTemplate

-   def filtered_queryset(self):
-       if self.user.is_superuser or self.user.is_system_auditor:
-           return self.model.objects.all()
-       return self.model.objects.none()
-
    @check_superuser
    def can_start(self, obj, validate_license=True):
        '''Only a superuser can start a job from a SystemJobTemplate'''
@@ -1776,11 +1848,6 @@ class SystemJobAccess(BaseAccess):
    model = SystemJob

-   def filtered_queryset(self):
-       if self.user.is_superuser or self.user.is_system_auditor:
-           return self.model.objects.all()
-       return self.model.objects.none()
-
    def can_start(self, obj, validate_license=True):
        return False  # no relaunching of system jobs
@@ -1880,7 +1947,7 @@ class WorkflowJobTemplateNodeAccess(UnifiedCredentialsMixin, BaseAccess):
    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'workflow_job_template')

    def filtered_queryset(self):
-       return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.access_qs(self.user, 'view'))
+       return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.accessible_objects(self.user, 'read_role'))

    @check_superuser
    def can_add(self, data):
@@ -1995,6 +2062,9 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
        'read_role',
    )

+   def filtered_queryset(self):
+       return self.model.accessible_objects(self.user, 'read_role')
+
    @check_superuser
    def can_add(self, data):
        """
@@ -2005,25 +2075,13 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
        Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
        """
        if not data:  # So the browseable API will work
-           return Organization.access_qs(self.user, 'add_workflowjobtemplate').exists()
+           return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

-       if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
-           if data.get('organization', None) is None:
-               if self.save_messages:
-                   self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
-           return False
-       if not self.check_related('inventory', Inventory, data, role_field='use_role'):
-           if self.save_messages:
-               self.messages['inventory'] = [_('You do not have use_role to the inventory')]
-           return False
-       if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
-           if self.save_messages:
-               self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
-           return False
-       return True
+       return bool(
+           self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
+           and self.check_related('inventory', Inventory, data, role_field='use_role')
+           and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
+       )

    def can_copy(self, obj):
        if self.save_messages:
@@ -2098,7 +2156,7 @@ class WorkflowJobAccess(BaseAccess):
    def filtered_queryset(self):
        return WorkflowJob.objects.filter(
            Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
-           | Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
+           | Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
        )

    def can_read(self, obj):
@@ -2176,7 +2234,7 @@ class WorkflowJobAccess(BaseAccess):
            if not node_access.can_add({'reference_obj': node}):
                wj_add_perm = False
        if not wj_add_perm and self.save_messages:
-           self.messages['workflow_job_template'] = _('You do not have permission to the workflow job resources required for relaunch.')
+           self.messages['workflow_job_template'] = _('You do not have permission to the workflow job ' 'resources required for relaunch.')
        return wj_add_perm

    def can_cancel(self, obj):
@@ -2376,29 +2434,6 @@ class InventoryUpdateEventAccess(BaseAccess):
        return False


-class ReceptorAddressAccess(BaseAccess):
-    """
-    I can see receptor address records whenever I can access the instance
-    """
-
-    model = ReceptorAddress
-
-    def filtered_queryset(self):
-        return self.model.objects.filter(Q(instance__in=Instance.accessible_pk_qs(self.user, 'read_role')))
-
-    @check_superuser
-    def can_add(self, data):
-        return False
-
-    @check_superuser
-    def can_change(self, obj, data):
-        return False
-
-    @check_superuser
-    def can_delete(self, obj):
-        return False
-
-
class SystemJobEventAccess(BaseAccess):
    """
    I can only see manage System Jobs events if I'm a super user
@@ -2496,11 +2531,12 @@ class UnifiedJobAccess(BaseAccess):
    def filtered_queryset(self):
        inv_pk_qs = Inventory._accessible_pk_qs(Inventory, self.user, 'read_role')
+       org_auditor_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
        qs = self.model.objects.filter(
            Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
            | Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs)
            | Q(adhoccommand__inventory__id__in=inv_pk_qs)
-           | Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
+           | Q(organization__in=org_auditor_qs)
        )
        return qs
@@ -2531,8 +2567,6 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
        if not JobLaunchConfigAccess(self.user).can_add(data):
            return False
        if not data:
-           if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-               return self.user.has_roles.filter(permission_partials__codename__in=['execute_jobtemplate', 'update_project', 'update_inventory']).exists()
            return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
        return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
@@ -2554,28 +2588,29 @@
class NotificationTemplateAccess(BaseAccess):
    """
-   Run standard logic from DAB RBAC
+   I can see/use a notification_template if I have permission to
    """

    model = NotificationTemplate
    prefetch_related = ('created_by', 'modified_by', 'organization')

    def filtered_queryset(self):
-       if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-           return self.model.access_qs(self.user, 'view')
        return self.model.objects.filter(
-           Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=self.user.auditor_of_organizations)
+           Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
        ).distinct()

    @check_superuser
    def can_add(self, data):
        if not data:
-           return Organization.access_qs(self.user, 'add_notificationtemplate').exists()
+           return Organization.accessible_objects(self.user, 'notification_admin_role').exists()
        return self.check_related('organization', Organization, data, role_field='notification_admin_role', mandatory=True)

    @check_superuser
    def can_change(self, obj, data):
-       return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')
+       if obj.organization is None:
+           # only superusers are allowed to edit orphan notification templates
+           return False
+       return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)

    def can_admin(self, obj, data):
        return self.can_change(obj, data)
@@ -2585,7 +2620,9 @@ class NotificationTemplateAccess(BaseAccess):
    @check_superuser
    def can_start(self, obj, validate_license=True):
-       return self.can_change(obj, None)
+       if obj.organization is None:
+           return False
+       return self.user in obj.organization.notification_admin_role


class NotificationAccess(BaseAccess):
@@ -2598,7 +2635,7 @@ class NotificationAccess(BaseAccess):
    def filtered_queryset(self):
        return self.model.objects.filter(
-           Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate'))
+           Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role'))
            | Q(notification_template__organization__in=self.user.auditor_of_organizations)
        ).distinct()
@@ -2658,6 +2695,8 @@ class ActivityStreamAccess(BaseAccess):
        'credential_type',
        'team',
        'ad_hoc_command',
+       'o_auth2_application',
+       'o_auth2_access_token',
        'notification_template',
        'notification',
        'label',
@@ -2712,7 +2751,11 @@ class ActivityStreamAccess(BaseAccess):
        if credential_set:
            q |= Q(credential__in=credential_set)

-       auditing_orgs = (Organization.access_qs(self.user, 'change') | Organization.access_qs(self.user, 'audit')).distinct().values_list('id', flat=True)
+       auditing_orgs = (
+           (Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
+           .distinct()
+           .values_list('id', flat=True)
+       )
        if auditing_orgs:
            q |= (
                Q(user__in=auditing_orgs.values('member_role__members'))
@@ -2720,7 +2763,7 @@ class ActivityStreamAccess(BaseAccess):
                | Q(notification_template__organization__in=auditing_orgs)
                | Q(notification__notification_template__organization__in=auditing_orgs)
                | Q(label__organization__in=auditing_orgs)
-               | Q(role__in=Role.visible_roles(self.user) if auditing_orgs else [])
+               | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
            )

        project_set = Project.accessible_pk_qs(self.user, 'read_role')
@@ -2743,6 +2786,14 @@ class ActivityStreamAccess(BaseAccess):
        if team_set:
            q |= Q(team__in=team_set)

+       app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
+       if app_set:
+           q |= Q(o_auth2_application__in=app_set)
+
+       token_set = OAuth2TokenAccess(self.user).filtered_queryset()
+       if token_set:
+           q |= Q(o_auth2_access_token__in=token_set)
+
        return qs.filter(q).distinct()
    def can_add(self, data):
@@ -2769,10 +2820,13 @@ class RoleAccess(BaseAccess):
    def filtered_queryset(self):
        result = Role.visible_roles(self.user)
-       # Make system admin/auditor mandatorily visible.
-       mandatories = ('system_administrator', 'system_auditor')
-       super_qs = Role.objects.filter(singleton_name__in=mandatories)
-       return result | super_qs
+       # Sanity check: is the requesting user an orphaned non-admin/auditor?
+       # if yes, make system admin/auditor mandatorily visible.
+       if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
+           mandatories = ('system_administrator', 'system_auditor')
+           super_qs = Role.objects.filter(singleton_name__in=mandatories)
+           result = result | super_qs
+       return result

    def can_add(self, obj, data):
        # Unsupported for now
@@ -2898,19 +2952,3 @@
for cls in BaseAccess.__subclasses__():
    access_registry[cls.model] = cls
access_registry[UnpartitionedJobEvent] = UnpartitionedJobEventAccess
-
-
-def optimize_queryset(queryset):
-    """
-    A utility method in case you already have a queryset and just want to
-    apply the standard optimizations for that model.
-    In other words, use if you do not want to start from filtered_queryset for some reason.
-    """
-    if not queryset.model or queryset.model not in access_registry:
-        return queryset
-    access_class = access_registry[queryset.model]
-    if access_class.select_related:
-        queryset = queryset.select_related(*access_class.select_related)
-    if access_class.prefetch_related:
-        queryset = queryset.prefetch_related(*access_class.prefetch_related)
-    return queryset
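For reference, the removed optimize_queryset helper is the query-count complement of filtered_queryset: it bolts each access class's declared select_related/prefetch_related onto a queryset you built yourself. A plausible usage (the model and filter are assumed for illustration):

    # avoids N+1 lookups without re-filtering through filtered_queryset
    jobs = optimize_queryset(UnifiedJob.objects.filter(status='running'))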

View File

@@ -2,14 +2,13 @@
import logging

# AWX
-from awx.main.analytics.subsystem_metrics import DispatcherMetrics, CallbackReceiverMetrics
+from awx.main.analytics.subsystem_metrics import Metrics
from awx.main.dispatch.publish import task
-from awx.main.dispatch import get_task_queuename
+from awx.main.dispatch import get_local_queuename

logger = logging.getLogger('awx.main.scheduler')


-@task(queue=get_task_queuename)
+@task(queue=get_local_queuename)
def send_subsystem_metrics():
-   DispatcherMetrics().send_metrics()
-   CallbackReceiverMetrics().send_metrics()
+   Metrics().send_metrics()
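Both sides register the function with AWX's dispatcher the same way; only the queue helper and metrics classes differ. Note the queue callable is passed unevaluated, which suggests it is resolved when the task is published rather than at import time. Shape of a registration (the task name here is hypothetical):

    from awx.main.dispatch.publish import task
    from awx.main.dispatch import get_local_queuename

    @task(queue=get_local_queuename)
    def send_some_metrics():  # hypothetical task
        ...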

View File

@@ -65,14 +65,16 @@ class FixedSlidingWindow:
        return sum(self.buckets.values()) or 0


-class RelayWebsocketStatsManager:
-   def __init__(self, local_hostname):
+class BroadcastWebsocketStatsManager:
+   def __init__(self, event_loop, local_hostname):
        self._local_hostname = local_hostname
+       self._event_loop = event_loop
        self._stats = dict()
        self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME

    def new_remote_host_stats(self, remote_hostname):
-       self._stats[remote_hostname] = RelayWebsocketStats(self._local_hostname, remote_hostname)
+       self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, remote_hostname)
        return self._stats[remote_hostname]

    def delete_remote_host_stats(self, remote_hostname):
@@ -92,10 +94,7 @@
        self.start()

    def start(self):
-       self.async_task = asyncio.get_running_loop().create_task(
-           self.run_loop(),
-           name='RelayWebsocketStatsManager.run_loop',
-       )
+       self.async_task = self._event_loop.create_task(self.run_loop())
        return self.async_task

    @classmethod
@@ -108,7 +107,7 @@
        return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))


-class RelayWebsocketStats:
+class BroadcastWebsocketStats:
    def __init__(self, local_hostname, remote_hostname):
        self._local_hostname = local_hostname
        self._remote_hostname = remote_hostname
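The '-' side names the asyncio task, which makes it identifiable in asyncio.all_tasks() output when debugging a stuck loop; the name= parameter is standard library (Python 3.8+). Standalone:

    import asyncio

    async def run_loop():
        await asyncio.sleep(0)

    async def main():
        t = asyncio.get_running_loop().create_task(run_loop(), name='stats.run_loop')
        await t
        assert t.get_name() == 'stats.run_loop'

    asyncio.run(main())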

View File

@@ -6,7 +6,7 @@ import platform
import distro

from django.db import connection
-from django.db.models import Count, Min
+from django.db.models import Count
from django.conf import settings
from django.contrib.sessions.models import Session
from django.utils.timezone import now, timedelta
@@ -35,7 +35,7 @@ data _since_ the last report date - i.e., new data in the last 24 hours)
"""


-def trivial_slicing(key, since, until, last_gather, **kwargs):
+def trivial_slicing(key, since, until, last_gather):
    if since is not None:
        return [(since, until)]
@@ -48,7 +48,7 @@ def trivial_slicing(key, since, until, last_gather):
    return [(last_entry, until)]


-def four_hour_slicing(key, since, until, last_gather, **kwargs):
+def four_hour_slicing(key, since, until, last_gather):
    if since is not None:
        last_entry = since
    else:
@@ -69,54 +69,6 @@ def four_hour_slicing(key, since, until, last_gather):
        start = end


-def host_metric_slicing(key, since, until, last_gather, **kwargs):
-    """
-    Slicing doesn't start 4 weeks ago, but sends whole table monthly or first time
-    """
-    from awx.main.models.inventory import HostMetric
-
-    if since is not None:
-        return [(since, until)]
-
-    from awx.conf.models import Setting
-
-    # Check if full sync should be done
-    full_sync_enabled = kwargs.get('full_sync_enabled', False)
-
-    last_entry = None
-    if not full_sync_enabled:
-        #
-        # If not, try incremental sync first
-        #
-        last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
-        last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)
-        last_entry = last_entries.get(key)
-
-        if not last_entry:
-            #
-            # If not done before, switch to full sync
-            #
-            full_sync_enabled = True
-
-    if full_sync_enabled:
-        #
-        # Find the lowest date for full sync
-        #
-        min_dates = HostMetric.objects.aggregate(min_last_automation=Min('last_automation'), min_last_deleted=Min('last_deleted'))
-        if min_dates['min_last_automation'] and min_dates['min_last_deleted']:
-            last_entry = min(min_dates['min_last_automation'], min_dates['min_last_deleted'])
-        elif min_dates['min_last_automation'] or min_dates['min_last_deleted']:
-            last_entry = min_dates['min_last_automation'] or min_dates['min_last_deleted']
-
-    if not last_entry:
-        # empty table
-        return []
-
-    start, end = last_entry, None
-    while start < until:
-        end = min(start + timedelta(days=30), until)
-        yield (start, end)
-        start = end
-
-
def _identify_lower(key, since, until, last_gather):
    from awx.conf.models import Setting
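The tail of the removed collector is a simple 30-day windowing generator; isolated, the slicing behaves like this:

    from datetime import datetime, timedelta

    def month_slices(start, until):
        while start < until:
            end = min(start + timedelta(days=30), until)
            yield (start, end)
            start = end

    slices = list(month_slices(datetime(2023, 1, 1), datetime(2023, 3, 1)))
    assert slices[0] == (datetime(2023, 1, 1), datetime(2023, 1, 31))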
@@ -131,7 +83,7 @@ def _identify_lower(key, since, until, last_gather):
return lower, last_entries return lower, last_entries
@register('config', '1.6', description=_('General platform configuration.')) @register('config', '1.4', description=_('General platform configuration.'))
def config(since, **kwargs): def config(since, **kwargs):
license_info = get_license() license_info = get_license()
install_type = 'traditional' install_type = 'traditional'
@@ -155,13 +107,10 @@ def config(since, **kwargs):
         'subscription_name': license_info.get('subscription_name'),
         'sku': license_info.get('sku'),
         'support_level': license_info.get('support_level'),
-        'usage': license_info.get('usage'),
         'product_name': license_info.get('product_name'),
         'valid_key': license_info.get('valid_key'),
         'satellite': license_info.get('satellite'),
         'pool_id': license_info.get('pool_id'),
-        'subscription_id': license_info.get('subscription_id'),
-        'account_number': license_info.get('account_number'),
         'current_instances': license_info.get('current_instances'),
         'automated_instances': license_info.get('automated_instances'),
         'automated_since': license_info.get('automated_since'),
@@ -170,7 +119,6 @@ def config(since, **kwargs):
         'compliant': license_info.get('compliant'),
         'date_warning': license_info.get('date_warning'),
         'date_expired': license_info.get('date_expired'),
-        'subscription_usage_model': getattr(settings, 'SUBSCRIPTION_USAGE_MODEL', ''),  # 1.5+
         'free_instances': license_info.get('free_instances', 0),
         'total_licensed_instances': license_info.get('instance_count', 0),
         'license_expiry': license_info.get('time_remaining', 0),
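
Editor's note: both sides register the collector the same way; only the schema version moves (1.6 with the subscription fields above, 1.4 without). A minimal sketch of how a registry decorator of this shape could work (hypothetical; the real `register` lives elsewhere in awx.main.analytics and tracks more metadata):

COLLECTORS = {}

def register(key, version, description=None, format='json', expensive=None, full_sync_interval=None):
    # Minimal sketch of the decorator pattern: remember each collector
    # under its key so the gatherer can iterate over all of them.
    def decorate(fn):
        fn._analytics = {'key': key, 'version': version, 'format': format}
        COLLECTORS.setdefault(key, []).append(fn)
        return fn
    return decorate

@register('config', '1.4', description='General platform configuration.')
def config(since, **kwargs):
    return {'install_type': 'traditional'}

print(COLLECTORS['config'][0]._analytics['version'])  # -> 1.4
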
@@ -399,10 +347,7 @@ def _copy_table(table, query, path):
     file_path = os.path.join(path, table + '_table.csv')
     file = FileSplitter(filespec=file_path)
     with connection.cursor() as cursor:
-        with cursor.copy(query) as copy:
-            while data := copy.read():
-                byte_data = bytes(data)
-                file.write(byte_data.decode())
+        cursor.copy_expert(query, file)
     return file.file_list()
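
Editor's note: the `-` side streams COPY output through psycopg 3's `cursor.copy()`; the `+` side reverts to psycopg 2's `copy_expert()`, which writes rows straight into a file-like object. A version-agnostic sketch of the two call shapes (assuming `conn` is an open connection of the matching driver):

import io

def copy_to_buffer(conn, query, use_psycopg3):
    # Both branches produce the same CSV text; only the driver API differs.
    buf = io.StringIO()
    with conn.cursor() as cursor:
        if use_psycopg3:
            # psycopg 3: copy() yields a readable object; read() returns
            # an empty (falsy) buffer once the stream is exhausted.
            with cursor.copy(query) as copy:
                while data := copy.read():
                    buf.write(bytes(data).decode())
        else:
            # psycopg 2: copy_expert() pushes the output into `buf`.
            cursor.copy_expert(query, buf)
    return buf.getvalue()
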
@@ -419,7 +364,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
                 resolved_action,
                 resolved_role,
                 -- '-' operator listed here:
-                -- https://www.postgresql.org/docs/15/functions-json.html
+                -- https://www.postgresql.org/docs/12/functions-json.html
                 -- note that operator is only supported by jsonb objects
                 -- https://www.postgresql.org/docs/current/datatype-json.html
                 (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
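
Editor's note: the SQL comments above explain why the query can strip bulky artifact data: PostgreSQL's `-` operator deletes a key, but only from `jsonb` values, hence the `::jsonb` cast applied to `event_data` further down. One quick way to see the operator in action from Django (assuming a PostgreSQL-backed connection):

from django.db import connection

with connection.cursor() as cursor:
    # jsonb - 'key' removes that key; the same expression fails on plain json.
    cursor.execute("""SELECT '{"rc": 0, "artifact_data": {"token": "x"}}'::jsonb - 'artifact_data'""")
    print(cursor.fetchone()[0])  # -> {'rc': 0}
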
@@ -444,6 +389,11 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
     return _copy_table(table='events', query=query(fr"replace({tbl}.event_data, '\u', '\u005cu')::jsonb"), path=full_path)
 
 
+@register('events_table', '1.5', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
+def events_table_unpartitioned(since, full_path, until, **kwargs):
+    return _events_table(since, full_path, until, '_unpartitioned_main_jobevent', 'created', **kwargs)
+
+
 @register('events_table', '1.5', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
 def events_table_partitioned_modified(since, full_path, until, **kwargs):
     return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs)
@@ -586,42 +536,3 @@ def workflow_job_template_node_table(since, full_path, **kwargs):
     ) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
     ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
     return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)
-
-
-@register(
-    'host_metric_table', '1.0', format='csv', description=_('Host Metric data, incremental/full sync'), expensive=host_metric_slicing, full_sync_interval=30
-)
-def host_metric_table(since, full_path, until, **kwargs):
-    host_metric_query = '''COPY (SELECT main_hostmetric.id,
-                   main_hostmetric.hostname,
-                   main_hostmetric.first_automation,
-                   main_hostmetric.last_automation,
-                   main_hostmetric.last_deleted,
-                   main_hostmetric.deleted,
-                   main_hostmetric.automated_counter,
-                   main_hostmetric.deleted_counter,
-                   main_hostmetric.used_in_inventories
-                   FROM main_hostmetric
-                   WHERE (main_hostmetric.last_automation > '{}' AND main_hostmetric.last_automation <= '{}') OR
-                         (main_hostmetric.last_deleted > '{}' AND main_hostmetric.last_deleted <= '{}')
-                   ORDER BY main_hostmetric.id ASC) TO STDOUT WITH CSV HEADER'''.format(
-        since.isoformat(), until.isoformat(), since.isoformat(), until.isoformat()
-    )
-    return _copy_table(table='host_metric', query=host_metric_query, path=full_path)
-
-
-@register('host_metric_summary_monthly_table', '1.0', format='csv', description=_('HostMetricSummaryMonthly export, full sync'), expensive=trivial_slicing)
-def host_metric_summary_monthly_table(since, full_path, **kwargs):
-    query = '''
-        COPY (SELECT main_hostmetricsummarymonthly.id,
-                     main_hostmetricsummarymonthly.date,
-                     main_hostmetricsummarymonthly.license_capacity,
-                     main_hostmetricsummarymonthly.license_consumed,
-                     main_hostmetricsummarymonthly.hosts_added,
-                     main_hostmetricsummarymonthly.hosts_deleted,
-                     main_hostmetricsummarymonthly.indirectly_managed_hosts
-        FROM main_hostmetricsummarymonthly
-        ORDER BY main_hostmetricsummarymonthly.id ASC) TO STDOUT WITH CSV HEADER
-    '''
-    return _copy_table(table='host_metric_summary_monthly', query=query, path=full_path)
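
Editor's note: the removed collector interpolates timestamps with `str.format()` rather than bound parameters, which is tolerable only because `since`/`until` are datetimes produced by the slicer, never user input. A standalone sketch of the same query assembly (hypothetical trimmed column list):

from datetime import datetime, timedelta, timezone

# Hypothetical trimmed-down version of the removed host_metric query.
QUERY = """COPY (SELECT id, hostname FROM main_hostmetric
WHERE (last_automation > '{0}' AND last_automation <= '{1}')
   OR (last_deleted > '{0}' AND last_deleted <= '{1}')
ORDER BY id ASC) TO STDOUT WITH CSV HEADER"""

until = datetime.now(timezone.utc)
since = until - timedelta(days=30)
print(QUERY.format(since.isoformat(), until.isoformat()))
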

Some files were not shown because too many files have changed in this diff.