Compare commits


3 Commits

Author SHA1 Message Date
Elijah DeLee
d65ab1c5ac add serializer stuff so it shows up in DRF 2025-03-18 09:14:55 -04:00
Elijah DeLee
a2decc7c60 fix lint 2025-03-18 09:14:55 -04:00
Elijah DeLee
97d03e434e Add concept of priority to job templates and jobs
This adds the concept of priority to jobs.
The task manager now orders pending jobs by priority, then by creation time.
All rules around instance group capacity still apply: even a job with
very high priority will not be scheduled if there is no capacity
available in its instance groups.

A higher number means higher priority.
The default priority is 0.

Dependencies spawned from other jobs are assigned the priority of the
job that caused them to be created.

Prompt-on-launch support for priority still needs to be added for consistency.
2025-03-18 09:14:55 -04:00
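A minimal sketch of the scheduling order the commit message describes: highest priority first, ties broken by oldest creation time. This is a pure-Python stand-in; the `PendingJob` class and job names are hypothetical, and the real task manager works on Django querysets of `UnifiedJob` rows rather than in-memory lists.

```python
from dataclasses import dataclass
from datetime import datetime


# Hypothetical stand-in for a pending job; the real model is
# awx.main.models.UnifiedJob with a `created` timestamp.
@dataclass
class PendingJob:
    name: str
    created: datetime
    priority: int = 0  # default priority is 0


jobs = [
    PendingJob("nightly-backup", datetime(2025, 3, 17, 1, 0)),
    PendingJob("hotfix-deploy", datetime(2025, 3, 18, 9, 0), priority=10),
    PendingJob("routine-sync", datetime(2025, 3, 16, 5, 0)),
]

# Higher number is higher priority, so sort priority descending, then
# created ascending (oldest first). Instance group capacity checks
# would still gate actual scheduling after this ordering.
ordered = sorted(jobs, key=lambda j: (-j.priority, j.created))
print([j.name for j in ordered])
# ['hotfix-deploy', 'routine-sync', 'nightly-backup']
```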
490 changed files with 6189 additions and 14858 deletions

View File

@@ -2,7 +2,7 @@
codecov:
notify:
after_n_builds: 9 # Number of test matrix+lint jobs uploading coverage
after_n_builds: 6 # Number of test matrix+lint jobs uploading coverage
wait_for_ci: false
require_ci_to_pass: false

View File

@@ -17,23 +17,6 @@ exclude_also =
[run]
branch = True
# NOTE: `disable_warnings` is needed when `pytest-cov` runs in tandem
# NOTE: with `pytest-xdist`. These warnings are false negative in this
# NOTE: context.
#
# NOTE: It's `coveragepy` that emits the warnings and previously they
# NOTE: wouldn't get on the radar of `pytest`'s `filterwarnings`
# NOTE: mechanism. This changed, however, with `pytest >= 8.4`. And
# NOTE: since we set `filterwarnings = error`, those warnings are being
# NOTE: raised as exceptions, cascading into `pytest`'s internals and
# NOTE: causing tracebacks and crashes of the test sessions.
#
# Ref:
# * https://github.com/pytest-dev/pytest-cov/issues/693
# * https://github.com/pytest-dev/pytest-cov/pull/695
# * https://github.com/pytest-dev/pytest-cov/pull/696
disable_warnings =
module-not-measured
omit =
awx/main/migrations/*
awx/settings/defaults.py
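The removed NOTE block above describes how pytest's `filterwarnings = error` setting turns coveragepy's `module-not-measured` warnings into session-crashing exceptions under `pytest >= 8.4`. A minimal sketch of that escalation mechanism, independent of pytest:

```python
import warnings

# Analogous to pytest's `filterwarnings = error`: every warning is
# raised as an exception instead of being printed.
warnings.simplefilter("error")

try:
    # Stand-in for the warning coveragepy emits when a module is
    # imported before coverage measurement starts.
    warnings.warn("module-not-measured")
except UserWarning as exc:
    print(f"warning escalated to exception: {exc}")
```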

View File

@@ -1,3 +1,3 @@
# Community Code of Conduct
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).

View File

@@ -13,7 +13,7 @@ body:
attributes:
label: Please confirm the following
options:
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
required: true
- label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
required: true

View File

@@ -5,7 +5,7 @@ contact_links:
url: https://github.com/ansible/awx#get-involved
about: For general debugging or technical support please see the Get Involved section of our readme.
- name: 📝 Ansible Code of Conduct
url: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser
about: AWX uses the Ansible Code of Conduct; ❤ Be nice to other members of the community. ☮ Behave.
- name: 💼 For Enterprise
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser

View File

@@ -13,7 +13,7 @@ body:
attributes:
label: Please confirm the following
options:
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
required: true
- label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
required: true

View File

@@ -4,8 +4,7 @@
<!---
If you are fixing an existing issue, please include "related #nnn" in your
commit message and your description; but you should still explain what
the change does. Also please make sure that if this PR has an attached JIRA, put AAP-<number>
in as the first entry for your PR title.
the change does.
-->
##### ISSUE TYPE
@@ -17,11 +16,17 @@ in as the first entry for your PR title.
##### COMPONENT NAME
<!--- Name of the module/plugin/module/task -->
- API
- UI
- Collection
- CLI
- Docs
- Other
##### AWX VERSION
<!--- Paste verbatim output from `make VERSION` between quotes below -->
```
```
##### ADDITIONAL INFORMATION

View File

@@ -11,6 +11,10 @@ inputs:
runs:
using: composite
steps:
- name: Get python version from Makefile
shell: bash
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Set lower case owner name
shell: bash
run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
@@ -22,9 +26,26 @@ runs:
run: |
echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- uses: ./.github/actions/setup-ssh-agent
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ inputs.private-github-key }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ inputs.private-github-key }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ inputs.private-github-key }}
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
- name: Pre-pull latest devel image to warm cache
shell: bash

View File

@@ -36,7 +36,7 @@ runs:
- name: Upgrade ansible-core
shell: bash
run: python -m pip install --upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core
- name: Install system deps
shell: bash

View File

@@ -1,27 +0,0 @@
name: 'Setup Python from Makefile'
description: 'Extract and set up Python version from Makefile'
inputs:
python-version:
description: 'Override Python version (optional)'
required: false
default: ''
working-directory:
description: 'Directory containing the Makefile'
required: false
default: '.'
runs:
using: composite
steps:
- name: Get python version from Makefile
shell: bash
run: |
if [ -n "${{ inputs.python-version }}" ]; then
echo "py_version=${{ inputs.python-version }}" >> $GITHUB_ENV
else
cd ${{ inputs.working-directory }}
echo "py_version=`make PYTHON_VERSION`" >> $GITHUB_ENV
fi
- name: Install python
uses: actions/setup-python@v5
with:
python-version: ${{ env.py_version }}

View File

@@ -1,29 +0,0 @@
name: 'Setup SSH for GitHub'
description: 'Configure SSH for private repository access'
inputs:
ssh-private-key:
description: 'SSH private key for repository access'
required: false
default: ''
runs:
using: composite
steps:
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ inputs.ssh-private-key }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ inputs.ssh-private-key }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
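The deleted composite action above (and its inlined copies elsewhere in this diff) implements a fallback: when no real deploy key is configured, a throwaway ed25519 key is generated so the ssh-agent step always receives a syntactically valid key. A rough Python equivalent of that decision, assuming `ssh-keygen` is on PATH; the `ssh_key_for_agent` helper is hypothetical:

```python
import os
import subprocess
import tempfile


def ssh_key_for_agent(secret_key: str | None) -> str:
    """Return the configured deploy key, or a fresh placeholder."""
    if secret_key:
        return secret_key
    keyfile = os.path.join(tempfile.mkdtemp(), "id_ed25519")
    # Same invocation as the workflow: ed25519, empty passphrase.
    subprocess.run(
        ["ssh-keygen", "-t", "ed25519", "-C", "github-actions",
         "-N", "", "-f", keyfile],
        check=True,
    )
    with open(keyfile) as fh:
        return fh.read()
```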

View File

@@ -8,10 +8,3 @@ updates:
labels:
- "docs"
- "dependencies"
- package-ecosystem: "pip"
directory: "requirements/"
schedule:
interval: "daily" #run daily until we trust it, then back this off to weekly
open-pull-requests-limit: 2
labels:
- "dependencies"

View File

@@ -70,10 +70,10 @@ Thank you for your submission and for supporting AWX!
- Hello, we'd love to help, but we need a little more information about the problem you're having. Screenshots, log outputs, or any reproducers would be very helpful.
### Code of Conduct
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html
### EE Contents / Community General
- Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://docs.ansible.com/projects/builder/en/stable/ \
- Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://ansible-builder.readthedocs.io/en/stable/ \
\
The Ansible Community is looking at building an EE that corresponds to all of the collections inside the ansible package. That may help you if and when it happens; see https://github.com/ansible-community/community-topics/issues/31 for details.
@@ -88,7 +88,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
- Hello, we think your idea is good! Please consider contributing a PR for this following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
### Receptor
- You can find the receptor docs here: https://docs.ansible.com/projects/receptor/en/latest/
- You can find the receptor docs here: https://receptor.readthedocs.io/en/latest/
- Hello, your issue seems related to receptor. Could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!
### Ansible Engine not AWX

View File

@@ -1,102 +0,0 @@
---
name: API Schema Change Detection
env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEV_DOCKER_OWNER: ${{ github.repository_owner }}
COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
UPSTREAM_REPOSITORY_ID: 91594105
on:
pull_request:
branches:
- devel
- release_**
- feature_**
- stable-**
jobs:
api-schema-detection:
name: Detect API Schema Changes
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
packages: write
contents: read
steps:
- uses: actions/checkout@v4
with:
show-progress: false
fetch-depth: 0
- name: Build awx_devel image for schema check
uses: ./.github/actions/awx_devel_image
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Detect API schema changes
id: schema-check
continue-on-error: true
run: |
AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
AWX_DOCKER_CMD='make detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}' \
make docker-runner 2>&1 | tee schema-diff.txt
exit ${PIPESTATUS[0]}
- name: Validate OpenAPI schema
id: schema-validation
continue-on-error: true
run: |
AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
AWX_DOCKER_CMD='make validate-openapi-schema' \
make docker-runner 2>&1 | tee schema-validation.txt
exit ${PIPESTATUS[0]}
- name: Add schema validation and diff to job summary
if: always()
# show text and if for some reason, it can't be generated, state that it can't be.
run: |
echo "## API Schema Check Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Show validation status
echo "### OpenAPI Validation" >> $GITHUB_STEP_SUMMARY
if [ -f schema-validation.txt ] && grep -q "✓ Schema is valid" schema-validation.txt; then
echo "✅ **Status:** PASSED - Schema is valid OpenAPI 3.0.3" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Status:** FAILED - Schema validation failed" >> $GITHUB_STEP_SUMMARY
if [ -f schema-validation.txt ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "<details><summary>Validation errors</summary>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
cat schema-validation.txt >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
echo "</details>" >> $GITHUB_STEP_SUMMARY
fi
fi
echo "" >> $GITHUB_STEP_SUMMARY
# Show schema changes
echo "### Schema Changes" >> $GITHUB_STEP_SUMMARY
if [ -f schema-diff.txt ]; then
if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
echo "**Changes detected** between this PR and the base branch" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Truncate to first 1000 lines to stay under GitHub's 1MB summary limit
TOTAL_LINES=$(wc -l < schema-diff.txt)
if [ $TOTAL_LINES -gt 1000 ]; then
echo "_Showing first 1000 of ${TOTAL_LINES} lines. See job logs or download artifact for full diff._" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo '```diff' >> $GITHUB_STEP_SUMMARY
head -n 1000 schema-diff.txt >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
else
echo "No schema changes detected" >> $GITHUB_STEP_SUMMARY
fi
else
echo "Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
fi
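The deleted workflow above leans on the `cmd 2>&1 | tee file; exit ${PIPESTATUS[0]}` idiom to save combined output for the job summary while still failing on the command's own exit code. A sketch of the same idea in Python, with the `make` invocation taken from the step above and otherwise illustrative:

```python
import subprocess

# Capture combined stdout/stderr for later summarizing, echo it for
# the live log, then propagate the command's real exit status.
proc = subprocess.run(
    ["make", "detect-schema-change"],
    stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True,
)
with open("schema-diff.txt", "w") as fh:
    fh.write(proc.stdout)
print(proc.stdout, end="")
raise SystemExit(proc.returncode)
```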

View File

@@ -32,9 +32,18 @@ jobs:
- name: api-lint
command: /var/lib/awx/venv/awx/bin/tox -e linters
coverage-upload-name: ""
- name: api-swagger
command: /start_tests.sh swagger
coverage-upload-name: ""
- name: awx-collection
command: /start_tests.sh test_collection_all
coverage-upload-name: "awx-collection"
- name: api-schema
command: >-
/start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
github.event.pull_request.base.ref || github.ref_name
}}
coverage-upload-name: ""
steps:
- uses: actions/checkout@v4
@@ -54,17 +63,6 @@ jobs:
AWX_DOCKER_CMD='${{ matrix.tests.command }}'
make docker-runner
- name: Inject PR number into coverage.xml
if: >-
!cancelled()
&& github.event_name == 'pull_request'
&& steps.make-run.outputs.cov-report-files != ''
run: |
if [ -f "reports/coverage.xml" ]; then
sed -i '2i<!-- PR ${{ github.event.pull_request.number }} -->' reports/coverage.xml
echo "Injected PR number ${{ github.event.pull_request.number }} into coverage.xml"
fi
- name: Upload test coverage to Codecov
if: >-
!cancelled()
@@ -104,14 +102,6 @@ jobs:
}}
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tests.name }}-artifacts
path: reports/coverage.xml
retention-days: 5
- name: Upload awx jUnit test reports
if: >-
!cancelled()
@@ -140,9 +130,9 @@ jobs:
with:
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.13'
python-version: '3.x'
- uses: ./.github/actions/run_awx_devel
id: awx
@@ -171,10 +161,6 @@ jobs:
show-progress: false
path: awx
- uses: ./awx/.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Checkout awx-operator
uses: actions/checkout@v4
with:
@@ -182,20 +168,39 @@ jobs:
repository: ansible/awx-operator
path: awx-operator
- name: Setup python, referencing action at awx relative path
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: '3.12'
python-version: ${{ env.py_version }}
- name: Install playbook dependencies
run: |
python -m pip install docker
python3 -m pip install docker
- name: Check Python version
working-directory: awx
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
make print-PYTHON
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
- name: Build AWX image
working-directory: awx
run: |
@@ -207,59 +212,27 @@ jobs:
- name: Run test deployment with awx-operator
working-directory: awx-operator
id: awx_operator_test
timeout-minutes: 60
continue-on-error: true
run: |
set +e
timeout 15m bash -elc '
python -m pip install -r molecule/requirements.txt
python -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
$(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
ansible-galaxy collection install -r molecule/requirements.yml
sudo rm -f $(which kustomize)
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
'
rc=$?
if [ $rc -eq 124 ]; then
echo "timed_out=true" >> "$GITHUB_OUTPUT"
fi
exit $rc
python3 -m pip install -r molecule/requirements.txt
python3 -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
$(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
ansible-galaxy collection install -r molecule/requirements.yml
sudo rm -f $(which kustomize)
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
env:
AWX_TEST_IMAGE: local/awx
AWX_TEST_VERSION: ci
AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
STORE_DEBUG_OUTPUT: true
- name: Collect awx-operator logs on timeout
# Only run on timeout; normal failures should use molecule's built-in log collection.
if: steps.awx_operator_test.outputs.timed_out == 'true'
run: |
mkdir -p "$DEBUG_OUTPUT_DIR"
if command -v kind >/dev/null 2>&1; then
for cluster in $(kind get clusters 2>/dev/null); do
kind export logs "$DEBUG_OUTPUT_DIR/$cluster" --name "$cluster" || true
done
fi
if command -v kubectl >/dev/null 2>&1; then
kubectl get all -A -o wide > "$DEBUG_OUTPUT_DIR/kubectl-get-all.txt" || true
kubectl get pods -A -o wide > "$DEBUG_OUTPUT_DIR/kubectl-get-pods.txt" || true
kubectl describe pods -A > "$DEBUG_OUTPUT_DIR/kubectl-describe-pods.txt" || true
fi
docker ps -a > "$DEBUG_OUTPUT_DIR/docker-ps.txt" || true
- name: Upload debug output
if: always()
if: failure()
uses: actions/upload-artifact@v4
with:
name: awx-operator-debug-output
path: ${{ env.DEBUG_OUTPUT_DIR }}
- name: Fail awx-operator check if test deployment failed
if: steps.awx_operator_test.outcome != 'success'
run: exit 1
collection-sanity:
name: awx_collection sanity
runs-on: ubuntu-latest
@@ -326,13 +299,9 @@ jobs:
with:
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.13'
- name: Remove system ansible to avoid conflicts
run: |
python -m pip uninstall -y ansible ansible-core || true
python-version: '3.x'
- uses: ./.github/actions/run_awx_devel
id: awx
@@ -343,9 +312,8 @@ jobs:
- name: Install dependencies for running tests
run: |
python -m pip install -e ./awxkit/
python -m pip install -r awx_collection/requirements.txt
hash -r # Rehash to pick up newly installed scripts
python3 -m pip install -e ./awxkit/
python3 -m pip install -r awx_collection/requirements.txt
- name: Run integration tests
id: make-run
@@ -357,7 +325,6 @@ jobs:
echo 'password = password' >> ~/.tower_cli.cfg
echo 'verify_ssl = false' >> ~/.tower_cli.cfg
TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
export PYTHONPATH="$(python -c 'import site; print(":".join(site.getsitepackages()))')${PYTHONPATH:+:$PYTHONPATH}"
make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
env:
ANSIBLE_TEST_PREFER_PODMAN: 1
@@ -389,7 +356,6 @@ jobs:
with:
name: coverage-${{ matrix.target-regex.name }}
path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
retention-days: 1
- uses: ./.github/actions/upload_awx_devel_logs
if: always()
@@ -407,26 +373,32 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.13'
- name: Remove system ansible to avoid conflicts
run: |
python -m pip uninstall -y ansible ansible-core || true
python-version: '3.x'
- name: Upgrade ansible-core
run: python -m pip install --upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core
- name: Download coverage artifacts
- name: Download coverage artifacts A to H
uses: actions/download-artifact@v4
with:
merge-multiple: true
name: coverage-a-h
path: coverage
- name: Download coverage artifacts I to P
uses: actions/download-artifact@v4
with:
name: coverage-i-p
path: coverage
- name: Download coverage artifacts Z to Z
uses: actions/download-artifact@v4
with:
name: coverage-r-z0-9
path: coverage
pattern: coverage-*
- name: Combine coverage
run: |
@@ -434,17 +406,56 @@ jobs:
mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
cd ~/.ansible/collections/ansible_collections/awx/awx
hash -r # Rehash to pick up newly installed scripts
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage combine --requirements
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage html
ansible-test coverage combine --requirements
ansible-test coverage html
echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage report >> $GITHUB_STEP_SUMMARY
ansible-test coverage report >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
echo >> $GITHUB_STEP_SUMMARY
echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
echo 'Download the HTML artifacts to view the coverage report.' >> $GITHUB_STEP_SUMMARY
# This is a huge hack, there's no official action for removing artifacts currently.
# Also ACTIONS_RUNTIME_URL and ACTIONS_RUNTIME_TOKEN aren't available in normal run
# steps, so we have to use github-script to get them.
#
# The advantage of doing this, though, is that we save on artifact storage space.
- name: Get secret artifact runtime URL
uses: actions/github-script@v6
id: get-runtime-url
with:
result-encoding: string
script: |
const { ACTIONS_RUNTIME_URL } = process.env;
return ACTIONS_RUNTIME_URL;
- name: Get secret artifact runtime token
uses: actions/github-script@v6
id: get-runtime-token
with:
result-encoding: string
script: |
const { ACTIONS_RUNTIME_TOKEN } = process.env;
return ACTIONS_RUNTIME_TOKEN;
- name: Remove intermediary artifacts
env:
ACTIONS_RUNTIME_URL: ${{ steps.get-runtime-url.outputs.result }}
ACTIONS_RUNTIME_TOKEN: ${{ steps.get-runtime-token.outputs.result }}
run: |
echo "::add-mask::${ACTIONS_RUNTIME_TOKEN}"
artifacts=$(
curl -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
${ACTIONS_RUNTIME_URL}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview \
| jq -r '.value | .[] | select(.name | startswith("coverage-")) | .url'
)
for artifact in $artifacts; do
curl -i -X DELETE -H "Accept: application/json;api-version=6.0-preview" -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" "$artifact"
done
- name: Upload coverage report as artifact
uses: actions/upload-artifact@v4
with:

View File

@@ -10,7 +10,6 @@ on:
- devel
- release_*
- feature_*
- stable-*
jobs:
push-development-images:
runs-on: ubuntu-latest
@@ -50,10 +49,14 @@ jobs:
run: |
echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
env:
OWNER: '${{ github.repository_owner }}'
- uses: ./.github/actions/setup-python
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}
- name: Log in to registry
run: |
@@ -70,9 +73,25 @@ jobs:
make ui
if: matrix.build-targets.image-name == 'awx'
- uses: ./.github/actions/setup-ssh-agent
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
- name: Build and push AWX devel images
run: |

View File

@@ -12,7 +12,7 @@ jobs:
with:
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'

View File

@@ -20,4 +20,4 @@ jobs:
run: |
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
ansible localhost -c local -m aws_s3 \
-a "bucket=awx-public-ci-files object=${{ github.event.repository.name }}/${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
-a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"

View File

@@ -34,11 +34,9 @@ jobs:
with:
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v4
- name: Install python requests
run: pip install requests
- name: Check if user is a member of Ansible org
uses: jannekem/run-python-script-action@v1
id: check_user

View File

@@ -33,7 +33,7 @@ jobs:
with:
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'

View File

@@ -36,7 +36,13 @@ jobs:
with:
show-progress: false
- uses: ./.github/actions/setup-python
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}
- name: Install dependencies
run: |

View File

@@ -1,248 +0,0 @@
# SonarCloud Analysis Workflow for awx
#
# This workflow runs SonarCloud analysis triggered by CI workflow completion.
# It is split into two separate jobs for clarity and maintainability:
#
# FLOW: CI completes → workflow_run triggers this workflow → appropriate job runs
#
# JOB 1: sonar-pr-analysis (for PRs)
# - Triggered by: workflow_run (CI on pull_request)
# - Steps: Download coverage → Get PR info → Get changed files → Run SonarCloud PR analysis
# - Scans: All changed files in the PR (Python, YAML, JSON, etc.)
# - Quality gate: Focuses on new/changed code in PR only
#
# JOB 2: sonar-branch-analysis (for long-lived branches)
# - Triggered by: workflow_run (CI on push to devel)
# - Steps: Download coverage → Run SonarCloud branch analysis
# - Scans: Full codebase
# - Quality gate: Focuses on overall project health
#
# This ensures coverage data is always available from CI before analysis runs.
#
# What files are scanned:
# - All files in the repository that SonarCloud can analyze
# - Excludes: tests, scripts, dev environments, external collections (see sonar-project.properties)
# With much help from:
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/30
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/32
name: SonarCloud
on:
workflow_run: # This is triggered by CI being completed.
workflows:
- CI
types:
- completed
permissions: read-all
jobs:
sonar-pr-analysis:
name: SonarCloud PR Analysis
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.event == 'pull_request' &&
github.repository == 'ansible/awx'
steps:
- uses: actions/checkout@v4
# Download all individual coverage artifacts from CI workflow
- name: Download coverage artifacts
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
workflow: CI
run_id: ${{ github.event.workflow_run.id }}
pattern: api-test-artifacts
# Extract PR metadata from workflow_run event
- name: Set PR metadata and prepare files for analysis
env:
COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
REPO_NAME: ${{ github.event.repository.full_name }}
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Find all downloaded coverage XML files
coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
echo "Found coverage files: $coverage_files"
echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
# Extract PR number from first coverage.xml file found
first_coverage=$(find . -name "coverage.xml" -type f | head -1)
if [ -f "$first_coverage" ]; then
PR_NUMBER=$(grep -m 1 '<!-- PR' "$first_coverage" | awk '{print $3}' || echo "")
else
PR_NUMBER=""
fi
echo "🔍 SonarCloud Analysis Decision Summary"
echo "========================================"
echo "├── CI Event: ✅ Pull Request"
echo "├── PR Number from coverage.xml: #${PR_NUMBER:-<not found>}"
if [ -z "$PR_NUMBER" ]; then
echo "##[error]❌ FATAL: PR number not found in coverage.xml"
echo "##[error]This job requires a PR number to run PR analysis."
echo "##[error]The ci workflow should have injected the PR number into coverage.xml."
exit 1
fi
# Get PR metadata from GitHub API
PR_DATA=$(gh api "repos/$REPO_NAME/pulls/$PR_NUMBER")
PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')
PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')
# Print summary
echo "🔍 SonarCloud Analysis Decision Summary"
echo "========================================"
echo "├── CI Event: ✅ Pull Request"
echo "├── PR Number: #$PR_NUMBER"
echo "├── Base Branch: $PR_BASE"
echo "├── Head Branch: $PR_HEAD"
echo "├── Repo: $REPO_NAME"
# Export to GitHub env for later steps
echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
echo "PR_BASE=$PR_BASE" >> $GITHUB_ENV
echo "PR_HEAD=$PR_HEAD" >> $GITHUB_ENV
echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_ENV
echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
# Get all changed files from PR (with error handling)
files=""
if [ -n "$PR_NUMBER" ]; then
if gh api repos/$REPO_NAME/pulls/$PR_NUMBER/files --jq '.[].filename' > /tmp/pr_files.txt 2>/tmp/pr_error.txt; then
files=$(cat /tmp/pr_files.txt)
else
echo "├── Changed Files: ⚠️ Could not fetch (likely test repo or PR not found)"
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
echo "├── Coverage Data: ✅ Available"
else
echo "├── Coverage Data: ⚠️ Not available"
fi
echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
# No files = no inclusions filter = full scan
exit 0
fi
else
echo "├── PR Number: ⚠️ Not available"
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
echo "├── Coverage Data: ✅ Available"
else
echo "├── Coverage Data: ⚠️ Not available"
fi
echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
exit 0
fi
# Get file extensions and count for summary
extensions=$(echo "$files" | sed 's/.*\.//' | sort | uniq | tr '\n' ',' | sed 's/,$//')
file_count=$(echo "$files" | wc -l)
echo "├── Changed Files: $file_count file(s) (.${extensions})"
# Check if coverage.xml exists and has content
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
echo "├── Coverage Data: ✅ Available"
else
echo "├── Coverage Data: ⚠️ Not available (analysis will proceed without coverage)"
fi
# Prepare file list for Sonar
echo "All changed files in PR:"
echo "$files"
# Filter out files that are excluded by .coveragerc to avoid coverage conflicts
# This prevents SonarCloud from analyzing files that have no coverage data
if [ -n "$files" ]; then
# Filter out files matching .coveragerc omit patterns
filtered_files=$(echo "$files" | grep -v "settings/.*_defaults\.py$" | grep -v "settings/defaults\.py$" | grep -v "main/migrations/")
# Show which files were filtered out for transparency
excluded_files=$(echo "$files" | grep -E "(settings/.*_defaults\.py$|settings/defaults\.py$|main/migrations/)" || true)
if [ -n "$excluded_files" ]; then
echo "├── Filtered out (coverage-excluded): $(echo "$excluded_files" | wc -l) file(s)"
echo "$excluded_files" | sed 's/^/│ - /'
fi
if [ -n "$filtered_files" ]; then
inclusions=$(echo "$filtered_files" | tr '\n' ',' | sed 's/,$//')
echo "SONAR_INCLUSIONS=$inclusions" >> $GITHUB_ENV
echo "└── Result: ✅ Will scan these files (excluding coverage-omitted files): $inclusions"
else
echo "└── Result: ✅ All changed files are excluded by coverage config, running full SonarCloud analysis"
# Don't set SONAR_INCLUSIONS, let it scan everything per sonar-project.properties
fi
else
echo "└── Result: ✅ Running SonarCloud analysis"
fi
- name: Add base branch
if: env.PR_NUMBER != ''
run: |
gh pr checkout ${{ env.PR_NUMBER }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: SonarCloud Scan
uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
with:
args: >
-Dsonar.scm.revision=${{ env.COMMIT_SHA }}
-Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
-Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
-Dsonar.pullrequest.base=${{ env.PR_BASE }}
-Dsonar.python.coverage.reportPaths=${{ env.COVERAGE_PATHS }}
${{ env.SONAR_INCLUSIONS && format('-Dsonar.inclusions={0}', env.SONAR_INCLUSIONS) || '' }}
sonar-branch-analysis:
name: SonarCloud Branch Analysis
runs-on: ubuntu-latest
if: |
github.event_name == 'workflow_run' &&
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.event == 'push' &&
github.repository == 'ansible/awx'
steps:
- uses: actions/checkout@v4
# Download all individual coverage artifacts from CI workflow (optional for branch pushes)
- name: Download coverage artifacts
continue-on-error: true
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
workflow: CI
run_id: ${{ github.event.workflow_run.id }}
pattern: api-test-artifacts
- name: Print SonarCloud Analysis Summary
env:
BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
run: |
# Find all downloaded coverage XML files
coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
echo "Found coverage files: $coverage_files"
echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
echo "🔍 SonarCloud Analysis Summary"
echo "=============================="
echo "├── CI Event: ✅ Push (via workflow_run)"
echo "├── Branch: $BRANCH_NAME"
echo "├── Coverage Files: ${coverage_files:-none}"
echo "├── Python Changes: N/A (Full codebase scan)"
echo "└── Result: ✅ Proceed - \"Running SonarCloud analysis\""
- name: SonarCloud Scan
uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
with:
args: >
-Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
-Dsonar.branch.name=${{ github.event.workflow_run.head_branch }}
${{ env.COVERAGE_PATHS && format('-Dsonar.python.coverage.reportPaths={0}', env.COVERAGE_PATHS) || '' }}
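The deleted SonarCloud workflow above depended on a handoff that also disappears in this diff: the CI workflow injected `<!-- PR <number> -->` as the second line of coverage.xml, and the PR-analysis job grepped it back out. A small sketch of the extraction side of that contract; `pr_number_from_coverage` is a hypothetical helper:

```python
import re
from pathlib import Path


def pr_number_from_coverage(path: str = "coverage.xml") -> str | None:
    """Extract the PR marker that CI injected into coverage.xml."""
    text = Path(path).read_text()
    match = re.search(r"<!-- PR (\d+) -->", text)
    return match.group(1) if match else None
```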

View File

@@ -64,9 +64,14 @@ jobs:
repository: ansible/awx-logos
path: awx-logos
- uses: ./awx/.github/actions/setup-python
- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
working-directory: awx
python-version: ${{ env.py_version }}
- name: Install playbook dependencies
run: |
@@ -85,11 +90,9 @@ jobs:
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
- name: Setup node and npm for new UI build
uses: actions/setup-node@v4
uses: actions/setup-node@v2
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: awx/awx/ui/**/package-lock.json
- name: Prebuild new UI for awx image (to speed up build process)
working-directory: awx

View File

@@ -11,7 +11,6 @@ on:
- devel
- release_**
- feature_**
- stable-**
jobs:
push:
runs-on: ubuntu-latest
@@ -24,26 +23,57 @@ jobs:
with:
show-progress: false
- name: Build awx_devel image to use for schema gen
uses: ./.github/actions/awx_devel_image
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
python-version: ${{ env.py_version }}
- name: Log in to registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
- name: Pre-pull image to warm build cache
run: |
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
- name: Build image
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
- name: Generate API Schema
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
--workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema
--workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
- name: Upload API Schema
uses: keithweaver/aws-s3-github-action@4dd5a7b81d54abaa23bbac92b27e85d7f405ae53
with:
command: cp
source: ${{ github.workspace }}/schema.json
destination: s3://awx-public-ci-files/${{ github.event.repository.name }}/${{ github.ref_name }}/schema.json
aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY }}
aws_secret_access_key: ${{ secrets.AWS_SECRET_KEY }}
aws_region: us-east-1
flags: --acl public-read --only-show-errors
env:
AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_REGION: 'us-east-1'
run: |
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
ansible localhost -c local -m aws_s3 \
-a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"

.gitignore (vendored)
View File

@@ -1,7 +1,6 @@
# Ignore generated schema
swagger.json
schema.json
schema.yaml
reference-schema.json
# Tags
@@ -151,8 +150,6 @@ use_dev_supervisor.txt
awx/ui/src
awx/ui/build
awx/ui/.ui-built
awx/ui_next
# Docs build stuff
docs/docsite/build/

View File

@@ -7,7 +7,7 @@ build:
os: ubuntu-22.04
tools:
python: >-
3.12
3.11
commands:
- pip install --user tox
- python3 -m tox -e docs --notest -v

View File

@@ -31,7 +31,7 @@ Have questions about this document or anything not covered here? Create a topic
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see [git push docs](https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt).
- If submitting a large code change, it's a good idea to create a [forum topic tagged with 'awx'](https://forum.ansible.com/tag/awx), and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
## Setting up your development environment

View File

@@ -1,6 +1,6 @@
-include awx/ui/Makefile
PYTHON := $(notdir $(shell for i in python3.12 python3.11 python3; do command -v $$i; done|sed 1q))
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
SHELL := bash
DOCKER_COMPOSE ?= docker compose
OFFICIAL ?= no
@@ -19,16 +19,8 @@ COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d .
COLLECTION_SANITY_ARGS ?= --docker
# collection unit testing directories
COLLECTION_TEST_DIRS ?= awx_collection/test/awx
# pytest added args to collect coverage
COVERAGE_ARGS ?= --cov --cov-report=xml --junitxml=reports/junit.xml
# pytest test directories
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
# pytest args to run tests in parallel
PARALLEL_TESTS ?= -n auto
# collection integration test directories (defaults to all)
COLLECTION_TEST_TARGET ?=
# Python version for ansible-test (must be 3.11, 3.12, or 3.13)
ANSIBLE_TEST_PYTHON_VERSION ?= 3.13
# args for collection install
COLLECTION_PACKAGE ?= awx
COLLECTION_NAMESPACE ?= awx
@@ -79,7 +71,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==25.3 setuptools==80.9.0 setuptools_scm[toml]==9.2.2 wheel==0.45.1 cython==3.1.3
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==70.3.0 setuptools_scm[toml]==8.1.0 wheel==0.45.1 cython==3.0.11
NAME ?= awx
@@ -107,8 +99,6 @@ else
endif
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
update_requirements upgrade_requirements update_requirements_dev \
docker_update_requirements docker_upgrade_requirements docker_update_requirements_dev \
develop refresh adduser migrate dbchange \
receiver test test_unit test_coverage coverage_html \
sdist \
@@ -148,7 +138,7 @@ clean-api:
rm -rf build $(NAME)-$(VERSION) *.egg-info
rm -rf .tox
find . -type f -regex ".*\.py[co]$$" -delete
find . -type d -name "__pycache__" -exec rm -rf {} +
find . -type d -name "__pycache__" -delete
rm -f awx/awx_test.sqlite3*
rm -rf requirements/vendor
rm -rf awx/projects
@@ -198,36 +188,6 @@ requirements_dev: requirements_awx requirements_awx_dev
requirements_test: requirements
## Update requirements files using pip-compile (run inside container)
update_requirements:
cd requirements && ./updater.sh run
## Upgrade all requirements to latest versions (run inside container)
upgrade_requirements:
cd requirements && ./updater.sh upgrade
## Update development requirements (run inside container)
update_requirements_dev:
cd requirements && ./updater.sh dev
## Update requirements using docker-runner
docker_update_requirements:
@echo "Running requirements updater..."
AWX_DOCKER_CMD='make update_requirements' $(MAKE) docker-runner
@echo "Requirements update complete!"
## Upgrade requirements using docker-runner
docker_upgrade_requirements:
@echo "Running requirements upgrader..."
AWX_DOCKER_CMD='make upgrade_requirements' $(MAKE) docker-runner
@echo "Requirements upgrade complete!"
## Update dev requirements using docker-runner
docker_update_requirements_dev:
@echo "Running dev requirements updater..."
AWX_DOCKER_CMD='make update_requirements_dev' $(MAKE) docker-runner
@echo "Dev requirements update complete!"
## "Install" awx package in development mode.
develop:
@if [ "$(VIRTUAL_ENV)" ]; then \
@@ -289,7 +249,7 @@ dispatcher:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py dispatcherd
$(PYTHON) manage.py run_dispatcher
## Run to start the zeromq callback receiver
receiver:
@@ -348,17 +308,20 @@ black: reports
@echo "fi" >> .git/hooks/pre-commit
@chmod +x .git/hooks/pre-commit
genschema: awx-link reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) spectacular --format openapi-json --file schema.json
genschema: reports
$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
mv swagger.json schema.json
genschema-yaml: awx-link reports
swagger: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) spectacular --format openapi --file schema.yaml
(set -o pipefail && py.test --cov --cov-report=xml --junitxml=reports/junit.xml $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
check: black
@@ -371,12 +334,14 @@ api-lint:
awx-link:
[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
PYTEST_ARGS ?= -n auto
## Run all API unit tests.
test:
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PARALLEL_TESTS) $(TEST_DIRS)
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PYTEST_ARGS) $(TEST_DIRS)
cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
@@ -385,7 +350,7 @@ live_test:
## Run all API unit tests with coverage enabled.
test_coverage:
$(MAKE) test PYTEST_ADDOPTS="--create-db $(COVERAGE_ARGS)"
$(MAKE) test PYTEST_ARGS="--create-db --cov --cov-report=xml --junitxml=reports/junit.xml"
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=awxkit/coverage.xml,reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
@@ -393,7 +358,7 @@ test_coverage:
fi
test_migrations:
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db $(PARALLEL_TESTS) $(COVERAGE_ARGS) $(TEST_DIRS)
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db --cov=awx --cov-report=xml --junitxml=reports/junit.xml $(PYTEST_ARGS) $(TEST_DIRS)
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
@@ -411,7 +376,7 @@ test_collection:
fi && \
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
ansible --version
py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
py.test $(COLLECTION_TEST_DIRS) --cov --cov-report=xml --junitxml=reports/junit.xml -v
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
@@ -462,8 +427,8 @@ test_collection_sanity:
test_collection_integration: install_collection
cd $(COLLECTION_INSTALL) && \
PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test integration --python $(ANSIBLE_TEST_PYTHON_VERSION) --coverage -vvv $(COLLECTION_TEST_TARGET) && \
PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test coverage xml --requirements --group-by command --group-by version
ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
ansible-test coverage xml --requirements --group-by command --group-by version
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
@@ -568,20 +533,14 @@ docker-compose-test: awx/projects docker-compose-sources
docker-compose-runtest: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
docker-compose-build-schema: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 make genschema
docker-compose-build-swagger: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
SCHEMA_DIFF_BASE_FOLDER ?= awx
SCHEMA_DIFF_BASE_BRANCH ?= devel
detect-schema-change: genschema
curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_FOLDER)/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
# Ignore differences in whitespace with -b
# diff exits with 1 when files differ - capture but don't fail
-diff -u -b reference-schema.json schema.json
validate-openapi-schema: genschema
@echo "Validating OpenAPI schema from schema.json..."
@python3 -c "from openapi_spec_validator import validate; import json; spec = json.load(open('schema.json')); validate(spec); print('✓ OpenAPI Schema is valid!')"
diff -u -b reference-schema.json schema.json
docker-compose-clean: awx/projects
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
@@ -614,7 +573,7 @@ docker-compose-build: Dockerfile.dev
docker-compose-buildx: Dockerfile.dev
- docker buildx create --name docker-compose-buildx
docker buildx use docker-compose-buildx
docker buildx build \
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \
@@ -674,7 +633,7 @@ awx-kube-build: Dockerfile
awx-kube-buildx: Dockerfile
- docker buildx create --name awx-kube-buildx
docker buildx use awx-kube-buildx
docker buildx build \
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg VERSION=$(VERSION) \
@@ -708,7 +667,7 @@ awx-kube-dev-build: Dockerfile.kube-dev
awx-kube-dev-buildx: Dockerfile.kube-dev
- docker buildx create --name awx-kube-dev-buildx
docker buildx use awx-kube-dev-buildx
docker buildx build \
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \

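The `detect-schema-change` target in the Makefile hunk above boils down to: fetch the reference schema published for the base branch, then diff it against the freshly generated `schema.json`, tolerating whitespace-only differences (`diff -b`). A rough Python equivalent, with whitespace normalized by re-serializing the JSON; the S3 URL layout follows the Makefile's right-hand (older) variant and is illustrative:

```python
import difflib
import json
import urllib.request

base_branch = "devel"  # SCHEMA_DIFF_BASE_BRANCH default
url = f"https://s3.amazonaws.com/awx-public-ci-files/{base_branch}/schema.json"

# Re-serializing both documents normalizes whitespace, roughly what
# the -b flag achieves for the Makefile's diff invocation.
with urllib.request.urlopen(url) as resp:
    reference = json.dumps(json.load(resp), indent=2, sort_keys=True)
with open("schema.json") as fh:
    current = json.dumps(json.load(fh), indent=2, sort_keys=True)

diff = difflib.unified_diff(
    reference.splitlines(), current.splitlines(),
    fromfile="reference-schema.json", tofile="schema.json", lineterm="",
)
for line in diff:
    print(line)
```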
View File

@@ -1,24 +1,13 @@
[![CI](https://github.com/ansible/awx/actions/workflows/ci.yml/badge.svg?branch=devel)](https://github.com/ansible/awx/actions/workflows/ci.yml) [![codecov](https://codecov.io/github/ansible/awx/graph/badge.svg?token=4L4GSP9IAR)](https://codecov.io/github/ansible/awx) [![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-yellow.svg)](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html) [![Apache v2 License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/ansible/awx/blob/devel/LICENSE.md) [![AWX on the Ansible Forum](https://img.shields.io/badge/mailing%20list-AWX-orange.svg)](https://forum.ansible.com/tag/awx)
[![CI](https://github.com/ansible/awx/actions/workflows/ci.yml/badge.svg?branch=devel)](https://github.com/ansible/awx/actions/workflows/ci.yml) [![codecov](https://codecov.io/github/ansible/awx/graph/badge.svg?token=4L4GSP9IAR)](https://codecov.io/github/ansible/awx) [![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-yellow.svg)](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [![Apache v2 License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/ansible/awx/blob/devel/LICENSE.md) [![AWX on the Ansible Forum](https://img.shields.io/badge/mailing%20list-AWX-orange.svg)](https://forum.ansible.com/tag/awx)
[![Ansible Matrix](https://img.shields.io/badge/matrix-Ansible%20Community-blueviolet.svg?logo=matrix)](https://chat.ansible.im/#/welcome) [![Ansible Discourse](https://img.shields.io/badge/discourse-Ansible%20Community-yellowgreen.svg?logo=discourse)](https://forum.ansible.com)
<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
> [!CAUTION]
> The last release of this repository was released on Jul 2, 2024.
> **Releases of this project are now paused during a large scale refactoring.**
> For more information, follow [the Forum](https://forum.ansible.com/) and - more specifically - see the various communications on the matter:
>
> * [Blog: Upcoming Changes to the AWX Project](https://www.ansible.com/blog/upcoming-changes-to-the-awx-project/)
> * [Streamlining AWX Releases](https://forum.ansible.com/t/streamlining-awx-releases/6894) Primary update
> * [Refactoring AWX into a Pluggable, Service-Oriented Architecture](https://forum.ansible.com/t/refactoring-awx-into-a-pluggable-service-oriented-architecture/7404)
> * [Upcoming changes to AWX Operator installation methods](https://forum.ansible.com/t/upcoming-changes-to-awx-operator-installation-methods/7598)
> * [AWX UI and credential types transitioning to the new pluggable architecture](https://forum.ansible.com/t/awx-ui-and-credential-types-transitioning-to-the-new-pluggable-architecture/8027)
AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is one of the upstream projects for [Red Hat Ansible Automation Platform](https://www.ansible.com/products/automation-platform).
To install AWX, please view the [Install guide](./INSTALL.md).
To learn more about using AWX, view the [AWX docs site](https://docs.ansible.com/projects/awx/en/latest/).
To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).
The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
@@ -41,11 +30,11 @@ If you're experiencing a problem that you feel is a bug in AWX or have ideas for
Code of Conduct
---------------
We require all of our community members and contributors to adhere to the [Ansible code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
We require all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
Get Involved
------------
We welcome your feedback and ideas via the [Ansible Forum](https://forum.ansible.com/tag/awx).
For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://docs.ansible.com/projects/awx/en/latest/contributor/communication.html).
For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://ansible.readthedocs.io/projects/awx/en/latest/contributor/communication.html).

View File

@@ -7,6 +7,7 @@ from rest_framework import serializers
# AWX
from awx.conf import fields, register, register_validate
register(
'SESSION_COOKIE_AGE',
field_class=fields.IntegerField,

View File

@@ -21,7 +21,7 @@ class NullFieldMixin(object):
"""
def validate_empty_values(self, data):
is_empty_value, data = super(NullFieldMixin, self).validate_empty_values(data)
(is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
if is_empty_value and data is None:
return (False, data)
return (is_empty_value, data)

View File

@@ -161,14 +161,16 @@ def get_view_description(view, html=False):
def get_default_schema():
# drf-spectacular is configured via REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS']
# Just use the DRF default, which will pick up our CustomAutoSchema
return views.APIView.schema
if settings.DYNACONF.is_development_mode:
from awx.api.swagger import schema_view
return schema_view
else:
return views.APIView.schema
class APIView(views.APIView):
# Schema is inherited from DRF's APIView, which uses DEFAULT_SCHEMA_CLASS
# No need to override it here - drf-spectacular will handle it
schema = get_default_schema()
versioning_class = URLPathVersioning
def initialize_request(self, request, *args, **kwargs):
@@ -764,7 +766,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
def unattach(self, request, *args, **kwargs):
sub_id, res = self.unattach_validate(request)
(sub_id, res) = self.unattach_validate(request)
if res:
return res
return self.unattach_by_id(request, sub_id)
@@ -842,7 +844,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first()
auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first()
if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct()
@@ -1023,9 +1025,6 @@ class GenericCancelView(RetrieveAPIView):
# In subclass set model, serializer_class
obj_permission_type = 'cancel'
def get(self, request, *args, **kwargs):
return super(GenericCancelView, self).get(request, *args, **kwargs)
@transaction.non_atomic_requests
def dispatch(self, *args, **kwargs):
return super(GenericCancelView, self).dispatch(*args, **kwargs)
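For context, a concrete cancel endpoint would subclass this view roughly as follows; this is a sketch, and the model/serializer pairing shown is an illustrative assumption rather than part of this diff:

```python
# Hypothetical subclass of GenericCancelView; names are illustrative.
from awx.api.generics import GenericCancelView
from awx.api.serializers import JobCancelSerializer
from awx.main.models import Job


class JobCancel(GenericCancelView):
    model = Job                             # set model in the subclass, per the comment above
    serializer_class = JobCancelSerializer  # and the serializer
    # obj_permission_type = 'cancel' is inherited from GenericCancelView
```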

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import MetricsView
urls = [re_path(r'^$', MetricsView.as_view(), name='metrics_view')]
__all__ = ['urls']

View File

@@ -111,7 +111,7 @@ class UnifiedJobEventPagination(Pagination):
def __init__(self, *args, **kwargs):
self.use_limit_paginator = False
self.limit_pagination = LimitPagination()
super().__init__(*args, **kwargs)
return super().__init__(*args, **kwargs)
def paginate_queryset(self, queryset, request, view=None):
if 'limit' in request.query_params:

View File

@@ -10,7 +10,7 @@ from rest_framework import permissions
# AWX
from awx.main.access import check_user_access
from awx.main.models import Inventory, UnifiedJob, Organization
from awx.main.models import Inventory, UnifiedJob
from awx.main.utils import get_object_or_400
logger = logging.getLogger('awx.api.permissions')
@@ -228,19 +228,12 @@ class InventoryInventorySourcesUpdatePermission(ModelAccessPermission):
class UserPermission(ModelAccessPermission):
def check_post_permissions(self, request, view, obj=None):
if not request.data:
return Organization.access_qs(request.user, 'change').exists()
return request.user.admin_of_organizations.exists()
elif request.user.is_superuser:
return True
raise PermissionDenied()
class IsSystemAdmin(permissions.BasePermission):
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated):
return False
return request.user.is_superuser
class IsSystemAdminOrAuditor(permissions.BasePermission):
"""
Allows write access only to system admin users.

View File

@@ -1,119 +0,0 @@
import warnings
from rest_framework.permissions import IsAuthenticated
from drf_spectacular.openapi import AutoSchema
from drf_spectacular.views import (
SpectacularAPIView,
SpectacularSwaggerView,
SpectacularRedocView,
)
def filter_credential_type_schema(
result,
generator, # NOSONAR
request, # NOSONAR
public, # NOSONAR
):
"""
Postprocessing hook to filter CredentialType kind enum values.
For CredentialTypeRequest and PatchedCredentialTypeRequest schemas (POST/PUT/PATCH),
filter the 'kind' enum to only show 'cloud' and 'net' values.
This ensures the OpenAPI schema accurately reflects that only 'cloud' and 'net'
credential types can be created or modified via the API, matching the validation
in CredentialTypeSerializer.validate().
Args:
result: The OpenAPI schema dict to be modified
generator, request, public: Required by drf-spectacular interface (unused)
Returns:
The modified OpenAPI schema dict
"""
schemas = result.get('components', {}).get('schemas', {})
# Filter CredentialTypeRequest (POST/PUT) - field is required
if 'CredentialTypeRequest' in schemas:
kind_prop = schemas['CredentialTypeRequest'].get('properties', {}).get('kind', {})
if 'enum' in kind_prop:
# Filter to only cloud and net (no None - field is required)
kind_prop['enum'] = ['cloud', 'net']
kind_prop['description'] = "* `cloud` - Cloud\\n* `net` - Network"
# Filter PatchedCredentialTypeRequest (PATCH) - field is optional
if 'PatchedCredentialTypeRequest' in schemas:
kind_prop = schemas['PatchedCredentialTypeRequest'].get('properties', {}).get('kind', {})
if 'enum' in kind_prop:
# Filter to only cloud and net (None allowed - field can be omitted in PATCH)
kind_prop['enum'] = ['cloud', 'net', None]
kind_prop['description'] = "* `cloud` - Cloud\\n* `net` - Network"
return result
class CustomAutoSchema(AutoSchema):
"""Custom AutoSchema to add swagger_topic to tags and handle deprecated endpoints."""
def get_tags(self):
tags = []
try:
if hasattr(self.view, 'get_serializer'):
serializer = self.view.get_serializer()
else:
serializer = None
except Exception:
serializer = None
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for this view.'.format(self.view.__class__.__name__)
)
if hasattr(self.view, 'swagger_topic'):
tags.append(str(self.view.swagger_topic).title())
elif serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
elif hasattr(self.view, 'model'):
tags.append(str(self.view.model._meta.verbose_name_plural).title())
else:
tags = super().get_tags() # Use default drf-spectacular behavior
if not tags:
warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
tags = ['api'] # Fallback to default value
return tags
def is_deprecated(self):
"""Return `True` if this operation is to be marked as deprecated."""
return getattr(self.view, 'deprecated', False)
class AuthenticatedSpectacularAPIView(SpectacularAPIView):
"""SpectacularAPIView that requires authentication."""
permission_classes = [IsAuthenticated]
class AuthenticatedSpectacularSwaggerView(SpectacularSwaggerView):
"""SpectacularSwaggerView that requires authentication."""
permission_classes = [IsAuthenticated]
class AuthenticatedSpectacularRedocView(SpectacularRedocView):
"""SpectacularRedocView that requires authentication."""
permission_classes = [IsAuthenticated]
# Schema view (returns OpenAPI schema JSON/YAML)
schema_view = AuthenticatedSpectacularAPIView.as_view()
# Swagger UI view
swagger_ui_view = AuthenticatedSpectacularSwaggerView.as_view(url_name='api:schema-json')
# ReDoc UI view
redoc_view = AuthenticatedSpectacularRedocView.as_view(url_name='api:schema-json')
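For reference, a drf-spectacular postprocessing hook like `filter_credential_type_schema` above is registered through settings; a minimal sketch, where the dotted module path is an assumption about where this module lived:

```python
# Settings sketch; 'awx.api.spectacular' is an assumed module path.
SPECTACULAR_SETTINGS = {
    'POSTPROCESSING_HOOKS': [
        'awx.api.spectacular.filter_credential_type_schema',
    ],
}
```

drf-spectacular calls each registered hook with the `(result, generator, request, public)` signature used above, after the schema has been generated.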

View File

@@ -6,8 +6,6 @@ import copy
import json
import logging
import re
import yaml
import urllib.parse
from collections import Counter, OrderedDict
from datetime import timedelta
from uuid import uuid4
@@ -117,7 +115,6 @@ from awx.main.utils import (
from awx.main.utils.filters import SmartFilter
from awx.main.utils.plugins import load_combined_inventory_source_options
from awx.main.utils.named_url_graph import reset_counters
from awx.main.utils.inventory_vars import update_group_variables
from awx.main.scheduler.task_manager_models import TaskManagerModels
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.signals import update_inventory_computed_fields
@@ -629,41 +626,15 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
return exclusions
def validate(self, attrs):
"""
Apply serializer validation. Called by DRF.
Can be extended by subclasses. Or consider overwriting
`validate_with_obj` in subclasses, which provides access to the model
object and exception handling for field validation.
:param dict attrs: The names and values of the model form fields.
:raise rest_framework.exceptions.ValidationError: If the validation
fails.
The exception must contain a dict with the names of the form fields
which failed validation as keys, and a list of error messages as
values. This ensures that the error messages are rendered near the
relevant fields.
:return: The names and values from the model form fields, possibly
modified by the validations.
:rtype: dict
"""
attrs = super(BaseSerializer, self).validate(attrs)
# Create/update a model instance and run its full_clean() method to
# do any validation implemented on the model class.
exclusions = self.get_validation_exclusions(self.instance)
# Create a new model instance or take the existing one if it exists,
# and update its attributes with the respective field values from
# attrs.
obj = self.instance or self.Meta.model()
for k, v in attrs.items():
if k not in exclusions and k != 'canonical_address_port':
setattr(obj, k, v)
try:
# Run serializer validators which need the model object for
# validation.
self.validate_with_obj(attrs, obj)
# Apply any validations implemented on the model class.
# Create/update a model instance and run its full_clean() method to
# do any validation implemented on the model class.
exclusions = self.get_validation_exclusions(self.instance)
obj = self.instance or self.Meta.model()
for k, v in attrs.items():
if k not in exclusions and k != 'canonical_address_port':
setattr(obj, k, v)
obj.full_clean(exclude=exclusions)
# full_clean may modify values on the instance; copy those changes
# back to attrs so they are saved.
@@ -692,32 +663,6 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
raise ValidationError(d)
return attrs
def validate_with_obj(self, attrs, obj):
"""
Overwrite this if you need the model instance for your validation.
:param dict attrs: The names and values of the model form fields.
:param obj: An instance of the class's meta model.
If the serializer runs on a newly created object, obj contains only
the attrs from its serializer. If the serializer runs because an
object has been edited, obj is the existing model instance with all
attributes and values available.
:raise django.core.exceptions.ValidationError: Raise this if your
validation fails.
To make the error appear at the respective form field, instantiate
the Exception with a dict containing the field name as key and the
error message as value.
Example: ``ValidationError({"password": "Not good enough!"})``
If the exception contains just a string, the message cannot be
related to a field and is rendered at the top of the model form.
:return: None
"""
return
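A minimal sketch of the override pattern this docstring describes, with an illustrative field name and rule:

```python
from django.core.exceptions import ValidationError as DjangoValidationError


class ExampleSerializer(BaseSerializer):  # BaseSerializer from this module
    def validate_with_obj(self, attrs, obj):
        # obj is the new or existing model instance with attrs applied
        if attrs.get('name', '').startswith('_'):
            # the dict form attaches the message to the 'name' field
            raise DjangoValidationError({'name': 'Names may not start with an underscore.'})
```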
def reverse(self, *args, **kwargs):
kwargs['request'] = self.context.get('request')
return reverse(*args, **kwargs)
@@ -734,29 +679,15 @@ class EmptySerializer(serializers.Serializer):
pass
class OpaQueryPathMixin(serializers.Serializer):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def validate_opa_query_path(self, value):
# Decode the URL and re-encode it
decoded_value = urllib.parse.unquote(value)
re_encoded_value = urllib.parse.quote(decoded_value, safe='/')
if value != re_encoded_value:
raise serializers.ValidationError(_("The URL must be properly encoded."))
return value
class UnifiedJobTemplateSerializer(BaseSerializer, OpaQueryPathMixin):
class UnifiedJobTemplateSerializer(BaseSerializer):
# As a base serializer, the capabilities prefetch is not used directly,
# instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
capabilities_prefetch = []
class Meta:
model = UnifiedJobTemplate
fields = ('*', 'last_job_run', 'last_job_failed', 'next_job_run', 'status', 'execution_environment')
fields = ('*', 'last_job_run', 'last_job_failed', 'next_job_run', 'status', 'priority', 'execution_environment')
def get_related(self, obj):
res = super(UnifiedJobTemplateSerializer, self).get_related(obj)
@@ -963,13 +894,13 @@ class UnifiedJobSerializer(BaseSerializer):
class UnifiedJobListSerializer(UnifiedJobSerializer):
class Meta:
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished', '-artifacts')
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished')
def get_field_names(self, declared_fields, info):
field_names = super(UnifiedJobListSerializer, self).get_field_names(declared_fields, info)
# Meta multiple inheritance and -field_name options don't seem to be
# taking effect above, so remove the undesired fields here.
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'event_processing_finished', 'artifacts'))
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'event_processing_finished'))
def get_types(self):
if type(self) is UnifiedJobListSerializer:
@@ -1054,6 +985,7 @@ class UserSerializer(BaseSerializer):
return ret
def validate_password(self, value):
django_validate_password(value)
if not self.instance and value in (None, ''):
raise serializers.ValidationError(_('Password required for new User.'))
@@ -1076,50 +1008,6 @@ class UserSerializer(BaseSerializer):
return value
def validate_with_obj(self, attrs, obj):
"""
Validate the password with the Django password validators
To enable the Django password validators, configure
`settings.AUTH_PASSWORD_VALIDATORS` as described in the [Django
docs](https://docs.djangoproject.com/en/5.1/topics/auth/passwords/#enabling-password-validation)
:param dict attrs: The User form field names and their values as a dict.
Example::
{
'username': 'TestUsername', 'first_name': 'FirstName',
'last_name': 'LastName', 'email': 'First.Last@my.org',
'is_superuser': False, 'is_system_auditor': False,
'password': 'secret123'
}
:param obj: The User model instance.
:raises django.core.exceptions.ValidationError: Raise this if at least
one Django password validator fails.
The exception contains a dict ``{"password": <error-message>}``
which indicates that the password field has failed validation, and
the reason for failure.
:return: None.
"""
# We must do this here instead of in `validate_password` because some
# django password validators need access to other model instance fields,
# e.g. ``username`` for the ``UserAttributeSimilarityValidator``.
password = attrs.get("password")
# Skip validation if no password has been entered. This may happen when
# an existing User is edited.
if password and password != '$encrypted$':
# Apply validators from settings.AUTH_PASSWORD_VALIDATORS. This may
# raise ValidationError.
#
# If the validation fails, re-raise the exception with adjusted
# content to make the error appear near the password field.
try:
django_validate_password(password, user=obj)
except DjangoValidationError as exc:
raise DjangoValidationError({"password": exc.messages})
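The validators referenced here are enabled through standard Django settings; for example:

```python
# Standard Django configuration; see the Django password-validation docs.
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
        'OPTIONS': {'min_length': 9},
    },
]
```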
def _update_password(self, obj, new_password):
if new_password and new_password != '$encrypted$':
obj.set_password(new_password)
@@ -1182,12 +1070,12 @@ class UserActivityStreamSerializer(UserSerializer):
fields = ('*', '-is_system_auditor')
class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
class OrganizationSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete']
class Meta:
model = Organization
fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment', 'opa_query_path')
fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment')
read_only_fields = ('*', 'custom_virtualenv')
def get_related(self, obj):
@@ -1230,7 +1118,7 @@ class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
# to a team. This provides a hint to the ui so it can know to not
# display these roles for team role selection.
for key in ('admin_role', 'member_role'):
if summary_dict and key in summary_dict.get('object_roles', {}):
if key in summary_dict.get('object_roles', {}):
summary_dict['object_roles'][key]['user_only'] = True
return summary_dict
@@ -1541,7 +1429,7 @@ class LabelsListMixin(object):
return res
class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQueryPathMixin):
class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
show_capabilities = ['edit', 'delete', 'adhoc', 'copy']
capabilities_prefetch = ['admin', 'adhoc', {'copy': 'organization.inventory_admin'}]
@@ -1562,7 +1450,6 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQuery
'inventory_sources_with_failures',
'pending_deletion',
'prevent_instance_group_fallback',
'opa_query_path',
)
def get_related(self, obj):
@@ -1632,68 +1519,8 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQuery
if kind == 'smart' and not host_filter:
raise serializers.ValidationError({'host_filter': _('Smart inventories must specify host_filter')})
return super(InventorySerializer, self).validate(attrs)
@staticmethod
def _update_variables(variables, inventory_id):
"""
Update the inventory variables of the 'all'-group.
The variables field contains vars from the inventory dialog, hence
representing the "all"-group variables.
Since this is not an update from an inventory source, we update the
variables when the inventory details form is saved.
A user edit on the inventory variables is considered a reset of the
variables update history. Particularly if the user removes a variable by
editing the inventory variables field, the variable is not supposed to
reappear with a value from a previous inventory source update.
We achieve this by forcing `reset=True` on such an update.
As a side-effect, variables which have been set by source updates and
have survived a user-edit (i.e. they have not been deleted from the
variables field) will be assumed to originate from the user edit and are
thus no longer deleted from the inventory when they are removed from
their original source!
Note that we use the inventory source id -1 for user-edit updates
because a regular inventory source cannot have an id of -1 since
PostgreSQL assigns pk's starting from 1 (if this assumption doesn't hold
true, we have to assign another special value for invsrc_id).
:param str variables: The variables as plain text in yaml or json
format.
:param int inventory_id: The primary key of the related inventory
object.
"""
variables_dict = parse_yaml_or_json(variables, silent_failure=False)
logger.debug(f"InventorySerializer._update_variables: {inventory_id=} {variables_dict=}, {variables=}")
update_group_variables(
group_id=None, # `None` denotes the 'all' group (which doesn't have a pk).
newvars=variables_dict,
dbvars=None,
invsrc_id=-1,
inventory_id=inventory_id,
reset=True,
)
def create(self, validated_data):
"""Called when a new inventory has to be created."""
logger.debug(f"InventorySerializer.create({validated_data=}) >>>>")
obj = super().create(validated_data)
self._update_variables(validated_data.get("variables") or "", obj.id)
return obj
def update(self, obj, validated_data):
"""Called when an existing inventory is updated."""
logger.debug(f"InventorySerializer.update({validated_data=}) >>>>")
obj = super().update(obj, validated_data)
self._update_variables(validated_data.get("variables") or "", obj.id)
return obj
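`parse_yaml_or_json` is an AWX-internal helper; a standalone sketch of the same try-YAML-then-JSON pattern with PyYAML (a simplification under stated assumptions, not the actual implementation):

```python
import json

import yaml


def parse_yaml_or_json(text):
    """Return a dict parsed from YAML or JSON text; raise ValueError otherwise."""
    try:
        # JSON is a YAML subset, so most JSON also parses here
        data = yaml.safe_load(text) or {}
    except yaml.YAMLError:
        data = json.loads(text)  # fall back to strict JSON
    if not isinstance(data, dict):
        raise ValueError('expected a mapping of variables')
    return data
```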
class ConstructedFieldMixin(serializers.Field):
def get_attribute(self, instance):
@@ -1983,12 +1810,10 @@ class GroupSerializer(BaseSerializerWithVariables):
return res
def validate(self, attrs):
# Do not allow the group name to conflict with an existing host name.
name = force_str(attrs.get('name', self.instance and self.instance.name or ''))
inventory = attrs.get('inventory', self.instance and self.instance.inventory or '')
if Host.objects.filter(name=name, inventory=inventory).exists():
raise serializers.ValidationError(_('A Host with that name already exists.'))
#
return super(GroupSerializer, self).validate(attrs)
def validate_name(self, value):
@@ -2165,13 +1990,13 @@ class BulkHostDeleteSerializer(serializers.Serializer):
attrs['hosts_data'] = attrs['host_qs'].values()
if len(attrs['host_qs']) == 0:
error_hosts = dict.fromkeys(attrs['hosts'], "Hosts do not exist or you lack permission to delete it")
error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in attrs['hosts']}
raise serializers.ValidationError({'hosts': error_hosts})
if len(attrs['host_qs']) < len(attrs['hosts']):
hosts_exists = [host['id'] for host in attrs['hosts_data']]
failed_hosts = list(set(attrs['hosts']).difference(hosts_exists))
error_hosts = dict.fromkeys(failed_hosts, "Hosts do not exist or you lack permission to delete it")
error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in failed_hosts}
raise serializers.ValidationError({'hosts': error_hosts})
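The two `error_hosts` spellings above are equivalent; a quick check:

```python
# dict.fromkeys shares one value object across keys, which is safe here
# because the message string is immutable.
msg = "Hosts do not exist or you lack permission to delete it"
hosts = [3, 7, 9]
assert dict.fromkeys(hosts, msg) == {host: msg for host in hosts}
```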
# Getting all inventories that the hosts can be in
@@ -2839,7 +2664,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
{
"role": {
"id": None,
"name": _("Platform Auditor"),
"name": _("Controller System Auditor"),
"description": _("Can view all aspects of the system"),
"user_capabilities": {"unattach": False},
},
@@ -3027,6 +2852,11 @@ class CredentialSerializer(BaseSerializer):
ret.remove(field)
return ret
def validate_organization(self, org):
if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
return org
def validate_credential_type(self, credential_type):
if self.instance and credential_type.pk != self.instance.credential_type.pk:
for related_objects in (
@@ -3102,6 +2932,9 @@ class CredentialSerializerCreate(CredentialSerializer):
if attrs.get('team'):
attrs['organization'] = attrs['team'].organization
if 'credential_type' in attrs and attrs['credential_type'].kind == 'galaxy' and list(owner_fields) != ['organization']:
raise serializers.ValidationError({"organization": _("Galaxy credentials must be owned by an Organization.")})
return super(CredentialSerializerCreate, self).validate(attrs)
def create(self, validated_data):
@@ -3164,6 +2997,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
'scm_branch',
'forks',
'limit',
'priority',
'verbosity',
'extra_vars',
'job_tags',
@@ -3286,6 +3120,7 @@ class JobTemplateMixin(object):
class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobOptionsSerializer):
show_capabilities = ['start', 'schedule', 'copy', 'edit', 'delete']
capabilities_prefetch = ['admin', 'execute', {'copy': ['project.use', 'inventory.use']}]
priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
status = serializers.ChoiceField(choices=JobTemplate.JOB_TEMPLATE_STATUS_CHOICES, read_only=True, required=False)
@@ -3293,6 +3128,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
model = JobTemplate
fields = (
'*',
'priority',
'host_config_key',
'ask_scm_branch_on_launch',
'ask_diff_mode_on_launch',
@@ -3319,7 +3155,6 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
'webhook_service',
'webhook_credential',
'prevent_instance_group_fallback',
'opa_query_path',
)
read_only_fields = ('*', 'custom_virtualenv')
@@ -3421,6 +3256,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
'diff_mode',
'job_slice_number',
'job_slice_count',
'priority',
'webhook_service',
'webhook_credential',
'webhook_guid',
@@ -3527,7 +3363,7 @@ class JobRelaunchSerializer(BaseSerializer):
choices=NEW_JOB_TYPE_CHOICES,
write_only=True,
)
credential_passwords = VerbatimField(required=False, write_only=True)
credential_passwords = VerbatimField(required=True, write_only=True)
class Meta:
model = Job
@@ -3871,6 +3707,7 @@ class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
@@ -3891,6 +3728,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
'-controller_node',
'inventory',
'limit',
'priority',
'scm_branch',
'webhook_service',
'webhook_credential',
@@ -4008,6 +3846,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None, choices=NEW_JOB_TYPE_CHOICES)
job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None)
verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES)
@@ -4026,6 +3865,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
'job_tags',
'skip_tags',
'limit',
'priority',
'skip_tags',
'diff_mode',
'verbosity',
@@ -4519,6 +4359,7 @@ class JobLaunchSerializer(BaseSerializer):
job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True)
skip_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
priority = serializers.IntegerField(required=False, write_only=False, min_value=0, max_value=32000)
verbosity = serializers.ChoiceField(required=False, choices=VERBOSITY_CHOICES, write_only=True)
execution_environment = serializers.PrimaryKeyRelatedField(queryset=ExecutionEnvironment.objects.all(), required=False, write_only=True)
labels = serializers.PrimaryKeyRelatedField(many=True, queryset=Label.objects.all(), required=False, write_only=True)
@@ -4536,6 +4377,7 @@ class JobLaunchSerializer(BaseSerializer):
'inventory',
'scm_branch',
'limit',
'priority',
'job_tags',
'skip_tags',
'job_type',
@@ -4721,6 +4563,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
extra_vars = VerbatimField(required=False, write_only=True)
inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
priority = serializers.IntegerField(required=False, write_only=False, min_value=0, max_value=32000)
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
workflow_job_template_data = serializers.SerializerMethodField()
@@ -4860,13 +4703,14 @@ class BulkJobLaunchSerializer(serializers.Serializer):
)
inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
limit = serializers.CharField(write_only=True, required=False, allow_blank=False)
# priority = serializers.IntegerField(write_only=True, required=False, min_value=0, max_value=32000)
scm_branch = serializers.CharField(write_only=True, required=False, allow_blank=False)
skip_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)
job_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)
class Meta:
model = WorkflowJob
fields = ('name', 'jobs', 'description', 'extra_vars', 'organization', 'inventory', 'limit', 'scm_branch', 'skip_tags', 'job_tags')
fields = ('name', 'jobs', 'description', 'extra_vars', 'organization', 'inventory', 'limit', 'priority', 'scm_branch', 'skip_tags', 'job_tags')
read_only_fields = ()
def validate(self, attrs):
@@ -5990,34 +5834,6 @@ class InstanceGroupSerializer(BaseSerializer):
raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group'))
return value
def validate_pod_spec_override(self, value):
if not value:
return value
# value should be empty for non-container groups
if self.instance and not self.instance.is_container_group:
raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))
pod_spec_override_json = {}
# detect whether the value is YAML or JSON; try YAML first, then fall back to JSON
try:
# parse as YAML (JSON is a YAML subset, so most JSON parses here too)
pod_spec_override_json = yaml.safe_load(value)
except yaml.YAMLError:
try:
pod_spec_override_json = json.loads(value)
except json.JSONDecodeError:
raise serializers.ValidationError(_('pod_spec_override must be valid yaml or json'))
# validate the spec: automountServiceAccountToken must not be enabled
spec = pod_spec_override_json.get('spec', {})
automount_service_account_token = spec.get('automountServiceAccountToken', False)
if automount_service_account_token:
raise serializers.ValidationError(_('automountServiceAccountToken is not allowed for security reasons'))
return value
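To illustrate the check in question, a pod spec override like the following would fail validation (a sketch, not a test from this repository):

```python
import yaml

pod_spec_override = """
spec:
  automountServiceAccountToken: true
"""
spec = yaml.safe_load(pod_spec_override).get('spec', {})
assert spec.get('automountServiceAccountToken', False)  # would trigger the error above
```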
def validate(self, attrs):
attrs = super(InstanceGroupSerializer, self).validate(attrs)
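Taken together with the `JobLaunchSerializer` changes above, a launch request could set `priority` alongside other prompts. A hypothetical call, where the host, token, template id, and the acceptance of `priority` at launch time are all assumptions:

```python
# Hypothetical API call; host, token, and template id are placeholders.
import requests

resp = requests.post(
    'https://awx.example.com/api/v2/job_templates/42/launch/',
    headers={'Authorization': 'Bearer TOKEN'},
    json={'priority': 100, 'limit': 'webservers'},
)
resp.raise_for_status()
print(resp.json()['job'])  # id of the newly launched job
```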

awx/api/swagger.py Normal file
View File

@@ -0,0 +1,55 @@
import warnings
from rest_framework.permissions import AllowAny
from drf_yasg import openapi
from drf_yasg.inspectors import SwaggerAutoSchema
from drf_yasg.views import get_schema_view
class CustomSwaggerAutoSchema(SwaggerAutoSchema):
"""Custom SwaggerAutoSchema to add swagger_topic to tags."""
def get_tags(self, operation_keys=None):
tags = []
try:
if hasattr(self.view, 'get_serializer'):
serializer = self.view.get_serializer()
else:
serializer = None
except Exception:
serializer = None
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {}.'.format(self.view.__class__.__name__, operation_keys)
)
if hasattr(self.view, 'swagger_topic'):
tags.append(str(self.view.swagger_topic).title())
elif serializer and hasattr(serializer, 'Meta'):
tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
elif hasattr(self.view, 'model'):
tags.append(str(self.view.model._meta.verbose_name_plural).title())
else:
tags = ['api'] # Fallback to default value
if not tags:
warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
return tags
def is_deprecated(self):
"""Return `True` if this operation is to be marked as deprecated."""
return getattr(self.view, 'deprecated', False)
schema_view = get_schema_view(
openapi.Info(
title='AWX API',
default_version='v2',
description='AWX API Documentation',
terms_of_service='https://www.google.com/policies/terms/',
contact=openapi.Contact(email='contact@snippets.local'),
license=openapi.License(name='Apache License'),
),
public=True,
permission_classes=[AllowAny],
)

View File

@@ -1,6 +1,6 @@
{% if content_only %}<div class="nocode ansi_fore ansi_back{% if dark %} ansi_dark{% endif %}">{% else %}
<!DOCTYPE HTML>
<html lang="en">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>{{ title }}</title>

View File

@@ -1,4 +1,4 @@
---
collections:
- name: ansible.receptor
version: 2.0.6
version: 2.0.3

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import ActivityStreamList, ActivityStreamDetail
urls = [
re_path(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
AdHocCommandStdout,
)
urls = [
re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'),
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import AdHocCommandEventDetail
urls = [
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
]

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
import awx.api.views.analytics as analytics
urls = [
re_path(r'^$', analytics.AnalyticsRootView.as_view(), name='analytics_root_view'),
re_path(r'^authorized/$', analytics.AnalyticsAuthorizedView.as_view(), name='analytics_authorized'),

View File

@@ -16,6 +16,7 @@ from awx.api.views import (
CredentialExternalTest,
)
urls = [
re_path(r'^$', CredentialList.as_view(), name='credential_list'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList
urls = [
re_path(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'),
re_path(r'^(?P<pk>[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest
urls = [
re_path(r'^$', CredentialTypeList.as_view(), name='credential_type_list'),
re_path(r'^(?P<pk>[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'),

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
ExecutionEnvironmentActivityStreamList,
)
urls = [
re_path(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'),

View File

@@ -18,6 +18,7 @@ from awx.api.views import (
GroupAdHocCommandsList,
)
urls = [
re_path(r'^$', GroupList.as_view(), name='group_list'),
re_path(r'^(?P<pk>[0-9]+)/$', GroupDetail.as_view(), name='group_detail'),

View File

@@ -18,6 +18,7 @@ from awx.api.views import (
HostAdHocCommandEventsList,
)
urls = [
re_path(r'^$', HostList.as_view(), name='host_list'),
re_path(r'^(?P<pk>[0-9]+)/$', HostDetail.as_view(), name='host_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
)
from awx.api.views.instance_install_bundle import InstanceInstallBundle
urls = [
re_path(r'^$', InstanceList.as_view(), name='instance_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),

View File

@@ -12,6 +12,7 @@ from awx.api.views import (
InstanceGroupObjectRolesList,
)
urls = [
re_path(r'^$', InstanceGroupList.as_view(), name='instance_group_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),

View File

@@ -29,6 +29,7 @@ from awx.api.views import (
InventoryVariableData,
)
urls = [
re_path(r'^$', InventoryList.as_view(), name='inventory_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'),

View File

@@ -18,6 +18,7 @@ from awx.api.views import (
InventorySourceNotificationTemplatesSuccessList,
)
urls = [
re_path(r'^$', InventorySourceList.as_view(), name='inventory_source_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'),

View File

@@ -15,6 +15,7 @@ from awx.api.views import (
InventoryUpdateCredentialsList,
)
urls = [
re_path(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'),

View File

@@ -19,6 +19,7 @@ from awx.api.views import (
JobHostSummaryDetail,
)
urls = [
re_path(r'^$', JobList.as_view(), name='job_list'),
re_path(r'^(?P<pk>[0-9]+)/$', JobDetail.as_view(), name='job_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import JobHostSummaryDetail
urls = [re_path(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]
__all__ = ['urls']

View File

@@ -23,6 +23,7 @@ from awx.api.views import (
JobTemplateCopy,
)
urls = [
re_path(r'^$', JobTemplateList.as_view(), name='job_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views.labels import LabelList, LabelDetail
urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
__all__ = ['urls']

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import NotificationList, NotificationDetail
urls = [
re_path(r'^$', NotificationList.as_view(), name='notification_list'),
re_path(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),

View File

@@ -11,6 +11,7 @@ from awx.api.views import (
NotificationTemplateCopy,
)
urls = [
re_path(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),

View File

@@ -27,6 +27,7 @@ from awx.api.views.organization import (
)
from awx.api.views import OrganizationCredentialList
urls = [
re_path(r'^$', OrganizationList.as_view(), name='organization_list'),
re_path(r'^(?P<pk>[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'),

View File

@@ -22,6 +22,7 @@ from awx.api.views import (
ProjectCopy,
)
urls = [
re_path(r'^$', ProjectList.as_view(), name='project_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),

View File

@@ -13,6 +13,7 @@ from awx.api.views import (
ProjectUpdateEventsList,
)
urls = [
re_path(r'^$', ProjectUpdateList.as_view(), name='project_update_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'),

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
ReceptorAddressDetail,
)
urls = [
re_path(r'^$', ReceptorAddressesList.as_view(), name='receptor_addresses_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ReceptorAddressDetail.as_view(), name='receptor_address_detail'),

View File

@@ -3,13 +3,16 @@
from django.urls import re_path
from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList
from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList
urls = [
re_path(r'^$', RoleList.as_view(), name='role_list'),
re_path(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'),
re_path(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'),
re_path(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'),
re_path(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'),
re_path(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'),
]
__all__ = ['urls']

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList, ScheduleLabelsList, ScheduleInstanceGroupList
urls = [
re_path(r'^$', ScheduleList.as_view(), name='schedule_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList
urls = [
re_path(r'^$', SystemJobList.as_view(), name='system_job_list'),
re_path(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
SystemJobTemplateNotificationTemplatesSuccessList,
)
urls = [
re_path(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'),

View File

@@ -15,6 +15,7 @@ from awx.api.views import (
TeamAccessList,
)
urls = [
re_path(r'^$', TeamList.as_view(), name='team_list'),
re_path(r'^(?P<pk>[0-9]+)/$', TeamDetail.as_view(), name='team_detail'),

View File

@@ -4,6 +4,7 @@
from __future__ import absolute_import, unicode_literals
from django.urls import include, re_path
from awx import MODE
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views.root import (
ApiRootView,
@@ -147,15 +148,21 @@ v2_urls = [
app_name = 'api'
urlpatterns = [
re_path(r'^$', ApiRootView.as_view(), name='api_root_view'),
re_path(r'^(?P<version>(v2))/', include(v2_urls)),
re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
# the docs/, schema-related endpoints used to be listed here but now exposed by DAB api_documentation app
]
if MODE == 'development':
# Only include these if we are in the development environment
from awx.api.swagger import schema_view
from awx.api.urls.debug import urls as debug_urls
from awx.api.urls.debug import urls as debug_urls
urlpatterns += [re_path(r'^debug/', include(debug_urls))]
urlpatterns += [re_path(r'^debug/', include(debug_urls))]
urlpatterns += [
re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
]

View File

@@ -2,6 +2,7 @@ from django.urls import re_path
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver
urlpatterns = [
re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny
urls = [
re_path(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList
urls = [
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'),
re_path(r'^(?P<pk>[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
WorkflowJobActivityStreamList,
)
urls = [
re_path(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
WorkflowJobNodeInstanceGroupsList,
)
urls = [
re_path(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'),

View File

@@ -22,6 +22,7 @@ from awx.api.views import (
WorkflowJobTemplateLabelList,
)
urls = [
re_path(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'),

View File

@@ -15,6 +15,7 @@ from awx.api.views import (
WorkflowJobTemplateNodeInstanceGroupsList,
)
urls = [
re_path(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'),

File diff suppressed because it is too large

View File

@@ -10,13 +10,11 @@ from awx.api.generics import APIView, Response
from awx.api.permissions import AnalyticsPermission
from awx.api.versioning import reverse
from awx.main.utils import get_awx_version
from awx.main.utils.analytics_proxy import OIDCClient
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
from rest_framework import status
from collections import OrderedDict
from ansible_base.lib.utils.schema import extend_schema_if_available
AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
AWX_ANALYTICS_API_PREFIX = 'analytics'
@@ -40,8 +38,6 @@ class MissingSettings(Exception):
class GetNotAllowedMixin(object):
skip_ai_description = True
def get(self, request, format=None):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@@ -50,9 +46,7 @@ class AnalyticsRootView(APIView):
permission_classes = (AnalyticsPermission,)
name = _('Automation Analytics')
swagger_topic = 'Automation Analytics'
resource_purpose = 'automation analytics endpoints'
@extend_schema_if_available(extensions={"x-ai-description": "A list of additional API endpoints related to analytics"})
def get(self, request, format=None):
data = OrderedDict()
data['authorized'] = reverse('api:analytics_authorized', request=request)
@@ -105,8 +99,6 @@ class AnalyticsGenericView(APIView):
return Response(response.json(), status=response.status_code)
"""
resource_purpose = 'base view for analytics api proxy'
permission_classes = (AnalyticsPermission,)
@staticmethod
@@ -210,16 +202,10 @@ class AnalyticsGenericView(APIView):
if method not in ["GET", "POST", "OPTIONS"]:
return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
url = self._get_analytics_url(request.path)
using_subscriptions_credentials = False
try:
rh_user = getattr(settings, 'REDHAT_USERNAME', None)
rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
if not (rh_user and rh_password):
rh_user = self._get_setting('SUBSCRIPTIONS_CLIENT_ID', None, ERROR_MISSING_USER)
rh_password = self._get_setting('SUBSCRIPTIONS_CLIENT_SECRET', None, ERROR_MISSING_PASSWORD)
using_subscriptions_credentials = True
client = OIDCClient(rh_user, rh_password)
rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
response = client.make_request(
method,
url,
@@ -230,17 +216,17 @@ class AnalyticsGenericView(APIView):
timeout=(31, 31),
)
except requests.RequestException:
# subscriptions credentials are not valid for basic auth, so just return 401
if using_subscriptions_credentials:
response = Response(status=status.HTTP_401_UNAUTHORIZED)
else:
logger.error("Automation Analytics API request failed, trying base auth method")
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
logger.error("Automation Analytics API request failed, trying base auth method")
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
except MissingSettings:
rh_user = self._get_setting('SUBSCRIPTIONS_USERNAME', None, ERROR_MISSING_USER)
rh_password = self._get_setting('SUBSCRIPTIONS_PASSWORD', None, ERROR_MISSING_PASSWORD)
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
#
# Missing or wrong user/pass
#
if response.status_code == status.HTTP_401_UNAUTHORIZED:
text = response.get('text', '').rstrip("\n")
text = (response.text or '').rstrip("\n")
return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
#
# Not found, No entitlement or No data in Analytics
@@ -265,91 +251,67 @@ class AnalyticsGenericView(APIView):
class AnalyticsGenericListView(AnalyticsGenericView):
resource_purpose = 'analytics api proxy list view'
@extend_schema_if_available(extensions={"x-ai-description": "Get analytics data from Red Hat Insights"})
def get(self, request, format=None):
return self._send_to_analytics(request, method="GET")
@extend_schema_if_available(extensions={"x-ai-description": "Post query to Red Hat Insights analytics"})
def post(self, request, format=None):
return self._send_to_analytics(request, method="POST")
@extend_schema_if_available(extensions={"x-ai-description": "Get analytics endpoint options"})
def options(self, request, format=None):
return self._send_to_analytics(request, method="OPTIONS")
class AnalyticsGenericDetailView(AnalyticsGenericView):
resource_purpose = 'analytics api proxy detail view'
@extend_schema_if_available(extensions={"x-ai-description": "Get specific analytics resource from Red Hat Insights"})
def get(self, request, slug, format=None):
return self._send_to_analytics(request, method="GET")
@extend_schema_if_available(extensions={"x-ai-description": "Post query for specific analytics resource to Red Hat Insights"})
def post(self, request, slug, format=None):
return self._send_to_analytics(request, method="POST")
@extend_schema_if_available(extensions={"x-ai-description": "Get options for specific analytics resource"})
def options(self, request, slug, format=None):
return self._send_to_analytics(request, method="OPTIONS")
@extend_schema_if_available(
extensions={'x-ai-description': 'Check if the user has access to Red Hat Insights'},
)
class AnalyticsAuthorizedView(AnalyticsGenericListView):
name = _("Authorized")
resource_purpose = 'red hat insights authorization status'
class AnalyticsReportsList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Reports")
swagger_topic = "Automation Analytics"
resource_purpose = 'automation analytics reports'
class AnalyticsReportDetail(AnalyticsGenericDetailView):
name = _("Report")
resource_purpose = 'automation analytics report detail'
class AnalyticsReportOptionsList(AnalyticsGenericListView):
name = _("Report Options")
resource_purpose = 'automation analytics report options'
class AnalyticsAdoptionRateList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Adoption Rate")
resource_purpose = 'automation analytics adoption rate data'
class AnalyticsEventExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Event Explorer")
resource_purpose = 'automation analytics event explorer data'
class AnalyticsHostExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Host Explorer")
resource_purpose = 'automation analytics host explorer data'
class AnalyticsJobExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Job Explorer")
resource_purpose = 'automation analytics job explorer data'
class AnalyticsProbeTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Probe Templates")
resource_purpose = 'automation analytics probe templates'
class AnalyticsProbeTemplateForHostsList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Probe Template For Hosts")
resource_purpose = 'automation analytics probe templates for hosts'
class AnalyticsRoiTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("ROI Templates")
resource_purpose = 'automation analytics roi templates'
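`OIDCClient` is an AWX-internal helper; the OAuth2 client-credentials exchange it wraps (see the `OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])` call above) can be sketched with plain `requests`. The endpoint URL and scope handling here are assumptions mirroring that call:

```python
import requests


def fetch_token(client_id, client_secret, token_url, scopes):
    """Minimal OAuth2 client-credentials grant; returns a bearer token."""
    resp = requests.post(
        token_url,
        data={
            'grant_type': 'client_credentials',
            'client_id': client_id,
            'client_secret': client_secret,
            'scope': ' '.join(scopes),  # e.g. ['api.console']
        },
        timeout=31,
    )
    resp.raise_for_status()
    return resp.json()['access_token']
```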

View File

@@ -1,7 +1,5 @@
from collections import OrderedDict
from ansible_base.lib.utils.schema import extend_schema_if_available
from django.utils.translation import gettext_lazy as _
from rest_framework.permissions import IsAuthenticated
@@ -32,7 +30,6 @@ class BulkView(APIView):
]
allowed_methods = ['GET', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Retrieves a list of available bulk actions"})
def get(self, request, format=None):
'''List top level resources'''
data = OrderedDict()
@@ -48,13 +45,11 @@ class BulkJobLaunchView(GenericAPIView):
serializer_class = serializers.BulkJobLaunchSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk job launch endpoint"})
def get(self, request):
data = OrderedDict()
data['detail'] = "Specify a list of unified job templates to launch alongside their launchtime parameters"
return Response(data, status=status.HTTP_200_OK)
@extend_schema_if_available(extensions={"x-ai-description": "Bulk launch job templates"})
def post(self, request):
bulkjob_serializer = serializers.BulkJobLaunchSerializer(data=request.data, context={'request': request})
if bulkjob_serializer.is_valid():
@@ -69,11 +64,9 @@ class BulkHostCreateView(GenericAPIView):
serializer_class = serializers.BulkHostCreateSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk host create endpoint"})
def get(self, request):
return Response({"detail": "Bulk create hosts with this endpoint"}, status=status.HTTP_200_OK)
@extend_schema_if_available(extensions={"x-ai-description": "Bulk create hosts"})
def post(self, request):
serializer = serializers.BulkHostCreateSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
@@ -88,11 +81,9 @@ class BulkHostDeleteView(GenericAPIView):
serializer_class = serializers.BulkHostDeleteSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk host delete endpoint"})
def get(self, request):
return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
@extend_schema_if_available(extensions={"x-ai-description": "Bulk delete hosts"})
def post(self, request):
serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
if serializer.is_valid():

View File

@@ -5,7 +5,6 @@ from django.conf import settings
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from awx.api.generics import APIView
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.main.scheduler import TaskManager, DependencyManager, WorkflowManager
@@ -15,9 +14,7 @@ class TaskManagerDebugView(APIView):
exclude_from_schema = True
permission_classes = [AllowAny]
prefix = 'Task'
resource_purpose = 'debug task manager'
@extend_schema_if_available(extensions={"x-ai-description": "Trigger task manager scheduling"})
def get(self, request):
TaskManager().schedule()
if not settings.AWX_DISABLE_TASK_MANAGERS:
@@ -32,9 +29,7 @@ class DependencyManagerDebugView(APIView):
exclude_from_schema = True
permission_classes = [AllowAny]
prefix = 'Dependency'
resource_purpose = 'debug dependency manager'
@extend_schema_if_available(extensions={"x-ai-description": "Trigger dependency manager scheduling"})
def get(self, request):
DependencyManager().schedule()
if not settings.AWX_DISABLE_TASK_MANAGERS:
@@ -49,9 +44,7 @@ class WorkflowManagerDebugView(APIView):
exclude_from_schema = True
permission_classes = [AllowAny]
prefix = 'Workflow'
resource_purpose = 'debug workflow manager'
@extend_schema_if_available(extensions={"x-ai-description": "Trigger workflow manager scheduling"})
def get(self, request):
WorkflowManager().schedule()
if not settings.AWX_DISABLE_TASK_MANAGERS:
@@ -65,9 +58,7 @@ class DebugRootView(APIView):
_ignore_model_permissions = True
exclude_from_schema = True
permission_classes = [AllowAny]
resource_purpose = 'debug endpoints root'
@extend_schema_if_available(extensions={"x-ai-description": "List available debug endpoints"})
def get(self, request, format=None):
'''List of available debug urls'''
data = OrderedDict()

View File

@@ -10,10 +10,9 @@ import time
import re
import asn1
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.api import serializers
from awx.api.generics import GenericAPIView, Response
from awx.api.permissions import IsSystemAdmin
from awx.api.permissions import IsSystemAdminOrAuditor
from awx.main import models
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
@@ -49,10 +48,8 @@ class InstanceInstallBundle(GenericAPIView):
name = _('Install Bundle')
model = models.Instance
serializer_class = serializers.InstanceSerializer
permission_classes = (IsSystemAdmin,)
resource_purpose = 'install bundle'
permission_classes = (IsSystemAdminOrAuditor,)
@extend_schema_if_available(extensions={"x-ai-description": "Generate and download install bundle for an instance"})
def get(self, request, *args, **kwargs):
instance_obj = self.get_object()
@@ -198,8 +195,8 @@ def generate_receptor_tls(instance_obj):
.issuer_name(ca_cert.issuer)
.public_key(csr.public_key())
.serial_number(x509.random_serial_number())
.not_valid_before(datetime.datetime.now(datetime.UTC))
.not_valid_after(datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=3650))
.not_valid_before(datetime.datetime.utcnow())
.not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
.add_extension(
csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,

View File

@@ -19,8 +19,6 @@ from rest_framework import serializers
# AWX
from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.api.generics import (
ListCreateAPIView,
RetrieveUpdateDestroyAPIView,
@@ -45,6 +43,7 @@ from awx.api.views.mixin import RelatedJobsPreventDeleteMixin
from awx.api.pagination import UnifiedJobEventPagination
logger = logging.getLogger('awx.api.views.organization')
@@ -56,7 +55,6 @@ class InventoryUpdateEventsList(SubListAPIView):
name = _('Inventory Update Events List')
search_fields = ('stdout',)
pagination_class = UnifiedJobEventPagination
resource_purpose = 'events of an inventory update'
def get_queryset(self):
iu = self.get_parent_object()
@@ -71,17 +69,11 @@ class InventoryUpdateEventsList(SubListAPIView):
class InventoryList(ListCreateAPIView):
model = Inventory
serializer_class = InventorySerializer
resource_purpose = 'inventories'
@extend_schema_if_available(extensions={"x-ai-description": "A list of inventories."})
def get(self, request, *args, **kwargs):
return super().get(request, *args, **kwargs)
class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Inventory
serializer_class = InventorySerializer
resource_purpose = 'inventory detail'
def update(self, request, *args, **kwargs):
obj = self.get_object()
@@ -108,39 +100,33 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
class ConstructedInventoryDetail(InventoryDetail):
serializer_class = ConstructedInventorySerializer
resource_purpose = 'constructed inventory detail'
class ConstructedInventoryList(InventoryList):
serializer_class = ConstructedInventorySerializer
resource_purpose = 'constructed inventories'
def get_queryset(self):
r = super().get_queryset()
return r.filter(kind='constructed')
@extend_schema_if_available(extensions={"x-ai-description": "Get or create input inventory inventory"})
class InventoryInputInventoriesList(SubListAttachDetachAPIView):
model = Inventory
serializer_class = InventorySerializer
parent_model = Inventory
relationship = 'input_inventories'
resource_purpose = 'input inventories of a constructed inventory'
def is_valid_relation(self, parent, sub, created=False):
if sub.kind == 'constructed':
raise serializers.ValidationError({'error': 'You cannot add a constructed inventory to another constructed inventory.'})
@extend_schema_if_available(extensions={"x-ai-description": "Get activity stream for an inventory"})
class InventoryActivityStreamList(SubListAPIView):
model = ActivityStream
serializer_class = ActivityStreamSerializer
parent_model = Inventory
relationship = 'activitystream_set'
search_fields = ('changes',)
resource_purpose = 'activity stream for an inventory'
def get_queryset(self):
parent = self.get_parent_object()
@@ -154,13 +140,11 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
serializer_class = InstanceGroupSerializer
parent_model = Inventory
relationship = 'instance_groups'
resource_purpose = 'instance groups of an inventory'
class InventoryAccessList(ResourceAccessList):
model = User # needs to be User for AccessList's
parent_model = Inventory
resource_purpose = 'users who can access the inventory'
class InventoryObjectRolesList(SubListAPIView):
@@ -169,7 +153,6 @@ class InventoryObjectRolesList(SubListAPIView):
parent_model = Inventory
search_fields = ('role_field', 'content_type__model')
deprecated = True
resource_purpose = 'roles of an inventory'
def get_queryset(self):
po = self.get_parent_object()
@@ -182,7 +165,6 @@ class InventoryJobTemplateList(SubListAPIView):
serializer_class = JobTemplateSerializer
parent_model = Inventory
relationship = 'jobtemplates'
resource_purpose = 'job templates using an inventory'
def get_queryset(self):
parent = self.get_parent_object()
@@ -193,10 +175,8 @@ class InventoryJobTemplateList(SubListAPIView):
class InventoryLabelList(LabelSubListCreateAttachDetachView):
parent_model = Inventory
resource_purpose = 'labels of an inventory'
class InventoryCopy(CopyAPIView):
model = Inventory
copy_return_serializer_class = InventorySerializer
resource_purpose = 'copy of an inventory'
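The is_valid_relation guard above rejects nesting one constructed inventory inside another at the API boundary. An illustrative client call (the URL path is an assumption based on AWX's sublist conventions):

import requests

resp = requests.post(
    'https://awx.example.com/api/v2/inventories/42/input_inventories/',
    json={'id': 7},  # hypothetical id of another constructed inventory
    auth=('admin', 'password'),
)
assert resp.status_code == 400  # ValidationError raised by is_valid_relation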

View File

@@ -2,7 +2,6 @@
from awx.api.generics import SubListCreateAttachDetachAPIView, RetrieveUpdateAPIView, ListCreateAPIView
from awx.main.models import Label
from awx.api.serializers import LabelSerializer
from ansible_base.lib.utils.schema import extend_schema_if_available
# Django
from django.utils.translation import gettext_lazy as _
@@ -25,10 +24,9 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
model = Label
serializer_class = LabelSerializer
relationship = 'labels'
resource_purpose = 'labels of a resource'
def unattach(self, request, *args, **kwargs):
sub_id, res = super().unattach_validate(request)
(sub_id, res) = super().unattach_validate(request)
if res:
return res
@@ -41,7 +39,6 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
return res
@extend_schema_if_available(extensions={"x-ai-description": "Create or attach a label to a resource"})
def post(self, request, *args, **kwargs):
# If a label already exists in the database, attach it instead of erroring out
# that it already exists
@@ -64,11 +61,9 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
class LabelDetail(RetrieveUpdateAPIView):
model = Label
serializer_class = LabelSerializer
resource_purpose = 'label detail'
class LabelList(ListCreateAPIView):
name = _("Labels")
model = Label
serializer_class = LabelSerializer
resource_purpose = 'labels'
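The post() comment above promises attach-or-create semantics for labels. A hedged sketch of that behavior (the lookup fields and the attach mechanics are assumptions, not the code shown in this hunk):

def post(self, request, *args, **kwargs):
    # If an identically named label already exists in this organization,
    # rewrite the create into an attach instead of failing on uniqueness.
    existing = Label.objects.filter(
        name=request.data.get('name'),
        organization_id=request.data.get('organization'),
    ).first()
    if existing:
        request.data['id'] = existing.id
    return super().post(request, *args, **kwargs)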

View File

@@ -2,7 +2,6 @@
# All Rights Reserved.
from django.utils.translation import gettext_lazy as _
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.api.generics import APIView, Response
from awx.api.permissions import IsSystemAdminOrAuditor
@@ -14,9 +13,7 @@ class MeshVisualizer(APIView):
name = _("Mesh Visualizer")
permission_classes = (IsSystemAdminOrAuditor,)
swagger_topic = "System Configuration"
resource_purpose = 'mesh network topology visualization data'
@extend_schema_if_available(extensions={"x-ai-description": "Get mesh network topology visualization data"})
def get(self, request, format=None):
data = {
'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,

View File

@@ -7,13 +7,13 @@ import logging
# Django
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from ansible_base.lib.utils.schema import extend_schema_if_available
# Django REST Framework
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.exceptions import PermissionDenied
# AWX
# from awx.main.analytics import collectors
import awx.main.analytics.subsystem_metrics as s_metrics
@@ -22,13 +22,13 @@ from awx.api import renderers
from awx.api.generics import APIView
logger = logging.getLogger('awx.analytics')
class MetricsView(APIView):
name = _('Metrics')
swagger_topic = 'Metrics'
resource_purpose = 'prometheus metrics data'
renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
@@ -37,7 +37,6 @@ class MetricsView(APIView):
self.permission_classes = (AllowAny,)
return super(APIView, self).initialize_request(request, *args, **kwargs)
@extend_schema_if_available(extensions={"x-ai-description": "Get Prometheus metrics data"})
def get(self, request):
'''Show Metrics Details'''
if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
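The gate above admits anonymous scrapes only when ALLOW_METRICS_FOR_ANONYMOUS_USERS is set; otherwise a superuser or system auditor is required. An example scrape (the path is assumed from AWX's API layout):

import requests

r = requests.get(
    'https://awx.example.com/api/v2/metrics/',
    headers={'Accept': 'text/plain'},  # served by PlainTextRenderer above
    auth=('admin', 'password'),
)
print(r.text.splitlines()[:3])  # Prometheus exposition format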

View File

@@ -53,20 +53,21 @@ from awx.api.serializers import (
CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin, OrganizationInstanceGroupMembershipMixin
from awx.api.views import immutablesharedfields
logger = logging.getLogger('awx.api.views.organization')
@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
model = Organization
serializer_class = OrganizationSerializer
resource_purpose = 'organizations'
@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Organization
serializer_class = OrganizationSerializer
resource_purpose = 'organization detail'
def get_serializer_context(self, *args, **kwargs):
full_context = super(OrganizationDetail, self).get_serializer_context(*args, **kwargs)
@@ -104,25 +105,24 @@ class OrganizationInventoriesList(SubListAPIView):
serializer_class = InventorySerializer
parent_model = Organization
relationship = 'inventories'
resource_purpose = 'inventories of an organization'
@immutablesharedfields
class OrganizationUsersList(BaseUsersList):
model = User
serializer_class = UserSerializer
parent_model = Organization
relationship = 'member_role.members'
ordering = ('username',)
resource_purpose = 'users of an organization'
@immutablesharedfields
class OrganizationAdminsList(BaseUsersList):
model = User
serializer_class = UserSerializer
parent_model = Organization
relationship = 'admin_role.members'
ordering = ('username',)
resource_purpose = 'administrators of an organization'
class OrganizationProjectsList(SubListCreateAPIView):
@@ -130,7 +130,6 @@ class OrganizationProjectsList(SubListCreateAPIView):
serializer_class = ProjectSerializer
parent_model = Organization
parent_key = 'organization'
resource_purpose = 'projects of an organization'
class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
@@ -140,7 +139,6 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
relationship = 'executionenvironments'
parent_key = 'organization'
swagger_topic = "Execution Environments"
resource_purpose = 'execution environments of an organization'
class OrganizationJobTemplatesList(SubListCreateAPIView):
@@ -148,7 +146,6 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):
serializer_class = JobTemplateSerializer
parent_model = Organization
parent_key = 'organization'
resource_purpose = 'job templates of an organization'
class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
@@ -156,16 +153,15 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
serializer_class = WorkflowJobTemplateSerializer
parent_model = Organization
parent_key = 'organization'
resource_purpose = 'workflow job templates of an organization'
@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
model = Team
serializer_class = TeamSerializer
parent_model = Organization
relationship = 'teams'
parent_key = 'organization'
resource_purpose = 'teams of an organization'
class OrganizationActivityStreamList(SubListAPIView):
@@ -174,7 +170,6 @@ class OrganizationActivityStreamList(SubListAPIView):
parent_model = Organization
relationship = 'activitystream_set'
search_fields = ('changes',)
resource_purpose = 'activity stream for an organization'
class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
@@ -183,34 +178,28 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
parent_model = Organization
relationship = 'notification_templates'
parent_key = 'organization'
resource_purpose = 'notification templates of an organization'
class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
model = NotificationTemplate
serializer_class = NotificationTemplateSerializer
parent_model = Organization
resource_purpose = 'base view for notification templates of an organization'
class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_started'
resource_purpose = 'notification templates for job started events of an organization'
class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_error'
resource_purpose = 'notification templates for job error events of an organization'
class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_success'
resource_purpose = 'notification templates for job success events of an organization'
class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_approvals'
resource_purpose = 'notification templates for workflow approval events of an organization'
class OrganizationInstanceGroupsList(OrganizationInstanceGroupMembershipMixin, SubListAttachDetachAPIView):
@@ -219,7 +208,6 @@ class OrganizationInstanceGroupsList(OrganizationInstanceGroupMembershipMixin, S
parent_model = Organization
relationship = 'instance_groups'
filter_read_permission = False
resource_purpose = 'instance groups of an organization'
class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
@@ -228,7 +216,6 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
parent_model = Organization
relationship = 'galaxy_credentials'
filter_read_permission = False
resource_purpose = 'galaxy credentials of an organization'
def is_valid_relation(self, parent, sub, created=False):
if sub.kind != 'galaxy_api_token':
@@ -238,7 +225,6 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
class OrganizationAccessList(ResourceAccessList):
model = User # needs to be User for AccessList's
parent_model = Organization
resource_purpose = 'users who can access the organization'
class OrganizationObjectRolesList(SubListAPIView):
@@ -247,7 +233,6 @@ class OrganizationObjectRolesList(SubListAPIView):
parent_model = Organization
search_fields = ('role_field', 'content_type__model')
deprecated = True
resource_purpose = 'roles of an organization'
def get_queryset(self):
po = self.get_parent_object()

View File

@@ -8,8 +8,6 @@ import operator
from collections import OrderedDict
from django.conf import settings
from django.core.cache import cache
from django.db import connection
from django.utils.encoding import smart_str
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
@@ -23,21 +21,17 @@ from rest_framework import status
import requests
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx import MODE
from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
from awx.main.tasks.system import clear_setting_cache
from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ
from awx.main.utils.analytics_proxy import TokenError
from awx.main.utils.licensing import get_licenser
logger = logging.getLogger('awx.api.views.root')
@@ -48,10 +42,8 @@ class ApiRootView(APIView):
name = _('REST API')
versioning_class = URLPathVersioning
swagger_topic = 'Versioning'
resource_purpose = 'api root and version information'
@method_decorator(ensure_csrf_cookie)
@extend_schema_if_available(extensions={"x-ai-description": "List supported API versions"})
def get(self, request, format=None):
'''List supported API versions'''
v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
@@ -63,16 +55,14 @@ class ApiRootView(APIView):
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
if MODE == 'development':
data['docs'] = drf_reverse('api:schema-swagger-ui')
data['swagger'] = drf_reverse('api:schema-swagger-ui')
return Response(data)
class ApiVersionRootView(APIView):
permission_classes = (AllowAny,)
swagger_topic = 'Versioning'
resource_purpose = 'api top-level resources'
@extend_schema_if_available(extensions={"x-ai-description": "List top-level API resources"})
def get(self, request, format=None):
'''List top level resources'''
data = OrderedDict()
@@ -132,7 +122,6 @@ class ApiVersionRootView(APIView):
class ApiV2RootView(ApiVersionRootView):
name = _('Version 2')
resource_purpose = 'api v2 root'
class ApiV2PingView(APIView):
@@ -144,11 +133,7 @@ class ApiV2PingView(APIView):
authentication_classes = ()
name = _('Ping')
swagger_topic = 'System Configuration'
resource_purpose = 'basic instance information'
@extend_schema_if_available(
extensions={'x-ai-description': 'Return basic information about this instance'},
)
def get(self, request, format=None):
"""Return some basic information about this instance
@@ -183,64 +168,27 @@ class ApiV2SubscriptionView(APIView):
permission_classes = (IsAuthenticated,)
name = _('Subscriptions')
swagger_topic = 'System Configuration'
resource_purpose = 'aap subscription validation'
def check_permissions(self, request):
super(ApiV2SubscriptionView, self).check_permissions(request)
if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
self.permission_denied(request) # Raises PermissionDenied exception.
@extend_schema_if_available(
extensions={'x-ai-description': 'List valid AAP subscriptions'},
)
def post(self, request):
data = request.data.copy()
if data.get('subscriptions_password') == '$encrypted$':
data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
try:
user = None
pw = None
basic_auth = False
# determine if the credentials are for basic auth or not
if data.get('subscriptions_client_id'):
user, pw = data.get('subscriptions_client_id'), data.get('subscriptions_client_secret')
if pw == '$encrypted$':
pw = settings.SUBSCRIPTIONS_CLIENT_SECRET
elif data.get('subscriptions_username'):
user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
if pw == '$encrypted$':
pw = settings.SUBSCRIPTIONS_PASSWORD
basic_auth = True
if not user or not pw:
return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)
user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
with set_environ(**settings.AWX_TASK_ENV):
validated = get_licenser().validate_rh(user, pw, basic_auth)
# update settings if the credentials were valid
if basic_auth:
if user:
settings.SUBSCRIPTIONS_USERNAME = user
if pw:
settings.SUBSCRIPTIONS_PASSWORD = pw
# mutual exclusion for basic auth and service account
# only one should be set at a given time so that
# config/attach/ knows which credentials to use
settings.SUBSCRIPTIONS_CLIENT_ID = ""
settings.SUBSCRIPTIONS_CLIENT_SECRET = ""
else:
if user:
settings.SUBSCRIPTIONS_CLIENT_ID = user
if pw:
settings.SUBSCRIPTIONS_CLIENT_SECRET = pw
# mutual exclusion for basic auth and service account
settings.SUBSCRIPTIONS_USERNAME = ""
settings.SUBSCRIPTIONS_PASSWORD = ""
validated = get_licenser().validate_rh(user, pw)
if user:
settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
if pw:
settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
except Exception as exc:
msg = _("Invalid Subscription")
if isinstance(exc, TokenError) or (
isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401
):
if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
msg = _("The provided credentials are invalid (HTTP 401).")
elif isinstance(exc, requests.exceptions.ProxyError):
msg = _("Unable to connect to proxy server.")
@@ -259,37 +207,24 @@ class ApiV2AttachView(APIView):
permission_classes = (IsAuthenticated,)
name = _('Attach Subscription')
swagger_topic = 'System Configuration'
resource_purpose = 'subscription attachment'
def check_permissions(self, request):
super(ApiV2AttachView, self).check_permissions(request)
if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
self.permission_denied(request) # Raises PermissionDenied exception.
@extend_schema_if_available(
extensions={'x-ai-description': 'Attach a subscription'},
)
def post(self, request):
data = request.data.copy()
subscription_id = data.get('subscription_id', None)
if not subscription_id:
return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
# Ensure we always use the latest subscription credentials
cache.delete_many(['SUBSCRIPTIONS_CLIENT_ID', 'SUBSCRIPTIONS_CLIENT_SECRET', 'SUBSCRIPTIONS_USERNAME', 'SUBSCRIPTIONS_PASSWORD'])
user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
basic_auth = False
if not (user and pw):
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
basic_auth = True
if not (user and pw):
return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)
if subscription_id and user and pw:
pool_id = data.get('pool_id', None)
if not pool_id:
return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
if pool_id and user and pw:
data = request.data.copy()
try:
with set_environ(**settings.AWX_TASK_ENV):
validated = get_licenser().validate_rh(user, pw, basic_auth)
validated = get_licenser().validate_rh(user, pw)
except Exception as exc:
msg = _("Invalid Subscription")
if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
@@ -303,12 +238,10 @@ class ApiV2AttachView(APIView):
else:
logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
for sub in validated:
if sub['subscription_id'] == subscription_id:
if sub['pool_id'] == pool_id:
sub['valid_key'] = True
settings.LICENSE = sub
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
return Response(sub)
return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST)
@@ -318,20 +251,17 @@ class ApiV2ConfigView(APIView):
permission_classes = (IsAuthenticated,)
name = _('Configuration')
swagger_topic = 'System Configuration'
resource_purpose = 'system configuration and license management'
def check_permissions(self, request):
super(ApiV2ConfigView, self).check_permissions(request)
if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
self.permission_denied(request) # Raises PermissionDenied exception.
@extend_schema_if_available(
extensions={'x-ai-description': 'Return various configuration settings'},
)
def get(self, request, format=None):
'''Return various sitewide configuration settings'''
license_data = get_licenser().validate()
if not license_data.get('valid_key', False):
license_data = {}
@@ -366,7 +296,6 @@ class ApiV2ConfigView(APIView):
return Response(data)
@extend_schema_if_available(extensions={"x-ai-description": "Add or update a subscription manifest license"})
def post(self, request):
if not isinstance(request.data, dict):
return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
@@ -396,7 +325,6 @@ class ApiV2ConfigView(APIView):
try:
license_data_validated = get_licenser().license_from_manifest(license_data)
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
except Exception:
logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
@@ -412,13 +340,9 @@ class ApiV2ConfigView(APIView):
logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)
@extend_schema_if_available(
extensions={'x-ai-description': 'Remove the current subscription'},
)
def delete(self, request):
try:
settings.LICENSE = {}
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
return Response(status=status.HTTP_204_NO_CONTENT)
except Exception:
# FIX: Log
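Distilled from the interleaved old and new lines above: the newer flow prefers service-account credentials (client id/secret), falls back to basic auth, resolves the '$encrypted$' placeholder from settings, and blanks whichever pair was not used so config/attach/ knows which to pick. A sketch of just the resolution step:

def resolve_subscription_credentials(data, settings):
    # Service-account credentials win when present.
    if data.get('subscriptions_client_id'):
        user = data['subscriptions_client_id']
        pw = data.get('subscriptions_client_secret')
        if pw == '$encrypted$':
            pw = settings.SUBSCRIPTIONS_CLIENT_SECRET
        return user, pw, False           # basic_auth=False
    # Otherwise fall back to username/password (basic auth).
    user = data.get('subscriptions_username')
    pw = data.get('subscriptions_password')
    if pw == '$encrypted$':
        pw = settings.SUBSCRIPTIONS_PASSWORD
    return user, pw, True                # basic_auth=True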

View File

@@ -11,7 +11,6 @@ from rest_framework import status
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.api import serializers
from awx.api.generics import APIView, GenericAPIView
@@ -25,7 +24,6 @@ logger = logging.getLogger('awx.api.views.webhooks')
class WebhookKeyView(GenericAPIView):
serializer_class = serializers.EmptySerializer
permission_classes = (WebhookKeyPermission,)
resource_purpose = 'webhook key management'
def get_queryset(self):
qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
@@ -33,13 +31,11 @@ class WebhookKeyView(GenericAPIView):
return super().get_queryset()
@extend_schema_if_available(extensions={"x-ai-description": "Get the webhook key for a template"})
def get(self, request, *args, **kwargs):
obj = self.get_object()
return Response({'webhook_key': obj.webhook_key})
@extend_schema_if_available(extensions={"x-ai-description": "Rotate the webhook key for a template"})
def post(self, request, *args, **kwargs):
obj = self.get_object()
obj.rotate_webhook_key()
@@ -56,7 +52,6 @@ class WebhookReceiverBase(APIView):
authentication_classes = ()
ref_keys = {}
resource_purpose = 'webhook receiver for triggering jobs'
def get_queryset(self):
qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
@@ -132,8 +127,7 @@ class WebhookReceiverBase(APIView):
raise PermissionDenied
@csrf_exempt
@extend_schema_if_available(extensions={"x-ai-description": "Receive a webhook event and trigger a job"})
def post(self, request, *args, **kwargs_in):
def post(self, request, *args, **kwargs):
# Ensure that the full contents of the request are captured for multiple uses.
request.body
@@ -181,7 +175,6 @@ class WebhookReceiverBase(APIView):
class GithubWebhookReceiver(WebhookReceiverBase):
service = 'github'
resource_purpose = 'github webhook receiver'
ref_keys = {
'pull_request': 'pull_request.head.sha',
@@ -219,7 +212,6 @@ class GithubWebhookReceiver(WebhookReceiverBase):
class GitlabWebhookReceiver(WebhookReceiverBase):
service = 'gitlab'
resource_purpose = 'gitlab webhook receiver'
ref_keys = {'Push Hook': 'checkout_sha', 'Tag Push Hook': 'checkout_sha', 'Merge Request Hook': 'object_attributes.last_commit.id'}
@@ -258,7 +250,6 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
class BitbucketDcWebhookReceiver(WebhookReceiverBase):
service = 'bitbucket_dc'
resource_purpose = 'bitbucket data center webhook receiver'
ref_keys = {
'repo:refs_changed': 'changes.0.toHash',
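The ref_keys values in these receivers ('pull_request.head.sha', 'changes.0.toHash', ...) are dotted paths into the webhook payload, with numeric segments indexing into lists. The resolver itself is outside this diff; a plausible implementation:

from functools import reduce

def get_by_path(payload, path):
    def step(obj, key):
        if isinstance(obj, list):
            return obj[int(key)]  # numeric segment, e.g. the '0' in 'changes.0.toHash'
        return obj.get(key)
    return reduce(step, path.split('.'), payload)

get_by_path({'pull_request': {'head': {'sha': 'abc123'}}},
            'pull_request.head.sha')  # -> 'abc123'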

View File

@@ -6,11 +6,11 @@ import urllib.parse as urlparse
from collections import OrderedDict
# Django
from django.core.validators import URLValidator, DomainNameValidator, _lazy_re_compile
from django.core.validators import URLValidator, _lazy_re_compile
from django.utils.translation import gettext_lazy as _
# Django REST Framework
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, FloatField # noqa
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField # noqa
from rest_framework.serializers import PrimaryKeyRelatedField # noqa
# AWX
@@ -160,11 +160,10 @@ class StringListIsolatedPathField(StringListField):
class URLField(CharField):
# these lines set up a custom regex that allow numbers in the
# top-level domain
tld_re = (
r'\.' # dot
r'(?!-)' # can't start with a dash
r'(?:[a-z' + DomainNameValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
r'|xn--[a-z0-9]{1,59})' # or punycode label
r'(?<!-)' # can't end with a dash
r'\.?' # may have a trailing dot
@@ -208,8 +207,7 @@ class URLField(CharField):
if self.allow_plain_hostname:
try:
url_parts = urlparse.urlsplit(value)
looks_like_ipv6 = bool(url_parts.netloc and url_parts.netloc.startswith('[') and url_parts.netloc.endswith(']'))
if not looks_like_ipv6 and url_parts.hostname and '.' not in url_parts.hostname:
if url_parts.hostname and '.' not in url_parts.hostname:
netloc = '{}.local'.format(url_parts.hostname)
if url_parts.port:
netloc = '{}:{}'.format(netloc, url_parts.port)
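The added bracket check matters because urlsplit treats IPv6 literals specially: netloc keeps the brackets while hostname strips them, and the address contains no dots, so the plain-hostname fallback would otherwise rewrite it into nonsense like '2001:db8::1.local':

from urllib.parse import urlsplit

parts = urlsplit('https://[2001:db8::1]:8080/path')
parts.netloc    # '[2001:db8::1]:8080' -- brackets preserved
parts.hostname  # '2001:db8::1'        -- brackets stripped, no '.' present
parts.port      # 8080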

View File

@@ -27,5 +27,5 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
def prefill_rh_credentials(apps, schema_editor):
_migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_CLIENT_ID', encrypted=False)
_migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_CLIENT_SECRET', encrypted=True)
_migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
_migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)

View File

@@ -38,7 +38,6 @@ class SettingsRegistry(object):
if setting in self._registry:
raise ImproperlyConfigured('Setting "{}" is already registered.'.format(setting))
category = kwargs.setdefault('category', None)
kwargs.setdefault('required', False) # No setting is ordinarily required
category_slug = kwargs.setdefault('category_slug', slugify(category or '') or None)
if category_slug in {'all', 'changed', 'user-defaults'}:
raise ImproperlyConfigured('"{}" is a reserved category slug.'.format(category_slug))

View File

@@ -128,41 +128,3 @@ class TestURLField:
else:
with pytest.raises(ValidationError):
field.run_validators(url)
@pytest.mark.parametrize(
"url, expect_error",
[
("https://[1:2:3]", True),
("http://[1:2:3]", True),
("https://[2001:db8:3333:4444:5555:6666:7777:8888", True),
("https://2001:db8:3333:4444:5555:6666:7777:8888", True),
("https://[2001:db8:3333:4444:5555:6666:7777:8888]", False),
("https://[::1]", False),
("https://[::]", False),
("https://[2001:db8::1]", False),
("https://[2001:db8:0:0:0:0:1:1]", False),
("https://[fe80::2%eth0]", True), # ipv6 scope identifier
("https://[fe80:0:0:0:200:f8ff:fe21:67cf]", False),
("https://[::ffff:192.168.1.10]", False),
("https://[0:0:0:0:0:ffff:c000:0201]", False),
("https://[2001:0db8:000a:0001:0000:0000:0000:0000]", False),
("https://[2001:db8:a:1::]", False),
("https://[ff02::1]", False),
("https://[ff02:0:0:0:0:0:0:1]", False),
("https://[fc00::1]", False),
("https://[fd12:3456:789a:1::1]", False),
("https://[2001:db8::abcd:ef12:3456:7890]", False),
("https://[2001:db8:0000:abcd:0000:ef12:0000:3456]", False),
("https://[::ffff:10.0.0.1]", False),
("https://[2001:db8:cafe::]", False),
("https://[2001:db8:cafe:0:0:0:0:0]", False),
("https://[fe80::210:f3ff:fedf:4567%3]", True), # ipv6 scope identifier, numerical interface
],
)
def test_ipv6_urls(self, url, expect_error):
field = URLField()
if expect_error:
with pytest.raises(ValidationError, match="Enter a valid URL"):
field.run_validators(url)
else:
field.run_validators(url)

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest
urlpatterns = [
re_path(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
re_path(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),

View File

@@ -31,7 +31,7 @@ from awx.conf.models import Setting
from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
from awx.conf import settings_registry
from awx.main.utils.external_logging import reconfigure_rsyslog
from ansible_base.lib.utils.schema import extend_schema_if_available
SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'name'))
@@ -42,10 +42,6 @@ class SettingCategoryList(ListAPIView):
filter_backends = []
name = _('Setting Categories')
@extend_schema_if_available(extensions={"x-ai-description": "A list of additional API endpoints related to settings."})
def get(self, request, *args, **kwargs):
return super().get(request, *args, **kwargs)
def get_queryset(self):
setting_categories = []
categories = settings_registry.get_registered_categories()
@@ -67,10 +63,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
filter_backends = []
name = _('Setting Detail')
@extend_schema_if_available(extensions={"x-ai-description": "Update system settings."})
def patch(self, request, *args, **kwargs):
return super().patch(request, *args, **kwargs)
def get_queryset(self):
self.category_slug = self.kwargs.get('category_slug', 'all')
all_category_slugs = list(settings_registry.get_registered_categories().keys())

View File

@@ -639,9 +639,7 @@ class UserAccess(BaseAccess):
prefetch_related = ('resource',)
def filtered_queryset(self):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (
Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
qs = User.objects.all()
else:
qs = (
@@ -1226,9 +1224,7 @@ class TeamAccess(BaseAccess):
)
def filtered_queryset(self):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (
Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
return self.model.objects.all()
return self.model.objects.filter(
Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) | Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
@@ -2102,7 +2098,7 @@ class WorkflowJobAccess(BaseAccess):
def filtered_queryset(self):
return WorkflowJob.objects.filter(
Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
| Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
| Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
)
def can_read(self, obj):
@@ -2500,11 +2496,12 @@ class UnifiedJobAccess(BaseAccess):
def filtered_queryset(self):
inv_pk_qs = Inventory._accessible_pk_qs(Inventory, self.user, 'read_role')
org_auditor_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
qs = self.model.objects.filter(
Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
| Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs)
| Q(adhoccommand__inventory__id__in=inv_pk_qs)
| Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
| Q(organization__in=org_auditor_qs)
)
return qs
@@ -2568,7 +2565,7 @@ class NotificationTemplateAccess(BaseAccess):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
return self.model.access_qs(self.user, 'view')
return self.model.objects.filter(
Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=Organization.access_qs(self.user, 'audit'))
Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=self.user.auditor_of_organizations)
).distinct()
@check_superuser
@@ -2603,7 +2600,7 @@ class NotificationAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate'))
| Q(notification_template__organization__in=Organization.access_qs(self.user, 'audit'))
| Q(notification_template__organization__in=self.user.auditor_of_organizations)
).distinct()
def can_delete(self, obj):
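A pattern worth naming across these access classes: each permission path becomes one Q object, and filtered_queryset ORs them into a single WHERE clause instead of unioning separate querysets. In isolation (the model import and the pk querysets are assumed):

from django.db.models import Q

def filtered_workflow_jobs(template_pks, auditor_org_pks):
    return WorkflowJob.objects.filter(
        Q(unified_job_template__in=template_pks)  # readable via template access
        | Q(organization__in=auditor_org_pks)     # readable via org audit access
    )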

View File

@@ -1,17 +1,15 @@
# Python
import logging
# Dispatcherd
from dispatcherd.publish import task
# AWX
from awx.main.analytics.subsystem_metrics import DispatcherMetrics, CallbackReceiverMetrics
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_task_queuename
logger = logging.getLogger('awx.main.scheduler')
@task(queue=get_task_queuename, timeout=300, on_duplicate='discard')
@task(queue=get_task_queuename)
def send_subsystem_metrics():
DispatcherMetrics().send_metrics()
CallbackReceiverMetrics().send_metrics()

View File

@@ -1,6 +1,8 @@
import datetime
import asyncio
import logging
import redis
import redis.asyncio
import re
from prometheus_client import (
@@ -13,7 +15,7 @@ from prometheus_client import (
)
from django.conf import settings
from awx.main.utils.redis import get_redis_client, get_redis_client_async
BROADCAST_WEBSOCKET_REDIS_KEY_NAME = 'broadcast_websocket_stats'
@@ -64,8 +66,6 @@ class FixedSlidingWindow:
class RelayWebsocketStatsManager:
_redis_client = None # Cached Redis client for get_stats_sync()
def __init__(self, local_hostname):
self._local_hostname = local_hostname
self._stats = dict()
@@ -80,7 +80,7 @@ class RelayWebsocketStatsManager:
async def run_loop(self):
try:
redis_conn = get_redis_client_async()
redis_conn = await redis.asyncio.Redis.from_url(settings.BROKER_URL)
while True:
stats_data_str = ''.join(stat.serialize() for stat in self._stats.values())
await redis_conn.set(self._redis_key, stats_data_str)
@@ -103,10 +103,8 @@ class RelayWebsocketStatsManager:
"""
Stringified version of all the stats
"""
# Reuse cached Redis client to avoid creating new connection pools on every call
if cls._redis_client is None:
cls._redis_client = get_redis_client()
stats_str = cls._redis_client.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
redis_conn = redis.Redis.from_url(settings.BROKER_URL)
stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))
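This hunk replaces a per-call redis.Redis.from_url(settings.BROKER_URL) with a client cached on the class and built by awx.main.utils.redis.get_redis_client, so repeated get_stats_sync() calls reuse one connection pool. The caching pattern in isolation:

class StatsReader:
    _redis_client = None  # shared across calls, built lazily

    @classmethod
    def get_stats_sync(cls):
        if cls._redis_client is None:
            cls._redis_client = get_redis_client()  # from awx.main.utils.redis
        return cls._redis_client.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''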

View File

@@ -142,7 +142,7 @@ def config(since, **kwargs):
return {
'platform': {
'system': platform.system(),
'dist': (distro.name(), distro.version(), distro.codename()),
'dist': distro.linux_distribution(),
'release': platform.release(),
'type': install_type,
},
@@ -487,7 +487,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}'))
AND main_unifiedjob.launch_type != 'sync'
ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(), until.isoformat())
'''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
@@ -548,7 +550,9 @@ def workflow_job_node_table(since, full_path, until, **kwargs):
) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}')
ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(), until.isoformat())
'''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)
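distro.linux_distribution() has been deprecated in the distro package for some time; the replacement reassembles the same (name, version, codename) triple from the individual accessors:

import distro

dist = (distro.name(), distro.version(), distro.codename())
# e.g. ('Fedora Linux', '39', '')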

Some files were not shown because too many files have changed in this diff.