mirror of
https://github.com/ansible/awx.git
synced 2026-02-08 13:04:43 -03:30
Compare commits
2 Commits
openapi_sp
...
AAP-44075
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1417b1e33e | ||
|
|
73187c61f4 |
@@ -2,7 +2,7 @@
|
||||
|
||||
codecov:
|
||||
notify:
|
||||
after_n_builds: 9 # Number of test matrix+lint jobs uploading coverage
|
||||
after_n_builds: 6 # Number of test matrix+lint jobs uploading coverage
|
||||
wait_for_ci: false
|
||||
|
||||
require_ci_to_pass: false
|
||||
|
||||
17
.coveragerc
17
.coveragerc
@@ -17,23 +17,6 @@ exclude_also =
|
||||
|
||||
[run]
|
||||
branch = True
|
||||
# NOTE: `disable_warnings` is needed when `pytest-cov` runs in tandem
|
||||
# NOTE: with `pytest-xdist`. These warnings are false negative in this
|
||||
# NOTE: context.
|
||||
#
|
||||
# NOTE: It's `coveragepy` that emits the warnings and previously they
|
||||
# NOTE: wouldn't get on the radar of `pytest`'s `filterwarnings`
|
||||
# NOTE: mechanism. This changed, however, with `pytest >= 8.4`. And
|
||||
# NOTE: since we set `filterwarnings = error`, those warnings are being
|
||||
# NOTE: raised as exceptions, cascading into `pytest`'s internals and
|
||||
# NOTE: causing tracebacks and crashes of the test sessions.
|
||||
#
|
||||
# Ref:
|
||||
# * https://github.com/pytest-dev/pytest-cov/issues/693
|
||||
# * https://github.com/pytest-dev/pytest-cov/pull/695
|
||||
# * https://github.com/pytest-dev/pytest-cov/pull/696
|
||||
disable_warnings =
|
||||
module-not-measured
|
||||
omit =
|
||||
awx/main/migrations/*
|
||||
awx/settings/defaults.py
|
||||
|
||||
2
.github/CODE_OF_CONDUCT.md
vendored
2
.github/CODE_OF_CONDUCT.md
vendored
@@ -1,3 +1,3 @@
|
||||
# Community Code of Conduct
|
||||
|
||||
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
|
||||
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
2
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -13,7 +13,7 @@ body:
|
||||
attributes:
|
||||
label: Please confirm the following
|
||||
options:
|
||||
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
|
||||
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
|
||||
required: true
|
||||
- label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
|
||||
required: true
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/config.yml
vendored
2
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -5,7 +5,7 @@ contact_links:
|
||||
url: https://github.com/ansible/awx#get-involved
|
||||
about: For general debugging or technical support please see the Get Involved section of our readme.
|
||||
- name: 📝 Ansible Code of Conduct
|
||||
url: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser
|
||||
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser
|
||||
about: AWX uses the Ansible Code of Conduct; ❤ Be nice to other members of the community. ☮ Behave.
|
||||
- name: 💼 For Enterprise
|
||||
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
2
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -13,7 +13,7 @@ body:
|
||||
attributes:
|
||||
label: Please confirm the following
|
||||
options:
|
||||
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
|
||||
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
|
||||
required: true
|
||||
- label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
|
||||
required: true
|
||||
|
||||
9
.github/PULL_REQUEST_TEMPLATE.md
vendored
9
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -4,8 +4,7 @@
|
||||
<!---
|
||||
If you are fixing an existing issue, please include "related #nnn" in your
|
||||
commit message and your description; but you should still explain what
|
||||
the change does. Also please make sure that if this PR has an attached JIRA, put AAP-<number>
|
||||
in as the first entry for your PR title.
|
||||
the change does.
|
||||
-->
|
||||
|
||||
##### ISSUE TYPE
|
||||
@@ -17,11 +16,17 @@ in as the first entry for your PR title.
|
||||
##### COMPONENT NAME
|
||||
<!--- Name of the module/plugin/module/task -->
|
||||
- API
|
||||
- UI
|
||||
- Collection
|
||||
- CLI
|
||||
- Docs
|
||||
- Other
|
||||
|
||||
##### AWX VERSION
|
||||
<!--- Paste verbatim output from `make VERSION` between quotes below -->
|
||||
```
|
||||
|
||||
```
|
||||
|
||||
|
||||
##### ADDITIONAL INFORMATION
|
||||
|
||||
2
.github/actions/awx_devel_image/action.yml
vendored
2
.github/actions/awx_devel_image/action.yml
vendored
@@ -11,6 +11,8 @@ inputs:
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: ./.github/actions/setup-python
|
||||
|
||||
- name: Set lower case owner name
|
||||
shell: bash
|
||||
run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
|
||||
|
||||
2
.github/actions/run_awx_devel/action.yml
vendored
2
.github/actions/run_awx_devel/action.yml
vendored
@@ -36,7 +36,7 @@ runs:
|
||||
|
||||
- name: Upgrade ansible-core
|
||||
shell: bash
|
||||
run: python -m pip install --upgrade ansible-core
|
||||
run: python3 -m pip install --upgrade ansible-core
|
||||
|
||||
- name: Install system deps
|
||||
shell: bash
|
||||
|
||||
7
.github/dependabot.yml
vendored
7
.github/dependabot.yml
vendored
@@ -8,10 +8,3 @@ updates:
|
||||
labels:
|
||||
- "docs"
|
||||
- "dependencies"
|
||||
- package-ecosystem: "pip"
|
||||
directory: "requirements/"
|
||||
schedule:
|
||||
interval: "daily" #run daily until we trust it, then back this off to weekly
|
||||
open-pull-requests-limit: 2
|
||||
labels:
|
||||
- "dependencies"
|
||||
|
||||
6
.github/triage_replies.md
vendored
6
.github/triage_replies.md
vendored
@@ -70,10 +70,10 @@ Thank you for your submission and for supporting AWX!
|
||||
- Hello, we'd love to help, but we need a little more information about the problem you're having. Screenshots, log outputs, or any reproducers would be very helpful.
|
||||
|
||||
### Code of Conduct
|
||||
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html
|
||||
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html
|
||||
|
||||
### EE Contents / Community General
|
||||
- Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://docs.ansible.com/projects/builder/en/stable/ \
|
||||
- Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://ansible-builder.readthedocs.io/en/stable/ \
|
||||
\
|
||||
The Ansible Community is looking at building an EE that corresponds to all of the collections inside the ansible package. That may help you if and when it happens; see https://github.com/ansible-community/community-topics/issues/31 for details.
|
||||
|
||||
@@ -88,7 +88,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
|
||||
- Hello, we think your idea is good! Please consider contributing a PR for this following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
|
||||
|
||||
### Receptor
|
||||
- You can find the receptor docs here: https://docs.ansible.com/projects/receptor/en/latest/
|
||||
- You can find the receptor docs here: https://receptor.readthedocs.io/en/latest/
|
||||
- Hello, your issue seems related to receptor. Could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!
|
||||
|
||||
### Ansible Engine not AWX
|
||||
|
||||
72
.github/workflows/api_schema_check.yml
vendored
72
.github/workflows/api_schema_check.yml
vendored
@@ -1,72 +0,0 @@
|
||||
---
|
||||
name: API Schema Change Detection
|
||||
env:
|
||||
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
||||
CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DEV_DOCKER_OWNER: ${{ github.repository_owner }}
|
||||
COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
|
||||
UPSTREAM_REPOSITORY_ID: 91594105
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- devel
|
||||
- release_**
|
||||
- feature_**
|
||||
- stable-**
|
||||
|
||||
jobs:
|
||||
api-schema-detection:
|
||||
name: Detect API Schema Changes
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build awx_devel image for schema check
|
||||
uses: ./.github/actions/awx_devel_image
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
|
||||
|
||||
- name: Detect API schema changes
|
||||
id: schema-check
|
||||
continue-on-error: true
|
||||
run: |
|
||||
AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
|
||||
AWX_DOCKER_CMD='make detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}' \
|
||||
make docker-runner 2>&1 | tee schema-diff.txt
|
||||
exit ${PIPESTATUS[0]}
|
||||
|
||||
- name: Add schema diff to job summary
|
||||
if: always()
|
||||
# show text and if for some reason, it can't be generated, state that it can't be.
|
||||
run: |
|
||||
echo "## API Schema Change Detection Results" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
if [ -f schema-diff.txt ]; then
|
||||
if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
|
||||
echo "### Schema changes detected" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
# Truncate to first 1000 lines to stay under GitHub's 1MB summary limit
|
||||
TOTAL_LINES=$(wc -l < schema-diff.txt)
|
||||
if [ $TOTAL_LINES -gt 1000 ]; then
|
||||
echo "_Showing first 1000 of ${TOTAL_LINES} lines. See job logs or download artifact for full diff._" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
echo '```diff' >> $GITHUB_STEP_SUMMARY
|
||||
head -n 1000 schema-diff.txt >> $GITHUB_STEP_SUMMARY
|
||||
echo '```' >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "### No schema changes detected" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
else
|
||||
echo "### Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
176
.github/workflows/ci.yml
vendored
176
.github/workflows/ci.yml
vendored
@@ -32,9 +32,18 @@ jobs:
|
||||
- name: api-lint
|
||||
command: /var/lib/awx/venv/awx/bin/tox -e linters
|
||||
coverage-upload-name: ""
|
||||
- name: api-swagger
|
||||
command: /start_tests.sh swagger
|
||||
coverage-upload-name: ""
|
||||
- name: awx-collection
|
||||
command: /start_tests.sh test_collection_all
|
||||
coverage-upload-name: "awx-collection"
|
||||
- name: api-schema
|
||||
command: >-
|
||||
/start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
|
||||
github.event.pull_request.base.ref || github.ref_name
|
||||
}}
|
||||
coverage-upload-name: ""
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -54,17 +63,6 @@ jobs:
|
||||
AWX_DOCKER_CMD='${{ matrix.tests.command }}'
|
||||
make docker-runner
|
||||
|
||||
- name: Inject PR number into coverage.xml
|
||||
if: >-
|
||||
!cancelled()
|
||||
&& github.event_name == 'pull_request'
|
||||
&& steps.make-run.outputs.cov-report-files != ''
|
||||
run: |
|
||||
if [ -f "reports/coverage.xml" ]; then
|
||||
sed -i '2i<!-- PR ${{ github.event.pull_request.number }} -->' reports/coverage.xml
|
||||
echo "Injected PR number ${{ github.event.pull_request.number }} into coverage.xml"
|
||||
fi
|
||||
|
||||
- name: Upload test coverage to Codecov
|
||||
if: >-
|
||||
!cancelled()
|
||||
@@ -104,14 +102,6 @@ jobs:
|
||||
}}
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
- name: Upload test artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.tests.name }}-artifacts
|
||||
path: reports/coverage.xml
|
||||
retention-days: 5
|
||||
|
||||
- name: Upload awx jUnit test reports
|
||||
if: >-
|
||||
!cancelled()
|
||||
@@ -142,7 +132,7 @@ jobs:
|
||||
|
||||
- uses: ./.github/actions/setup-python
|
||||
with:
|
||||
python-version: '3.13'
|
||||
python-version: '3.x'
|
||||
|
||||
- uses: ./.github/actions/run_awx_devel
|
||||
id: awx
|
||||
@@ -182,20 +172,14 @@ jobs:
|
||||
repository: ansible/awx-operator
|
||||
path: awx-operator
|
||||
|
||||
- name: Setup python, referencing action at awx relative path
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
|
||||
- uses: ./awx/.github/actions/setup-python
|
||||
with:
|
||||
python-version: '3.12'
|
||||
working-directory: awx
|
||||
|
||||
- name: Install playbook dependencies
|
||||
run: |
|
||||
python -m pip install docker
|
||||
python3 -m pip install docker
|
||||
|
||||
- name: Check Python version
|
||||
working-directory: awx
|
||||
run: |
|
||||
make print-PYTHON
|
||||
|
||||
- name: Build AWX image
|
||||
working-directory: awx
|
||||
run: |
|
||||
@@ -207,59 +191,27 @@ jobs:
|
||||
|
||||
- name: Run test deployment with awx-operator
|
||||
working-directory: awx-operator
|
||||
id: awx_operator_test
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
run: |
|
||||
set +e
|
||||
timeout 15m bash -elc '
|
||||
python -m pip install -r molecule/requirements.txt
|
||||
python -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
|
||||
$(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
|
||||
ansible-galaxy collection install -r molecule/requirements.yml
|
||||
sudo rm -f $(which kustomize)
|
||||
make kustomize
|
||||
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
|
||||
'
|
||||
rc=$?
|
||||
if [ $rc -eq 124 ]; then
|
||||
echo "timed_out=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
exit $rc
|
||||
python3 -m pip install -r molecule/requirements.txt
|
||||
python3 -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
|
||||
$(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
|
||||
ansible-galaxy collection install -r molecule/requirements.yml
|
||||
sudo rm -f $(which kustomize)
|
||||
make kustomize
|
||||
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
|
||||
env:
|
||||
AWX_TEST_IMAGE: local/awx
|
||||
AWX_TEST_VERSION: ci
|
||||
AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
|
||||
STORE_DEBUG_OUTPUT: true
|
||||
|
||||
- name: Collect awx-operator logs on timeout
|
||||
# Only run on timeout; normal failures should use molecule's built-in log collection.
|
||||
if: steps.awx_operator_test.outputs.timed_out == 'true'
|
||||
run: |
|
||||
mkdir -p "$DEBUG_OUTPUT_DIR"
|
||||
if command -v kind >/dev/null 2>&1; then
|
||||
for cluster in $(kind get clusters 2>/dev/null); do
|
||||
kind export logs "$DEBUG_OUTPUT_DIR/$cluster" --name "$cluster" || true
|
||||
done
|
||||
fi
|
||||
if command -v kubectl >/dev/null 2>&1; then
|
||||
kubectl get all -A -o wide > "$DEBUG_OUTPUT_DIR/kubectl-get-all.txt" || true
|
||||
kubectl get pods -A -o wide > "$DEBUG_OUTPUT_DIR/kubectl-get-pods.txt" || true
|
||||
kubectl describe pods -A > "$DEBUG_OUTPUT_DIR/kubectl-describe-pods.txt" || true
|
||||
fi
|
||||
docker ps -a > "$DEBUG_OUTPUT_DIR/docker-ps.txt" || true
|
||||
|
||||
- name: Upload debug output
|
||||
if: always()
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: awx-operator-debug-output
|
||||
path: ${{ env.DEBUG_OUTPUT_DIR }}
|
||||
|
||||
- name: Fail awx-operator check if test deployment failed
|
||||
if: steps.awx_operator_test.outcome != 'success'
|
||||
run: exit 1
|
||||
|
||||
collection-sanity:
|
||||
name: awx_collection sanity
|
||||
runs-on: ubuntu-latest
|
||||
@@ -328,11 +280,7 @@ jobs:
|
||||
|
||||
- uses: ./.github/actions/setup-python
|
||||
with:
|
||||
python-version: '3.13'
|
||||
|
||||
- name: Remove system ansible to avoid conflicts
|
||||
run: |
|
||||
python -m pip uninstall -y ansible ansible-core || true
|
||||
python-version: '3.x'
|
||||
|
||||
- uses: ./.github/actions/run_awx_devel
|
||||
id: awx
|
||||
@@ -343,9 +291,8 @@ jobs:
|
||||
|
||||
- name: Install dependencies for running tests
|
||||
run: |
|
||||
python -m pip install -e ./awxkit/
|
||||
python -m pip install -r awx_collection/requirements.txt
|
||||
hash -r # Rehash to pick up newly installed scripts
|
||||
python3 -m pip install -e ./awxkit/
|
||||
python3 -m pip install -r awx_collection/requirements.txt
|
||||
|
||||
- name: Run integration tests
|
||||
id: make-run
|
||||
@@ -357,7 +304,6 @@ jobs:
|
||||
echo 'password = password' >> ~/.tower_cli.cfg
|
||||
echo 'verify_ssl = false' >> ~/.tower_cli.cfg
|
||||
TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
|
||||
export PYTHONPATH="$(python -c 'import site; print(":".join(site.getsitepackages()))')${PYTHONPATH:+:$PYTHONPATH}"
|
||||
make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
|
||||
env:
|
||||
ANSIBLE_TEST_PREFER_PODMAN: 1
|
||||
@@ -389,7 +335,6 @@ jobs:
|
||||
with:
|
||||
name: coverage-${{ matrix.target-regex.name }}
|
||||
path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
|
||||
retention-days: 1
|
||||
|
||||
- uses: ./.github/actions/upload_awx_devel_logs
|
||||
if: always()
|
||||
@@ -407,26 +352,32 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
show-progress: false
|
||||
|
||||
- uses: ./.github/actions/setup-python
|
||||
with:
|
||||
python-version: '3.13'
|
||||
|
||||
- name: Remove system ansible to avoid conflicts
|
||||
run: |
|
||||
python -m pip uninstall -y ansible ansible-core || true
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Upgrade ansible-core
|
||||
run: python -m pip install --upgrade ansible-core
|
||||
run: python3 -m pip install --upgrade ansible-core
|
||||
|
||||
- name: Download coverage artifacts
|
||||
- name: Download coverage artifacts A to H
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
merge-multiple: true
|
||||
name: coverage-a-h
|
||||
path: coverage
|
||||
|
||||
- name: Download coverage artifacts I to P
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: coverage-i-p
|
||||
path: coverage
|
||||
|
||||
- name: Download coverage artifacts Z to Z
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: coverage-r-z0-9
|
||||
path: coverage
|
||||
pattern: coverage-*
|
||||
|
||||
- name: Combine coverage
|
||||
run: |
|
||||
@@ -434,17 +385,56 @@ jobs:
|
||||
mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
|
||||
cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
|
||||
cd ~/.ansible/collections/ansible_collections/awx/awx
|
||||
hash -r # Rehash to pick up newly installed scripts
|
||||
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage combine --requirements
|
||||
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage html
|
||||
ansible-test coverage combine --requirements
|
||||
ansible-test coverage html
|
||||
echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
|
||||
echo '```' >> $GITHUB_STEP_SUMMARY
|
||||
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage report >> $GITHUB_STEP_SUMMARY
|
||||
ansible-test coverage report >> $GITHUB_STEP_SUMMARY
|
||||
echo '```' >> $GITHUB_STEP_SUMMARY
|
||||
echo >> $GITHUB_STEP_SUMMARY
|
||||
echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
|
||||
echo 'Download the HTML artifacts to view the coverage report.' >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# This is a huge hack, there's no official action for removing artifacts currently.
|
||||
# Also ACTIONS_RUNTIME_URL and ACTIONS_RUNTIME_TOKEN aren't available in normal run
|
||||
# steps, so we have to use github-script to get them.
|
||||
#
|
||||
# The advantage of doing this, though, is that we save on artifact storage space.
|
||||
|
||||
- name: Get secret artifact runtime URL
|
||||
uses: actions/github-script@v6
|
||||
id: get-runtime-url
|
||||
with:
|
||||
result-encoding: string
|
||||
script: |
|
||||
const { ACTIONS_RUNTIME_URL } = process.env;
|
||||
return ACTIONS_RUNTIME_URL;
|
||||
|
||||
- name: Get secret artifact runtime token
|
||||
uses: actions/github-script@v6
|
||||
id: get-runtime-token
|
||||
with:
|
||||
result-encoding: string
|
||||
script: |
|
||||
const { ACTIONS_RUNTIME_TOKEN } = process.env;
|
||||
return ACTIONS_RUNTIME_TOKEN;
|
||||
|
||||
- name: Remove intermediary artifacts
|
||||
env:
|
||||
ACTIONS_RUNTIME_URL: ${{ steps.get-runtime-url.outputs.result }}
|
||||
ACTIONS_RUNTIME_TOKEN: ${{ steps.get-runtime-token.outputs.result }}
|
||||
run: |
|
||||
echo "::add-mask::${ACTIONS_RUNTIME_TOKEN}"
|
||||
artifacts=$(
|
||||
curl -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
|
||||
${ACTIONS_RUNTIME_URL}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview \
|
||||
| jq -r '.value | .[] | select(.name | startswith("coverage-")) | .url'
|
||||
)
|
||||
|
||||
for artifact in $artifacts; do
|
||||
curl -i -X DELETE -H "Accept: application/json;api-version=6.0-preview" -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" "$artifact"
|
||||
done
|
||||
|
||||
- name: Upload coverage report as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
|
||||
1
.github/workflows/devel_images.yml
vendored
1
.github/workflows/devel_images.yml
vendored
@@ -10,7 +10,6 @@ on:
|
||||
- devel
|
||||
- release_*
|
||||
- feature_*
|
||||
- stable-*
|
||||
jobs:
|
||||
push-development-images:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
@@ -20,4 +20,4 @@ jobs:
|
||||
run: |
|
||||
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
|
||||
ansible localhost -c local -m aws_s3 \
|
||||
-a "bucket=awx-public-ci-files object=${{ github.event.repository.name }}/${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
|
||||
-a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
|
||||
|
||||
248
.github/workflows/sonarcloud_pr.yml
vendored
248
.github/workflows/sonarcloud_pr.yml
vendored
@@ -1,248 +0,0 @@
|
||||
# SonarCloud Analysis Workflow for awx
|
||||
#
|
||||
# This workflow runs SonarCloud analysis triggered by CI workflow completion.
|
||||
# It is split into two separate jobs for clarity and maintainability:
|
||||
#
|
||||
# FLOW: CI completes → workflow_run triggers this workflow → appropriate job runs
|
||||
#
|
||||
# JOB 1: sonar-pr-analysis (for PRs)
|
||||
# - Triggered by: workflow_run (CI on pull_request)
|
||||
# - Steps: Download coverage → Get PR info → Get changed files → Run SonarCloud PR analysis
|
||||
# - Scans: All changed files in the PR (Python, YAML, JSON, etc.)
|
||||
# - Quality gate: Focuses on new/changed code in PR only
|
||||
#
|
||||
# JOB 2: sonar-branch-analysis (for long-lived branches)
|
||||
# - Triggered by: workflow_run (CI on push to devel)
|
||||
# - Steps: Download coverage → Run SonarCloud branch analysis
|
||||
# - Scans: Full codebase
|
||||
# - Quality gate: Focuses on overall project health
|
||||
#
|
||||
# This ensures coverage data is always available from CI before analysis runs.
|
||||
#
|
||||
# What files are scanned:
|
||||
# - All files in the repository that SonarCloud can analyze
|
||||
# - Excludes: tests, scripts, dev environments, external collections (see sonar-project.properties)
|
||||
|
||||
|
||||
# With much help from:
|
||||
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/30
|
||||
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/32
|
||||
name: SonarCloud
|
||||
on:
|
||||
workflow_run: # This is triggered by CI being completed.
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
permissions: read-all
|
||||
jobs:
|
||||
sonar-pr-analysis:
|
||||
name: SonarCloud PR Analysis
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event.workflow_run.conclusion == 'success' &&
|
||||
github.event.workflow_run.event == 'pull_request' &&
|
||||
github.repository == 'ansible/awx'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Download all individual coverage artifacts from CI workflow
|
||||
- name: Download coverage artifacts
|
||||
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
workflow: CI
|
||||
run_id: ${{ github.event.workflow_run.id }}
|
||||
pattern: api-test-artifacts
|
||||
|
||||
# Extract PR metadata from workflow_run event
|
||||
- name: Set PR metadata and prepare files for analysis
|
||||
env:
|
||||
COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
REPO_NAME: ${{ github.event.repository.full_name }}
|
||||
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Find all downloaded coverage XML files
|
||||
coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
|
||||
echo "Found coverage files: $coverage_files"
|
||||
echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
|
||||
|
||||
# Extract PR number from first coverage.xml file found
|
||||
first_coverage=$(find . -name "coverage.xml" -type f | head -1)
|
||||
if [ -f "$first_coverage" ]; then
|
||||
PR_NUMBER=$(grep -m 1 '<!-- PR' "$first_coverage" | awk '{print $3}' || echo "")
|
||||
else
|
||||
PR_NUMBER=""
|
||||
fi
|
||||
|
||||
echo "🔍 SonarCloud Analysis Decision Summary"
|
||||
echo "========================================"
|
||||
echo "├── CI Event: ✅ Pull Request"
|
||||
echo "├── PR Number from coverage.xml: #${PR_NUMBER:-<not found>}"
|
||||
|
||||
if [ -z "$PR_NUMBER" ]; then
|
||||
echo "##[error]❌ FATAL: PR number not found in coverage.xml"
|
||||
echo "##[error]This job requires a PR number to run PR analysis."
|
||||
echo "##[error]The ci workflow should have injected the PR number into coverage.xml."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get PR metadata from GitHub API
|
||||
PR_DATA=$(gh api "repos/$REPO_NAME/pulls/$PR_NUMBER")
|
||||
PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')
|
||||
PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')
|
||||
|
||||
# Print summary
|
||||
echo "🔍 SonarCloud Analysis Decision Summary"
|
||||
echo "========================================"
|
||||
echo "├── CI Event: ✅ Pull Request"
|
||||
echo "├── PR Number: #$PR_NUMBER"
|
||||
echo "├── Base Branch: $PR_BASE"
|
||||
echo "├── Head Branch: $PR_HEAD"
|
||||
echo "├── Repo: $REPO_NAME"
|
||||
|
||||
# Export to GitHub env for later steps
|
||||
echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
|
||||
echo "PR_BASE=$PR_BASE" >> $GITHUB_ENV
|
||||
echo "PR_HEAD=$PR_HEAD" >> $GITHUB_ENV
|
||||
echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_ENV
|
||||
echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
|
||||
|
||||
# Get all changed files from PR (with error handling)
|
||||
files=""
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
if gh api repos/$REPO_NAME/pulls/$PR_NUMBER/files --jq '.[].filename' > /tmp/pr_files.txt 2>/tmp/pr_error.txt; then
|
||||
files=$(cat /tmp/pr_files.txt)
|
||||
else
|
||||
echo "├── Changed Files: ⚠️ Could not fetch (likely test repo or PR not found)"
|
||||
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
|
||||
echo "├── Coverage Data: ✅ Available"
|
||||
else
|
||||
echo "├── Coverage Data: ⚠️ Not available"
|
||||
fi
|
||||
echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
|
||||
# No files = no inclusions filter = full scan
|
||||
exit 0
|
||||
fi
|
||||
else
|
||||
echo "├── PR Number: ⚠️ Not available"
|
||||
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
|
||||
echo "├── Coverage Data: ✅ Available"
|
||||
else
|
||||
echo "├── Coverage Data: ⚠️ Not available"
|
||||
fi
|
||||
echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Get file extensions and count for summary
|
||||
extensions=$(echo "$files" | sed 's/.*\.//' | sort | uniq | tr '\n' ',' | sed 's/,$//')
|
||||
file_count=$(echo "$files" | wc -l)
|
||||
echo "├── Changed Files: $file_count file(s) (.${extensions})"
|
||||
|
||||
# Check if coverage.xml exists and has content
|
||||
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
|
||||
echo "├── Coverage Data: ✅ Available"
|
||||
else
|
||||
echo "├── Coverage Data: ⚠️ Not available (analysis will proceed without coverage)"
|
||||
fi
|
||||
|
||||
# Prepare file list for Sonar
|
||||
echo "All changed files in PR:"
|
||||
echo "$files"
|
||||
|
||||
# Filter out files that are excluded by .coveragerc to avoid coverage conflicts
|
||||
# This prevents SonarCloud from analyzing files that have no coverage data
|
||||
if [ -n "$files" ]; then
|
||||
# Filter out files matching .coveragerc omit patterns
|
||||
filtered_files=$(echo "$files" | grep -v "settings/.*_defaults\.py$" | grep -v "settings/defaults\.py$" | grep -v "main/migrations/")
|
||||
|
||||
# Show which files were filtered out for transparency
|
||||
excluded_files=$(echo "$files" | grep -E "(settings/.*_defaults\.py$|settings/defaults\.py$|main/migrations/)" || true)
|
||||
if [ -n "$excluded_files" ]; then
|
||||
echo "├── Filtered out (coverage-excluded): $(echo "$excluded_files" | wc -l) file(s)"
|
||||
echo "$excluded_files" | sed 's/^/│ - /'
|
||||
fi
|
||||
|
||||
if [ -n "$filtered_files" ]; then
|
||||
inclusions=$(echo "$filtered_files" | tr '\n' ',' | sed 's/,$//')
|
||||
echo "SONAR_INCLUSIONS=$inclusions" >> $GITHUB_ENV
|
||||
echo "└── Result: ✅ Will scan these files (excluding coverage-omitted files): $inclusions"
|
||||
else
|
||||
echo "└── Result: ✅ All changed files are excluded by coverage config, running full SonarCloud analysis"
|
||||
# Don't set SONAR_INCLUSIONS, let it scan everything per sonar-project.properties
|
||||
fi
|
||||
else
|
||||
echo "└── Result: ✅ Running SonarCloud analysis"
|
||||
fi
|
||||
|
||||
- name: Add base branch
|
||||
if: env.PR_NUMBER != ''
|
||||
run: |
|
||||
gh pr checkout ${{ env.PR_NUMBER }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: SonarCloud Scan
|
||||
uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
|
||||
with:
|
||||
args: >
|
||||
-Dsonar.scm.revision=${{ env.COMMIT_SHA }}
|
||||
-Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
|
||||
-Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
|
||||
-Dsonar.pullrequest.base=${{ env.PR_BASE }}
|
||||
-Dsonar.python.coverage.reportPaths=${{ env.COVERAGE_PATHS }}
|
||||
${{ env.SONAR_INCLUSIONS && format('-Dsonar.inclusions={0}', env.SONAR_INCLUSIONS) || '' }}
|
||||
|
||||
sonar-branch-analysis:
|
||||
name: SonarCloud Branch Analysis
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'workflow_run' &&
|
||||
github.event.workflow_run.conclusion == 'success' &&
|
||||
github.event.workflow_run.event == 'push' &&
|
||||
github.repository == 'ansible/awx'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Download all individual coverage artifacts from CI workflow (optional for branch pushes)
|
||||
- name: Download coverage artifacts
|
||||
continue-on-error: true
|
||||
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
workflow: CI
|
||||
run_id: ${{ github.event.workflow_run.id }}
|
||||
pattern: api-test-artifacts
|
||||
|
||||
- name: Print SonarCloud Analysis Summary
|
||||
env:
|
||||
BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
|
||||
run: |
|
||||
# Find all downloaded coverage XML files
|
||||
coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
|
||||
echo "Found coverage files: $coverage_files"
|
||||
echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
|
||||
|
||||
echo "🔍 SonarCloud Analysis Summary"
|
||||
echo "=============================="
|
||||
echo "├── CI Event: ✅ Push (via workflow_run)"
|
||||
echo "├── Branch: $BRANCH_NAME"
|
||||
echo "├── Coverage Files: ${coverage_files:-none}"
|
||||
echo "├── Python Changes: ➖ N/A (Full codebase scan)"
|
||||
echo "└── Result: ✅ Proceed - \"Running SonarCloud analysis\""
|
||||
|
||||
- name: SonarCloud Scan
|
||||
uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
|
||||
with:
|
||||
args: >
|
||||
-Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
|
||||
-Dsonar.branch.name=${{ github.event.workflow_run.head_branch }}
|
||||
${{ env.COVERAGE_PATHS && format('-Dsonar.python.coverage.reportPaths={0}', env.COVERAGE_PATHS) || '' }}
|
||||
177
.github/workflows/spec-sync-on-merge.yml
vendored
177
.github/workflows/spec-sync-on-merge.yml
vendored
@@ -1,177 +0,0 @@
|
||||
# Sync OpenAPI Spec on Merge
|
||||
#
|
||||
# This workflow runs when code is merged to protected branches (devel, stable-*).
|
||||
# It runs the dev environment to generate the OpenAPI spec, then syncs it to
|
||||
# the central spec repository.
|
||||
#
|
||||
# FLOW: PR merged → push to branch → dev environment runs → spec synced to central repo
|
||||
#
|
||||
# NOTE: This is an inlined version for testing with private forks.
|
||||
# Production version will use a reusable workflow from the org repos.
|
||||
name: Sync OpenAPI Spec on Merge
|
||||
env:
|
||||
LC_ALL: "C.UTF-8"
|
||||
DEV_DOCKER_OWNER: ${{ github.repository_owner }}
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- devel
|
||||
- stable-2.6
|
||||
workflow_dispatch: # Allow manual triggering for testing
|
||||
jobs:
|
||||
sync-openapi-spec:
|
||||
name: Sync OpenAPI spec to central repo
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout Controller repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Build awx_devel image to use for schema gen
|
||||
uses: ./.github/actions/awx_devel_image
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
|
||||
|
||||
- name: Generate API Schema
|
||||
run: |
|
||||
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
|
||||
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
|
||||
docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
|
||||
--workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema
|
||||
|
||||
- name: Verify spec file exists
|
||||
run: |
|
||||
SPEC_FILE="./schema.json"
|
||||
if [ ! -f "$SPEC_FILE" ]; then
|
||||
echo "❌ Spec file not found at $SPEC_FILE"
|
||||
echo "Contents of workspace:"
|
||||
ls -la .
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Found spec file at $SPEC_FILE"
|
||||
|
||||
- name: Checkout spec repo
|
||||
id: checkout_spec_repo
|
||||
continue-on-error: true
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: ansible-automation-platform/aap-openapi-specs
|
||||
ref: ${{ github.ref_name }}
|
||||
path: spec-repo
|
||||
token: ${{ secrets.OPENAPI_SPEC_SYNC_TOKEN }}
|
||||
|
||||
- name: Fail if branch doesn't exist
|
||||
if: steps.checkout_spec_repo.outcome == 'failure'
|
||||
run: |
|
||||
echo "##[error]❌ Branch '${{ github.ref_name }}' does not exist in the central spec repository."
|
||||
echo "##[error]Expected branch: ${{ github.ref_name }}"
|
||||
echo "##[error]This branch must be created in the spec repo before specs can be synced."
|
||||
exit 1
|
||||
|
||||
- name: Compare specs
|
||||
id: compare
|
||||
run: |
|
||||
COMPONENT_SPEC="./schema.json"
|
||||
SPEC_REPO_FILE="spec-repo/controller.json"
|
||||
|
||||
# Check if spec file exists in spec repo
|
||||
if [ ! -f "$SPEC_REPO_FILE" ]; then
|
||||
echo "Spec file doesn't exist in spec repo - will create new file"
|
||||
echo "has_diff=true" >> $GITHUB_OUTPUT
|
||||
echo "is_new_file=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# Compare files
|
||||
if diff -q "$COMPONENT_SPEC" "$SPEC_REPO_FILE" > /dev/null; then
|
||||
echo "✅ No differences found - specs are identical"
|
||||
echo "has_diff=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "📝 Differences found - spec has changed"
|
||||
echo "has_diff=true" >> $GITHUB_OUTPUT
|
||||
echo "is_new_file=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Update spec file
|
||||
if: steps.compare.outputs.has_diff == 'true'
|
||||
run: |
|
||||
cp "./schema.json" "spec-repo/controller.json"
|
||||
echo "✅ Updated spec-repo/controller.json"
|
||||
|
||||
- name: Create PR in spec repo
|
||||
if: steps.compare.outputs.has_diff == 'true'
|
||||
working-directory: spec-repo
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.OPENAPI_SPEC_SYNC_TOKEN }}
|
||||
COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
|
||||
run: |
|
||||
# Configure git
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Create branch for PR
|
||||
SHORT_SHA="${{ github.sha }}"
|
||||
SHORT_SHA="${SHORT_SHA:0:7}"
|
||||
BRANCH_NAME="update-Controller-${{ github.ref_name }}-${SHORT_SHA}"
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
|
||||
# Add and commit changes
|
||||
git add "controller.json"
|
||||
|
||||
if [ "${{ steps.compare.outputs.is_new_file }}" == "true" ]; then
|
||||
COMMIT_MSG="Add Controller OpenAPI spec for ${{ github.ref_name }}"
|
||||
else
|
||||
COMMIT_MSG="Update Controller OpenAPI spec for ${{ github.ref_name }}"
|
||||
fi
|
||||
|
||||
git commit -m "$COMMIT_MSG
|
||||
|
||||
Synced from ${{ github.repository }}@${{ github.sha }}
|
||||
Source branch: ${{ github.ref_name }}
|
||||
|
||||
Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Push branch
|
||||
git push origin "$BRANCH_NAME"
|
||||
|
||||
# Create PR
|
||||
PR_TITLE="[${{ github.ref_name }}] Update Controller spec from merged commit"
|
||||
PR_BODY="## Summary
|
||||
Automated OpenAPI spec sync from component repository merge.
|
||||
|
||||
**Source:** ${{ github.repository }}@${{ github.sha }}
|
||||
**Branch:** \`${{ github.ref_name }}\`
|
||||
**Component:** \`Controller\`
|
||||
**Spec File:** \`controller.json\`
|
||||
|
||||
## Changes
|
||||
$(if [ "${{ steps.compare.outputs.is_new_file }}" == "true" ]; then echo "- 🆕 New spec file created"; else echo "- 📝 Spec file updated with latest changes"; fi)
|
||||
|
||||
## Source Commit
|
||||
\`\`\`
|
||||
${COMMIT_MESSAGE}
|
||||
\`\`\`
|
||||
|
||||
---
|
||||
🤖 This PR was automatically generated by the OpenAPI spec sync workflow."
|
||||
|
||||
gh pr create \
|
||||
--title "$PR_TITLE" \
|
||||
--body "$PR_BODY" \
|
||||
--base "${{ github.ref_name }}" \
|
||||
--head "$BRANCH_NAME"
|
||||
|
||||
echo "✅ Created PR in spec repo"
|
||||
|
||||
- name: Report results
|
||||
if: always()
|
||||
run: |
|
||||
if [ "${{ steps.compare.outputs.has_diff }}" == "true" ]; then
|
||||
echo "📝 Spec sync completed - PR created in spec repo"
|
||||
else
|
||||
echo "✅ Spec sync completed - no changes needed"
|
||||
fi
|
||||
4
.github/workflows/stage.yml
vendored
4
.github/workflows/stage.yml
vendored
@@ -85,11 +85,9 @@ jobs:
|
||||
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
|
||||
|
||||
- name: Setup node and npm for new UI build
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '18'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: awx/awx/ui/**/package-lock.json
|
||||
|
||||
- name: Prebuild new UI for awx image (to speed up build process)
|
||||
working-directory: awx
|
||||
|
||||
42
.github/workflows/upload_schema.yml
vendored
42
.github/workflows/upload_schema.yml
vendored
@@ -11,7 +11,6 @@ on:
|
||||
- devel
|
||||
- release_**
|
||||
- feature_**
|
||||
- stable-**
|
||||
jobs:
|
||||
push:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -24,26 +23,35 @@ jobs:
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Build awx_devel image to use for schema gen
|
||||
uses: ./.github/actions/awx_devel_image
|
||||
- uses: ./.github/actions/setup-python
|
||||
|
||||
- name: Log in to registry
|
||||
run: |
|
||||
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
||||
|
||||
- uses: ./.github/actions/setup-ssh-agent
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
|
||||
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
|
||||
|
||||
- name: Pre-pull image to warm build cache
|
||||
run: |
|
||||
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
||||
|
||||
- name: Build image
|
||||
run: |
|
||||
DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
|
||||
|
||||
- name: Generate API Schema
|
||||
run: |
|
||||
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
|
||||
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
|
||||
docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
|
||||
--workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema
|
||||
--workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
|
||||
|
||||
- name: Upload API Schema
|
||||
uses: keithweaver/aws-s3-github-action@4dd5a7b81d54abaa23bbac92b27e85d7f405ae53
|
||||
with:
|
||||
command: cp
|
||||
source: ${{ github.workspace }}/schema.json
|
||||
destination: s3://awx-public-ci-files/${{ github.event.repository.name }}/${{ github.ref_name }}/schema.json
|
||||
aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_KEY }}
|
||||
aws_region: us-east-1
|
||||
flags: --acl public-read --only-show-errors
|
||||
env:
|
||||
AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
|
||||
AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
|
||||
AWS_REGION: 'us-east-1'
|
||||
run: |
|
||||
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
|
||||
ansible localhost -c local -m aws_s3 \
|
||||
-a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,7 +1,6 @@
|
||||
# Ignore generated schema
|
||||
swagger.json
|
||||
schema.json
|
||||
schema.yaml
|
||||
reference-schema.json
|
||||
|
||||
# Tags
|
||||
|
||||
@@ -7,7 +7,7 @@ build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: >-
|
||||
3.12
|
||||
3.11
|
||||
commands:
|
||||
- pip install --user tox
|
||||
- python3 -m tox -e docs --notest -v
|
||||
|
||||
@@ -31,7 +31,7 @@ Have questions about this document or anything not covered here? Create a topic
|
||||
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
|
||||
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see [git push docs](https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt).
|
||||
- If submitting a large code change, it's a good idea to create a [forum topic tagged with 'awx'](https://forum.ansible.com/tag/awx), and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
||||
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
||||
|
||||
## Setting up your development environment
|
||||
|
||||
|
||||
95
Makefile
95
Makefile
@@ -1,6 +1,6 @@
|
||||
-include awx/ui/Makefile
|
||||
|
||||
PYTHON := $(notdir $(shell for i in python3.12 python3.11 python3; do command -v $$i; done|sed 1q))
|
||||
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
|
||||
SHELL := bash
|
||||
DOCKER_COMPOSE ?= docker compose
|
||||
OFFICIAL ?= no
|
||||
@@ -19,16 +19,8 @@ COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d .
|
||||
COLLECTION_SANITY_ARGS ?= --docker
|
||||
# collection unit testing directories
|
||||
COLLECTION_TEST_DIRS ?= awx_collection/test/awx
|
||||
# pytest added args to collect coverage
|
||||
COVERAGE_ARGS ?= --cov --cov-report=xml --junitxml=reports/junit.xml
|
||||
# pytest test directories
|
||||
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
|
||||
# pytest args to run tests in parallel
|
||||
PARALLEL_TESTS ?= -n auto
|
||||
# collection integration test directories (defaults to all)
|
||||
COLLECTION_TEST_TARGET ?=
|
||||
# Python version for ansible-test (must be 3.11, 3.12, or 3.13)
|
||||
ANSIBLE_TEST_PYTHON_VERSION ?= 3.13
|
||||
# args for collection install
|
||||
COLLECTION_PACKAGE ?= awx
|
||||
COLLECTION_NAMESPACE ?= awx
|
||||
@@ -79,7 +71,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
|
||||
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
|
||||
# These should be upgraded in the AWX and Ansible venv before attempting
|
||||
# to install the actual requirements
|
||||
VENV_BOOTSTRAP ?= pip==25.3 setuptools==80.9.0 setuptools_scm[toml]==9.2.2 wheel==0.45.1 cython==3.1.3
|
||||
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==70.3.0 setuptools_scm[toml]==8.1.0 wheel==0.45.1 cython==3.0.11
|
||||
|
||||
NAME ?= awx
|
||||
|
||||
@@ -107,8 +99,6 @@ else
|
||||
endif
|
||||
|
||||
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
|
||||
update_requirements upgrade_requirements update_requirements_dev \
|
||||
docker_update_requirements docker_upgrade_requirements docker_update_requirements_dev \
|
||||
develop refresh adduser migrate dbchange \
|
||||
receiver test test_unit test_coverage coverage_html \
|
||||
sdist \
|
||||
@@ -148,7 +138,7 @@ clean-api:
|
||||
rm -rf build $(NAME)-$(VERSION) *.egg-info
|
||||
rm -rf .tox
|
||||
find . -type f -regex ".*\.py[co]$$" -delete
|
||||
find . -type d -name "__pycache__" -exec rm -rf {} +
|
||||
find . -type d -name "__pycache__" -delete
|
||||
rm -f awx/awx_test.sqlite3*
|
||||
rm -rf requirements/vendor
|
||||
rm -rf awx/projects
|
||||
@@ -198,36 +188,6 @@ requirements_dev: requirements_awx requirements_awx_dev
|
||||
|
||||
requirements_test: requirements
|
||||
|
||||
## Update requirements files using pip-compile (run inside container)
|
||||
update_requirements:
|
||||
cd requirements && ./updater.sh run
|
||||
|
||||
## Upgrade all requirements to latest versions (run inside container)
|
||||
upgrade_requirements:
|
||||
cd requirements && ./updater.sh upgrade
|
||||
|
||||
## Update development requirements (run inside container)
|
||||
update_requirements_dev:
|
||||
cd requirements && ./updater.sh dev
|
||||
|
||||
## Update requirements using docker-runner
|
||||
docker_update_requirements:
|
||||
@echo "Running requirements updater..."
|
||||
AWX_DOCKER_CMD='make update_requirements' $(MAKE) docker-runner
|
||||
@echo "Requirements update complete!"
|
||||
|
||||
## Upgrade requirements using docker-runner
|
||||
docker_upgrade_requirements:
|
||||
@echo "Running requirements upgrader..."
|
||||
AWX_DOCKER_CMD='make upgrade_requirements' $(MAKE) docker-runner
|
||||
@echo "Requirements upgrade complete!"
|
||||
|
||||
## Update dev requirements using docker-runner
|
||||
docker_update_requirements_dev:
|
||||
@echo "Running dev requirements updater..."
|
||||
AWX_DOCKER_CMD='make update_requirements_dev' $(MAKE) docker-runner
|
||||
@echo "Dev requirements update complete!"
|
||||
|
||||
## "Install" awx package in development mode.
|
||||
develop:
|
||||
@if [ "$(VIRTUAL_ENV)" ]; then \
|
||||
@@ -289,7 +249,7 @@ dispatcher:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(PYTHON) manage.py dispatcherd
|
||||
$(PYTHON) manage.py run_dispatcher
|
||||
|
||||
## Run to start the zeromq callback receiver
|
||||
receiver:
|
||||
@@ -348,17 +308,20 @@ black: reports
|
||||
@echo "fi" >> .git/hooks/pre-commit
|
||||
@chmod +x .git/hooks/pre-commit
|
||||
|
||||
genschema: awx-link reports
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(MANAGEMENT_COMMAND) spectacular --format openapi-json --file schema.json
|
||||
genschema: reports
|
||||
$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
|
||||
mv swagger.json schema.json
|
||||
|
||||
genschema-yaml: awx-link reports
|
||||
swagger: reports
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(MANAGEMENT_COMMAND) spectacular --format openapi --file schema.yaml
|
||||
(set -o pipefail && py.test --cov --cov-report=xml --junitxml=reports/junit.xml $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
|
||||
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
|
||||
then \
|
||||
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
|
||||
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
|
||||
fi
|
||||
|
||||
check: black
|
||||
|
||||
@@ -371,12 +334,14 @@ api-lint:
|
||||
awx-link:
|
||||
[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
|
||||
|
||||
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
|
||||
PYTEST_ARGS ?= -n auto
|
||||
## Run all API unit tests.
|
||||
test:
|
||||
if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PARALLEL_TESTS) $(TEST_DIRS)
|
||||
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PYTEST_ARGS) $(TEST_DIRS)
|
||||
cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
|
||||
awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
|
||||
|
||||
@@ -385,7 +350,7 @@ live_test:
|
||||
|
||||
## Run all API unit tests with coverage enabled.
|
||||
test_coverage:
|
||||
$(MAKE) test PYTEST_ADDOPTS="--create-db $(COVERAGE_ARGS)"
|
||||
$(MAKE) test PYTEST_ARGS="--create-db --cov --cov-report=xml --junitxml=reports/junit.xml"
|
||||
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
|
||||
then \
|
||||
echo 'cov-report-files=awxkit/coverage.xml,reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
|
||||
@@ -393,7 +358,7 @@ test_coverage:
|
||||
fi
|
||||
|
||||
test_migrations:
|
||||
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db $(PARALLEL_TESTS) $(COVERAGE_ARGS) $(TEST_DIRS)
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db --cov=awx --cov-report=xml --junitxml=reports/junit.xml $(PYTEST_ARGS) $(TEST_DIRS)
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
@@ -411,7 +376,7 @@ test_collection:
fi && \
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
ansible --version
py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
py.test $(COLLECTION_TEST_DIRS) --cov --cov-report=xml --junitxml=reports/junit.xml -v
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
@@ -462,8 +427,8 @@ test_collection_sanity:

test_collection_integration: install_collection
cd $(COLLECTION_INSTALL) && \
PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test integration --python $(ANSIBLE_TEST_PYTHON_VERSION) --coverage -vvv $(COLLECTION_TEST_TARGET) && \
PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test coverage xml --requirements --group-by command --group-by version
ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
ansible-test coverage xml --requirements --group-by command --group-by version
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
@@ -568,16 +533,14 @@ docker-compose-test: awx/projects docker-compose-sources
docker-compose-runtest: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

docker-compose-build-schema: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 make genschema
docker-compose-build-swagger: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

SCHEMA_DIFF_BASE_FOLDER ?= awx
SCHEMA_DIFF_BASE_BRANCH ?= devel
detect-schema-change: genschema
curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_FOLDER)/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
# Ignore differences in whitespace with -b
# diff exits with 1 when files differ - capture but don't fail
-diff -u -b reference-schema.json schema.json
diff -u -b reference-schema.json schema.json

docker-compose-clean: awx/projects
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
@@ -610,7 +573,7 @@ docker-compose-build: Dockerfile.dev
docker-compose-buildx: Dockerfile.dev
- docker buildx create --name docker-compose-buildx
docker buildx use docker-compose-buildx
docker buildx build \
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \
@@ -670,7 +633,7 @@ awx-kube-build: Dockerfile
awx-kube-buildx: Dockerfile
- docker buildx create --name awx-kube-buildx
docker buildx use awx-kube-buildx
docker buildx build \
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg VERSION=$(VERSION) \
@@ -704,7 +667,7 @@ awx-kube-dev-build: Dockerfile.kube-dev
awx-kube-dev-buildx: Dockerfile.kube-dev
- docker buildx create --name awx-kube-dev-buildx
docker buildx use awx-kube-dev-buildx
docker buildx build \
- docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \

@@ -1,4 +1,4 @@
[](https://github.com/ansible/awx/actions/workflows/ci.yml) [](https://codecov.io/github/ansible/awx) [](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://forum.ansible.com/tag/awx)
[](https://github.com/ansible/awx/actions/workflows/ci.yml) [](https://codecov.io/github/ansible/awx) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://forum.ansible.com/tag/awx)
[](https://chat.ansible.im/#/welcome) [](https://forum.ansible.com)

<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
@@ -18,7 +18,7 @@ AWX provides a web-based user interface, REST API, and task engine built on top

To install AWX, please view the [Install guide](./INSTALL.md).

To learn more about using AWX, view the [AWX docs site](https://docs.ansible.com/projects/awx/en/latest/).
To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).

The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).

@@ -41,11 +41,11 @@ If you're experiencing a problem that you feel is a bug in AWX or have ideas for
Code of Conduct
---------------

We require all of our community members and contributors to adhere to the [Ansible code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
We require all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)

Get Involved
------------

We welcome your feedback and ideas via the [Ansible Forum](https://forum.ansible.com/tag/awx).

For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://docs.ansible.com/projects/awx/en/latest/contributor/communication.html).
For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://ansible.readthedocs.io/projects/awx/en/latest/contributor/communication.html).

@@ -7,6 +7,7 @@ from rest_framework import serializers
# AWX
from awx.conf import fields, register, register_validate


register(
'SESSION_COOKIE_AGE',
field_class=fields.IntegerField,

@@ -21,7 +21,7 @@ class NullFieldMixin(object):
"""

def validate_empty_values(self, data):
is_empty_value, data = super(NullFieldMixin, self).validate_empty_values(data)
(is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
if is_empty_value and data is None:
return (False, data)
return (is_empty_value, data)

@@ -161,14 +161,16 @@ def get_view_description(view, html=False):


def get_default_schema():
# drf-spectacular is configured via REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS']
# Just use the DRF default, which will pick up our CustomAutoSchema
return views.APIView.schema
if settings.DYNACONF.is_development_mode:
from awx.api.swagger import schema_view

return schema_view
else:
return views.APIView.schema


class APIView(views.APIView):
# Schema is inherited from DRF's APIView, which uses DEFAULT_SCHEMA_CLASS
# No need to override it here - drf-spectacular will handle it
schema = get_default_schema()
versioning_class = URLPathVersioning

def initialize_request(self, request, *args, **kwargs):
@@ -764,7 +766,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)

def unattach(self, request, *args, **kwargs):
sub_id, res = self.unattach_validate(request)
(sub_id, res) = self.unattach_validate(request)
if res:
return res
return self.unattach_by_id(request, sub_id)
@@ -842,7 +844,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first()
auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first()
if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct()
@@ -1023,9 +1025,6 @@ class GenericCancelView(RetrieveAPIView):
# In subclass set model, serializer_class
obj_permission_type = 'cancel'

def get(self, request, *args, **kwargs):
return super(GenericCancelView, self).get(request, *args, **kwargs)

@transaction.non_atomic_requests
def dispatch(self, *args, **kwargs):
return super(GenericCancelView, self).dispatch(*args, **kwargs)

@@ -5,6 +5,7 @@ from django.urls import re_path

from awx.api.views import MetricsView


urls = [re_path(r'^$', MetricsView.as_view(), name='metrics_view')]

__all__ = ['urls']

@@ -111,7 +111,7 @@ class UnifiedJobEventPagination(Pagination):
def __init__(self, *args, **kwargs):
self.use_limit_paginator = False
self.limit_pagination = LimitPagination()
super().__init__(*args, **kwargs)
return super().__init__(*args, **kwargs)

def paginate_queryset(self, queryset, request, view=None):
if 'limit' in request.query_params:

@@ -10,7 +10,7 @@ from rest_framework import permissions

# AWX
from awx.main.access import check_user_access
from awx.main.models import Inventory, UnifiedJob, Organization
from awx.main.models import Inventory, UnifiedJob
from awx.main.utils import get_object_or_400

logger = logging.getLogger('awx.api.permissions')
@@ -228,19 +228,12 @@ class InventoryInventorySourcesUpdatePermission(ModelAccessPermission):
class UserPermission(ModelAccessPermission):
def check_post_permissions(self, request, view, obj=None):
if not request.data:
return Organization.access_qs(request.user, 'change').exists()
return request.user.admin_of_organizations.exists()
elif request.user.is_superuser:
return True
raise PermissionDenied()


class IsSystemAdmin(permissions.BasePermission):
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated):
return False
return request.user.is_superuser


class IsSystemAdminOrAuditor(permissions.BasePermission):
"""
Allows write access only to system admin users.

@@ -1,119 +0,0 @@
import warnings

from rest_framework.permissions import IsAuthenticated
from drf_spectacular.openapi import AutoSchema
from drf_spectacular.views import (
SpectacularAPIView,
SpectacularSwaggerView,
SpectacularRedocView,
)


def filter_credential_type_schema(
result,
generator,  # NOSONAR
request,  # NOSONAR
public,  # NOSONAR
):
"""
Postprocessing hook to filter CredentialType kind enum values.

For CredentialTypeRequest and PatchedCredentialTypeRequest schemas (POST/PUT/PATCH),
filter the 'kind' enum to only show 'cloud' and 'net' values.

This ensures the OpenAPI schema accurately reflects that only 'cloud' and 'net'
credential types can be created or modified via the API, matching the validation
in CredentialTypeSerializer.validate().

Args:
result: The OpenAPI schema dict to be modified
generator, request, public: Required by drf-spectacular interface (unused)

Returns:
The modified OpenAPI schema dict
"""
schemas = result.get('components', {}).get('schemas', {})

# Filter CredentialTypeRequest (POST/PUT) - field is required
if 'CredentialTypeRequest' in schemas:
kind_prop = schemas['CredentialTypeRequest'].get('properties', {}).get('kind', {})
if 'enum' in kind_prop:
# Filter to only cloud and net (no None - field is required)
kind_prop['enum'] = ['cloud', 'net']
kind_prop['description'] = "* `cloud` - Cloud\\n* `net` - Network"

# Filter PatchedCredentialTypeRequest (PATCH) - field is optional
if 'PatchedCredentialTypeRequest' in schemas:
kind_prop = schemas['PatchedCredentialTypeRequest'].get('properties', {}).get('kind', {})
if 'enum' in kind_prop:
# Filter to only cloud and net (None allowed - field can be omitted in PATCH)
kind_prop['enum'] = ['cloud', 'net', None]
kind_prop['description'] = "* `cloud` - Cloud\\n* `net` - Network"

return result


class CustomAutoSchema(AutoSchema):
"""Custom AutoSchema to add swagger_topic to tags and handle deprecated endpoints."""

def get_tags(self):
tags = []
try:
if hasattr(self.view, 'get_serializer'):
serializer = self.view.get_serializer()
else:
serializer = None
except Exception:
serializer = None
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for this view.'.format(self.view.__class__.__name__)
)

if hasattr(self.view, 'swagger_topic'):
tags.append(str(self.view.swagger_topic).title())
elif serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
elif hasattr(self.view, 'model'):
tags.append(str(self.view.model._meta.verbose_name_plural).title())
else:
tags = super().get_tags()  # Use default drf-spectacular behavior

if not tags:
warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
tags = ['api']  # Fallback to default value

return tags

def is_deprecated(self):
"""Return `True` if this operation is to be marked as deprecated."""
return getattr(self.view, 'deprecated', False)


class AuthenticatedSpectacularAPIView(SpectacularAPIView):
"""SpectacularAPIView that requires authentication."""

permission_classes = [IsAuthenticated]


class AuthenticatedSpectacularSwaggerView(SpectacularSwaggerView):
"""SpectacularSwaggerView that requires authentication."""

permission_classes = [IsAuthenticated]


class AuthenticatedSpectacularRedocView(SpectacularRedocView):
"""SpectacularRedocView that requires authentication."""

permission_classes = [IsAuthenticated]


# Schema view (returns OpenAPI schema JSON/YAML)
schema_view = AuthenticatedSpectacularAPIView.as_view()

# Swagger UI view
swagger_ui_view = AuthenticatedSpectacularSwaggerView.as_view(url_name='api:schema-json')

# ReDoc UI view
redoc_view = AuthenticatedSpectacularRedocView.as_view(url_name='api:schema-json')
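Note (editorial aside, not part of this compare): the module removed above plugs into drf-spectacular through ordinary Django settings. A minimal sketch of how a custom AutoSchema plus a postprocessing hook like filter_credential_type_schema are typically registered is shown below; the dotted paths are hypothetical, since the deleted module's real import path is not visible in this diff.

# Hypothetical settings wiring for the drf-spectacular pieces deleted above.
# The module path 'awx.api.schema' is an assumption; point it at wherever
# CustomAutoSchema and filter_credential_type_schema actually live.
REST_FRAMEWORK = {
    'DEFAULT_SCHEMA_CLASS': 'awx.api.schema.CustomAutoSchema',
}
SPECTACULAR_SETTINGS = {
    'TITLE': 'AWX API',
    'POSTPROCESSING_HOOKS': ['awx.api.schema.filter_credential_type_schema'],
}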
@@ -7,7 +7,6 @@ import json
|
||||
import logging
|
||||
import re
|
||||
import yaml
|
||||
import urllib.parse
|
||||
from collections import Counter, OrderedDict
|
||||
from datetime import timedelta
|
||||
from uuid import uuid4
|
||||
@@ -117,7 +116,6 @@ from awx.main.utils import (
|
||||
from awx.main.utils.filters import SmartFilter
|
||||
from awx.main.utils.plugins import load_combined_inventory_source_options
|
||||
from awx.main.utils.named_url_graph import reset_counters
|
||||
from awx.main.utils.inventory_vars import update_group_variables
|
||||
from awx.main.scheduler.task_manager_models import TaskManagerModels
|
||||
from awx.main.redact import UriCleaner, REPLACE_STR
|
||||
from awx.main.signals import update_inventory_computed_fields
|
||||
@@ -734,22 +732,7 @@ class EmptySerializer(serializers.Serializer):
|
||||
pass
|
||||
|
||||
|
||||
class OpaQueryPathMixin(serializers.Serializer):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def validate_opa_query_path(self, value):
|
||||
# Decode the URL and re-encode it
|
||||
decoded_value = urllib.parse.unquote(value)
|
||||
re_encoded_value = urllib.parse.quote(decoded_value, safe='/')
|
||||
|
||||
if value != re_encoded_value:
|
||||
raise serializers.ValidationError(_("The URL must be properly encoded."))
|
||||
|
||||
return value
|
||||
|
||||
|
||||
class UnifiedJobTemplateSerializer(BaseSerializer, OpaQueryPathMixin):
|
||||
class UnifiedJobTemplateSerializer(BaseSerializer):
|
||||
# As a base serializer, the capabilities prefetch is not used directly,
|
||||
# instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
|
||||
capabilities_prefetch = []
|
||||
@@ -963,13 +946,13 @@ class UnifiedJobSerializer(BaseSerializer):
|
||||
|
||||
class UnifiedJobListSerializer(UnifiedJobSerializer):
|
||||
class Meta:
|
||||
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished', '-artifacts')
|
||||
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished')
|
||||
|
||||
def get_field_names(self, declared_fields, info):
|
||||
field_names = super(UnifiedJobListSerializer, self).get_field_names(declared_fields, info)
|
||||
# Meta multiple inheritance and -field_name options don't seem to be
|
||||
# taking effect above, so remove the undesired fields here.
|
||||
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'event_processing_finished', 'artifacts'))
|
||||
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'event_processing_finished'))
|
||||
|
||||
def get_types(self):
|
||||
if type(self) is UnifiedJobListSerializer:
|
||||
@@ -1182,12 +1165,12 @@ class UserActivityStreamSerializer(UserSerializer):
|
||||
fields = ('*', '-is_system_auditor')
|
||||
|
||||
|
||||
class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
|
||||
class OrganizationSerializer(BaseSerializer):
|
||||
show_capabilities = ['edit', 'delete']
|
||||
|
||||
class Meta:
|
||||
model = Organization
|
||||
fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment', 'opa_query_path')
|
||||
fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment')
|
||||
read_only_fields = ('*', 'custom_virtualenv')
|
||||
|
||||
def get_related(self, obj):
|
||||
@@ -1230,7 +1213,7 @@ class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
|
||||
# to a team. This provides a hint to the ui so it can know to not
|
||||
# display these roles for team role selection.
|
||||
for key in ('admin_role', 'member_role'):
|
||||
if summary_dict and key in summary_dict.get('object_roles', {}):
|
||||
if key in summary_dict.get('object_roles', {}):
|
||||
summary_dict['object_roles'][key]['user_only'] = True
|
||||
|
||||
return summary_dict
|
||||
@@ -1541,7 +1524,7 @@ class LabelsListMixin(object):
|
||||
return res
|
||||
|
||||
|
||||
class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQueryPathMixin):
|
||||
class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
|
||||
show_capabilities = ['edit', 'delete', 'adhoc', 'copy']
|
||||
capabilities_prefetch = ['admin', 'adhoc', {'copy': 'organization.inventory_admin'}]
|
||||
|
||||
@@ -1562,7 +1545,6 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQuery
|
||||
'inventory_sources_with_failures',
|
||||
'pending_deletion',
|
||||
'prevent_instance_group_fallback',
|
||||
'opa_query_path',
|
||||
)
|
||||
|
||||
def get_related(self, obj):
|
||||
@@ -1632,68 +1614,8 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQuery
|
||||
|
||||
if kind == 'smart' and not host_filter:
|
||||
raise serializers.ValidationError({'host_filter': _('Smart inventories must specify host_filter')})
|
||||
|
||||
return super(InventorySerializer, self).validate(attrs)
|
||||
|
||||
@staticmethod
|
||||
def _update_variables(variables, inventory_id):
|
||||
"""
|
||||
Update the inventory variables of the 'all'-group.
|
||||
|
||||
The variables field contains vars from the inventory dialog, hence
|
||||
representing the "all"-group variables.
|
||||
|
||||
Since this is not an update from an inventory source, we update the
|
||||
variables when the inventory details form is saved.
|
||||
|
||||
A user edit on the inventory variables is considered a reset of the
|
||||
variables update history. Particularly if the user removes a variable by
|
||||
editing the inventory variables field, the variable is not supposed to
|
||||
reappear with a value from a previous inventory source update.
|
||||
|
||||
We achieve this by forcing `reset=True` on such an update.
|
||||
|
||||
As a side-effect, variables which have been set by source updates and
|
||||
have survived a user-edit (i.e. they have not been deleted from the
|
||||
variables field) will be assumed to originate from the user edit and are
|
||||
thus no longer deleted from the inventory when they are removed from
|
||||
their original source!
|
||||
|
||||
Note that we use the inventory source id -1 for user-edit updates
|
||||
because a regular inventory source cannot have an id of -1 since
|
||||
PostgreSQL assigns pk's starting from 1 (if this assumption doesn't hold
|
||||
true, we have to assign another special value for invsrc_id).
|
||||
|
||||
:param str variables: The variables as plain text in yaml or json
|
||||
format.
|
||||
:param int inventory_id: The primary key of the related inventory
|
||||
object.
|
||||
"""
|
||||
variables_dict = parse_yaml_or_json(variables, silent_failure=False)
|
||||
logger.debug(f"InventorySerializer._update_variables: {inventory_id=} {variables_dict=}, {variables=}")
|
||||
update_group_variables(
|
||||
group_id=None, # `None` denotes the 'all' group (which doesn't have a pk).
|
||||
newvars=variables_dict,
|
||||
dbvars=None,
|
||||
invsrc_id=-1,
|
||||
inventory_id=inventory_id,
|
||||
reset=True,
|
||||
)
|
||||
|
||||
def create(self, validated_data):
|
||||
"""Called when a new inventory has to be created."""
|
||||
logger.debug(f"InventorySerializer.create({validated_data=}) >>>>")
|
||||
obj = super().create(validated_data)
|
||||
self._update_variables(validated_data.get("variables") or "", obj.id)
|
||||
return obj
|
||||
|
||||
def update(self, obj, validated_data):
|
||||
"""Called when an existing inventory is updated."""
|
||||
logger.debug(f"InventorySerializer.update({validated_data=}) >>>>")
|
||||
obj = super().update(obj, validated_data)
|
||||
self._update_variables(validated_data.get("variables") or "", obj.id)
|
||||
return obj
|
||||
|
||||
|
||||
class ConstructedFieldMixin(serializers.Field):
|
||||
def get_attribute(self, instance):
|
||||
@@ -1983,12 +1905,10 @@ class GroupSerializer(BaseSerializerWithVariables):
|
||||
return res
|
||||
|
||||
def validate(self, attrs):
|
||||
# Do not allow the group name to conflict with an existing host name.
|
||||
name = force_str(attrs.get('name', self.instance and self.instance.name or ''))
|
||||
inventory = attrs.get('inventory', self.instance and self.instance.inventory or '')
|
||||
if Host.objects.filter(name=name, inventory=inventory).exists():
|
||||
raise serializers.ValidationError(_('A Host with that name already exists.'))
|
||||
#
|
||||
return super(GroupSerializer, self).validate(attrs)
|
||||
|
||||
def validate_name(self, value):
|
||||
@@ -2165,13 +2085,13 @@ class BulkHostDeleteSerializer(serializers.Serializer):
|
||||
attrs['hosts_data'] = attrs['host_qs'].values()
|
||||
|
||||
if len(attrs['host_qs']) == 0:
|
||||
error_hosts = dict.fromkeys(attrs['hosts'], "Hosts do not exist or you lack permission to delete it")
|
||||
error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in attrs['hosts']}
|
||||
raise serializers.ValidationError({'hosts': error_hosts})
|
||||
|
||||
if len(attrs['host_qs']) < len(attrs['hosts']):
|
||||
hosts_exists = [host['id'] for host in attrs['hosts_data']]
|
||||
failed_hosts = list(set(attrs['hosts']).difference(hosts_exists))
|
||||
error_hosts = dict.fromkeys(failed_hosts, "Hosts do not exist or you lack permission to delete it")
|
||||
error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in failed_hosts}
|
||||
raise serializers.ValidationError({'hosts': error_hosts})
|
||||
|
||||
# Getting all inventories that the hosts can be in
|
||||
@@ -2839,7 +2759,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
||||
{
|
||||
"role": {
|
||||
"id": None,
|
||||
"name": _("Platform Auditor"),
|
||||
"name": _("Controller System Auditor"),
|
||||
"description": _("Can view all aspects of the system"),
|
||||
"user_capabilities": {"unattach": False},
|
||||
},
|
||||
@@ -3027,6 +2947,11 @@ class CredentialSerializer(BaseSerializer):
|
||||
ret.remove(field)
|
||||
return ret
|
||||
|
||||
def validate_organization(self, org):
|
||||
if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
|
||||
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
|
||||
return org
|
||||
|
||||
def validate_credential_type(self, credential_type):
|
||||
if self.instance and credential_type.pk != self.instance.credential_type.pk:
|
||||
for related_objects in (
|
||||
@@ -3102,6 +3027,9 @@ class CredentialSerializerCreate(CredentialSerializer):
|
||||
if attrs.get('team'):
|
||||
attrs['organization'] = attrs['team'].organization
|
||||
|
||||
if 'credential_type' in attrs and attrs['credential_type'].kind == 'galaxy' and list(owner_fields) != ['organization']:
|
||||
raise serializers.ValidationError({"organization": _("Galaxy credentials must be owned by an Organization.")})
|
||||
|
||||
return super(CredentialSerializerCreate, self).validate(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
@@ -3319,7 +3247,6 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
'webhook_service',
|
||||
'webhook_credential',
|
||||
'prevent_instance_group_fallback',
|
||||
'opa_query_path',
|
||||
)
|
||||
read_only_fields = ('*', 'custom_virtualenv')
|
||||
|
||||
@@ -3527,7 +3454,7 @@ class JobRelaunchSerializer(BaseSerializer):
|
||||
choices=NEW_JOB_TYPE_CHOICES,
|
||||
write_only=True,
|
||||
)
|
||||
credential_passwords = VerbatimField(required=False, write_only=True)
|
||||
credential_passwords = VerbatimField(required=True, write_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Job
|
||||
@@ -5998,7 +5925,7 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
if self.instance and not self.instance.is_container_group:
|
||||
raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))
|
||||
|
||||
pod_spec_override_json = {}
|
||||
pod_spec_override_json = None
|
||||
# defect if the value is yaml or json if yaml convert to json
|
||||
try:
|
||||
# convert yaml to json
|
||||
|
||||
55
awx/api/swagger.py
Normal file
@@ -0,0 +1,55 @@
import warnings

from rest_framework.permissions import AllowAny
from drf_yasg import openapi
from drf_yasg.inspectors import SwaggerAutoSchema
from drf_yasg.views import get_schema_view


class CustomSwaggerAutoSchema(SwaggerAutoSchema):
"""Custom SwaggerAutoSchema to add swagger_topic to tags."""

def get_tags(self, operation_keys=None):
tags = []
try:
if hasattr(self.view, 'get_serializer'):
serializer = self.view.get_serializer()
else:
serializer = None
except Exception:
serializer = None
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {}.'.format(self.view.__class__.__name__, operation_keys)
)
if hasattr(self.view, 'swagger_topic'):
tags.append(str(self.view.swagger_topic).title())
elif serializer and hasattr(serializer, 'Meta'):
tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
elif hasattr(self.view, 'model'):
tags.append(str(self.view.model._meta.verbose_name_plural).title())
else:
tags = ['api']  # Fallback to default value

if not tags:
warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
return tags

def is_deprecated(self):
"""Return `True` if this operation is to be marked as deprecated."""
return getattr(self.view, 'deprecated', False)


schema_view = get_schema_view(
openapi.Info(
title='AWX API',
default_version='v2',
description='AWX API Documentation',
terms_of_service='https://www.google.com/policies/terms/',
contact=openapi.Contact(email='contact@snippets.local'),
license=openapi.License(name='Apache License'),
),
public=True,
permission_classes=[AllowAny],
)
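Note (editorial aside, not part of this compare): the drf-yasg schema_view created above is only meant to be mounted in development mode; a sketch of the URL wiring, consistent with the urls.py change that appears later in this compare, looks roughly like this.

# Development-only exposure of the drf-yasg schema_view (illustrative sketch).
from django.urls import re_path
from awx.api.swagger import schema_view

urlpatterns += [
    re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
    re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
    re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
]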
@@ -1,6 +1,6 @@
{% if content_only %}<div class="nocode ansi_fore ansi_back{% if dark %} ansi_dark{% endif %}">{% else %}
<!DOCTYPE HTML>
<html lang="en">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>{{ title }}</title>

@@ -1,4 +1,4 @@
---
collections:
  - name: ansible.receptor
version: 2.0.6
version: 2.0.3

@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import ActivityStreamList, ActivityStreamDetail
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
AdHocCommandStdout,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import AdHocCommandEventDetail
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
|
||||
]
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
import awx.api.views.analytics as analytics
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', analytics.AnalyticsRootView.as_view(), name='analytics_root_view'),
|
||||
re_path(r'^authorized/$', analytics.AnalyticsAuthorizedView.as_view(), name='analytics_authorized'),
|
||||
|
||||
@@ -16,6 +16,7 @@ from awx.api.views import (
|
||||
CredentialExternalTest,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', CredentialList.as_view(), name='credential_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', CredentialTypeList.as_view(), name='credential_type_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'),
|
||||
|
||||
@@ -8,6 +8,7 @@ from awx.api.views import (
|
||||
ExecutionEnvironmentActivityStreamList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'),
|
||||
|
||||
@@ -18,6 +18,7 @@ from awx.api.views import (
|
||||
GroupAdHocCommandsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', GroupList.as_view(), name='group_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', GroupDetail.as_view(), name='group_detail'),
|
||||
|
||||
@@ -18,6 +18,7 @@ from awx.api.views import (
|
||||
HostAdHocCommandEventsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', HostList.as_view(), name='host_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', HostDetail.as_view(), name='host_detail'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
)
|
||||
from awx.api.views.instance_install_bundle import InstanceInstallBundle
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', InstanceList.as_view(), name='instance_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
|
||||
|
||||
@@ -12,6 +12,7 @@ from awx.api.views import (
|
||||
InstanceGroupObjectRolesList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', InstanceGroupList.as_view(), name='instance_group_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),
|
||||
|
||||
@@ -29,6 +29,7 @@ from awx.api.views import (
|
||||
InventoryVariableData,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', InventoryList.as_view(), name='inventory_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'),
|
||||
|
||||
@@ -18,6 +18,7 @@ from awx.api.views import (
|
||||
InventorySourceNotificationTemplatesSuccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', InventorySourceList.as_view(), name='inventory_source_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'),
|
||||
|
||||
@@ -15,6 +15,7 @@ from awx.api.views import (
|
||||
InventoryUpdateCredentialsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'),
|
||||
|
||||
@@ -19,6 +19,7 @@ from awx.api.views import (
|
||||
JobHostSummaryDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', JobList.as_view(), name='job_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', JobDetail.as_view(), name='job_detail'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import JobHostSummaryDetail
|
||||
|
||||
|
||||
urls = [re_path(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -23,6 +23,7 @@ from awx.api.views import (
|
||||
JobTemplateCopy,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', JobTemplateList.as_view(), name='job_template_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views.labels import LabelList, LabelDetail
|
||||
|
||||
|
||||
urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import NotificationList, NotificationDetail
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', NotificationList.as_view(), name='notification_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
|
||||
|
||||
@@ -11,6 +11,7 @@ from awx.api.views import (
|
||||
NotificationTemplateCopy,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
|
||||
|
||||
@@ -27,6 +27,7 @@ from awx.api.views.organization import (
|
||||
)
|
||||
from awx.api.views import OrganizationCredentialList
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', OrganizationList.as_view(), name='organization_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'),
|
||||
|
||||
@@ -22,6 +22,7 @@ from awx.api.views import (
|
||||
ProjectCopy,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', ProjectList.as_view(), name='project_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
|
||||
|
||||
@@ -13,6 +13,7 @@ from awx.api.views import (
|
||||
ProjectUpdateEventsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', ProjectUpdateList.as_view(), name='project_update_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'),
|
||||
|
||||
@@ -8,6 +8,7 @@ from awx.api.views import (
|
||||
ReceptorAddressDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', ReceptorAddressesList.as_view(), name='receptor_addresses_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', ReceptorAddressDetail.as_view(), name='receptor_address_detail'),
|
||||
|
||||
@@ -3,13 +3,16 @@
|
||||
|
||||
from django.urls import re_path
|
||||
|
||||
from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList
|
||||
from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', RoleList.as_view(), name='role_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList, ScheduleLabelsList, ScheduleInstanceGroupList
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', ScheduleList.as_view(), name='schedule_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', SystemJobList.as_view(), name='system_job_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
SystemJobTemplateNotificationTemplatesSuccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'),
|
||||
|
||||
@@ -15,6 +15,7 @@ from awx.api.views import (
|
||||
TeamAccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', TeamList.as_view(), name='team_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', TeamDetail.as_view(), name='team_detail'),
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from django.urls import include, re_path
|
||||
|
||||
from awx import MODE
|
||||
from awx.api.generics import LoggedLoginView, LoggedLogoutView
|
||||
from awx.api.views.root import (
|
||||
ApiRootView,
|
||||
@@ -147,15 +148,21 @@ v2_urls = [
|
||||
|
||||
|
||||
app_name = 'api'
|
||||
|
||||
urlpatterns = [
|
||||
re_path(r'^$', ApiRootView.as_view(), name='api_root_view'),
|
||||
re_path(r'^(?P<version>(v2))/', include(v2_urls)),
|
||||
re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
|
||||
re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
|
||||
# the docs/, schema-related endpoints used to be listed here but now exposed by DAB api_documentation app
|
||||
]
|
||||
if MODE == 'development':
|
||||
# Only include these if we are in the development environment
|
||||
from awx.api.swagger import schema_view
|
||||
|
||||
from awx.api.urls.debug import urls as debug_urls
|
||||
from awx.api.urls.debug import urls as debug_urls
|
||||
|
||||
urlpatterns += [re_path(r'^debug/', include(debug_urls))]
|
||||
urlpatterns += [re_path(r'^debug/', include(debug_urls))]
|
||||
urlpatterns += [
|
||||
re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
|
||||
re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
|
||||
re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
|
||||
]
|
||||
|
||||
@@ -2,6 +2,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
|
||||
re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'),
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
WorkflowJobActivityStreamList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'),
|
||||
|
||||
@@ -14,6 +14,7 @@ from awx.api.views import (
|
||||
WorkflowJobNodeInstanceGroupsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'),
|
||||
|
||||
@@ -22,6 +22,7 @@ from awx.api.views import (
|
||||
WorkflowJobTemplateLabelList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'),
|
||||
|
||||
@@ -15,6 +15,7 @@ from awx.api.views import (
|
||||
WorkflowJobTemplateNodeInstanceGroupsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
re_path(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'),
|
||||
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'),
|
||||
|
||||
File diff suppressed because it is too large
@@ -10,13 +10,11 @@ from awx.api.generics import APIView, Response
|
||||
from awx.api.permissions import AnalyticsPermission
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.utils import get_awx_version
|
||||
from awx.main.utils.analytics_proxy import OIDCClient
|
||||
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
|
||||
from rest_framework import status
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
|
||||
AWX_ANALYTICS_API_PREFIX = 'analytics'
|
||||
|
||||
@@ -40,8 +38,6 @@ class MissingSettings(Exception):
|
||||
|
||||
|
||||
class GetNotAllowedMixin(object):
|
||||
skip_ai_description = True
|
||||
|
||||
def get(self, request, format=None):
|
||||
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
|
||||
|
||||
@@ -50,9 +46,7 @@ class AnalyticsRootView(APIView):
|
||||
permission_classes = (AnalyticsPermission,)
|
||||
name = _('Automation Analytics')
|
||||
swagger_topic = 'Automation Analytics'
|
||||
resource_purpose = 'automation analytics endpoints'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "A list of additional API endpoints related to analytics"})
|
||||
def get(self, request, format=None):
|
||||
data = OrderedDict()
|
||||
data['authorized'] = reverse('api:analytics_authorized', request=request)
|
||||
@@ -105,8 +99,6 @@ class AnalyticsGenericView(APIView):
|
||||
return Response(response.json(), status=response.status_code)
|
||||
"""
|
||||
|
||||
resource_purpose = 'base view for analytics api proxy'
|
||||
|
||||
permission_classes = (AnalyticsPermission,)
|
||||
|
||||
@staticmethod
|
||||
@@ -210,16 +202,10 @@ class AnalyticsGenericView(APIView):
|
||||
if method not in ["GET", "POST", "OPTIONS"]:
|
||||
return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
url = self._get_analytics_url(request.path)
|
||||
using_subscriptions_credentials = False
|
||||
try:
|
||||
rh_user = getattr(settings, 'REDHAT_USERNAME', None)
|
||||
rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
|
||||
if not (rh_user and rh_password):
|
||||
rh_user = self._get_setting('SUBSCRIPTIONS_CLIENT_ID', None, ERROR_MISSING_USER)
|
||||
rh_password = self._get_setting('SUBSCRIPTIONS_CLIENT_SECRET', None, ERROR_MISSING_PASSWORD)
|
||||
using_subscriptions_credentials = True
|
||||
|
||||
client = OIDCClient(rh_user, rh_password)
|
||||
rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
|
||||
rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
|
||||
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
|
||||
response = client.make_request(
|
||||
method,
|
||||
url,
|
||||
@@ -230,17 +216,17 @@ class AnalyticsGenericView(APIView):
|
||||
timeout=(31, 31),
|
||||
)
|
||||
except requests.RequestException:
|
||||
# subscriptions credentials are not valid for basic auth, so just return 401
|
||||
if using_subscriptions_credentials:
|
||||
response = Response(status=status.HTTP_401_UNAUTHORIZED)
|
||||
else:
|
||||
logger.error("Automation Analytics API request failed, trying base auth method")
|
||||
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
|
||||
logger.error("Automation Analytics API request failed, trying base auth method")
|
||||
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
|
||||
except MissingSettings:
|
||||
rh_user = self._get_setting('SUBSCRIPTIONS_USERNAME', None, ERROR_MISSING_USER)
|
||||
rh_password = self._get_setting('SUBSCRIPTIONS_PASSWORD', None, ERROR_MISSING_PASSWORD)
|
||||
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
|
||||
#
|
||||
# Missing or wrong user/pass
|
||||
#
|
||||
if response.status_code == status.HTTP_401_UNAUTHORIZED:
|
||||
text = response.get('text', '').rstrip("\n")
|
||||
text = (response.text or '').rstrip("\n")
|
||||
return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
|
||||
#
|
||||
# Not found, No entitlement or No data in Analytics
|
||||
@@ -265,91 +251,67 @@ class AnalyticsGenericView(APIView):
|
||||
|
||||
|
||||
class AnalyticsGenericListView(AnalyticsGenericView):
|
||||
resource_purpose = 'analytics api proxy list view'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get analytics data from Red Hat Insights"})
|
||||
def get(self, request, format=None):
|
||||
return self._send_to_analytics(request, method="GET")
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Post query to Red Hat Insights analytics"})
|
||||
def post(self, request, format=None):
|
||||
return self._send_to_analytics(request, method="POST")
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get analytics endpoint options"})
|
||||
def options(self, request, format=None):
|
||||
return self._send_to_analytics(request, method="OPTIONS")
|
||||
|
||||
|
||||
class AnalyticsGenericDetailView(AnalyticsGenericView):
|
||||
resource_purpose = 'analytics api proxy detail view'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get specific analytics resource from Red Hat Insights"})
|
||||
def get(self, request, slug, format=None):
|
||||
return self._send_to_analytics(request, method="GET")
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Post query for specific analytics resource to Red Hat Insights"})
|
||||
def post(self, request, slug, format=None):
|
||||
return self._send_to_analytics(request, method="POST")
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get options for specific analytics resource"})
|
||||
def options(self, request, slug, format=None):
|
||||
return self._send_to_analytics(request, method="OPTIONS")
|
||||
|
||||
|
||||
@extend_schema_if_available(
|
||||
extensions={'x-ai-description': 'Check if the user has access to Red Hat Insights'},
|
||||
)
|
||||
class AnalyticsAuthorizedView(AnalyticsGenericListView):
|
||||
name = _("Authorized")
|
||||
resource_purpose = 'red hat insights authorization status'
|
||||
|
||||
|
||||
class AnalyticsReportsList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Reports")
|
||||
swagger_topic = "Automation Analytics"
|
||||
resource_purpose = 'automation analytics reports'
|
||||
|
||||
|
||||
class AnalyticsReportDetail(AnalyticsGenericDetailView):
|
||||
name = _("Report")
|
||||
resource_purpose = 'automation analytics report detail'
|
||||
|
||||
|
||||
class AnalyticsReportOptionsList(AnalyticsGenericListView):
|
||||
name = _("Report Options")
|
||||
resource_purpose = 'automation analytics report options'
|
||||
|
||||
|
||||
class AnalyticsAdoptionRateList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Adoption Rate")
|
||||
resource_purpose = 'automation analytics adoption rate data'
|
||||
|
||||
|
||||
class AnalyticsEventExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Event Explorer")
|
||||
resource_purpose = 'automation analytics event explorer data'
|
||||
|
||||
|
||||
class AnalyticsHostExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Host Explorer")
|
||||
resource_purpose = 'automation analytics host explorer data'
|
||||
|
||||
|
||||
class AnalyticsJobExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Job Explorer")
|
||||
resource_purpose = 'automation analytics job explorer data'
|
||||
|
||||
|
||||
class AnalyticsProbeTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Probe Templates")
|
||||
resource_purpose = 'automation analytics probe templates'
|
||||
|
||||
|
||||
class AnalyticsProbeTemplateForHostsList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("Probe Template For Hosts")
|
||||
resource_purpose = 'automation analytics probe templates for hosts'
|
||||
|
||||
|
||||
class AnalyticsRoiTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
|
||||
name = _("ROI Templates")
|
||||
resource_purpose = 'automation analytics roi templates'
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
from collections import OrderedDict
|
||||
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
@@ -32,7 +30,6 @@ class BulkView(APIView):
|
||||
]
|
||||
allowed_methods = ['GET', 'OPTIONS']
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Retrieves a list of available bulk actions"})
|
||||
def get(self, request, format=None):
|
||||
'''List top level resources'''
|
||||
data = OrderedDict()
|
||||
@@ -48,13 +45,11 @@ class BulkJobLaunchView(GenericAPIView):
|
||||
serializer_class = serializers.BulkJobLaunchSerializer
|
||||
allowed_methods = ['GET', 'POST', 'OPTIONS']
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk job launch endpoint"})
|
||||
def get(self, request):
|
||||
data = OrderedDict()
|
||||
data['detail'] = "Specify a list of unified job templates to launch alongside their launchtime parameters"
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Bulk launch job templates"})
|
||||
def post(self, request):
|
||||
bulkjob_serializer = serializers.BulkJobLaunchSerializer(data=request.data, context={'request': request})
|
||||
if bulkjob_serializer.is_valid():
|
||||
@@ -69,11 +64,9 @@ class BulkHostCreateView(GenericAPIView):
|
||||
serializer_class = serializers.BulkHostCreateSerializer
|
||||
allowed_methods = ['GET', 'POST', 'OPTIONS']
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk host create endpoint"})
|
||||
def get(self, request):
|
||||
return Response({"detail": "Bulk create hosts with this endpoint"}, status=status.HTTP_200_OK)
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Bulk create hosts"})
|
||||
def post(self, request):
|
||||
serializer = serializers.BulkHostCreateSerializer(data=request.data, context={'request': request})
|
||||
if serializer.is_valid():
|
||||
@@ -88,11 +81,9 @@ class BulkHostDeleteView(GenericAPIView):
|
||||
serializer_class = serializers.BulkHostDeleteSerializer
|
||||
allowed_methods = ['GET', 'POST', 'OPTIONS']
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk host delete endpoint"})
|
||||
def get(self, request):
|
||||
return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Bulk delete hosts"})
|
||||
def post(self, request):
|
||||
serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
|
||||
if serializer.is_valid():
|
||||
|
||||
@@ -5,7 +5,6 @@ from django.conf import settings
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from awx.api.generics import APIView
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
from awx.main.scheduler import TaskManager, DependencyManager, WorkflowManager
|
||||
|
||||
@@ -15,9 +14,7 @@ class TaskManagerDebugView(APIView):
|
||||
exclude_from_schema = True
|
||||
permission_classes = [AllowAny]
|
||||
prefix = 'Task'
|
||||
resource_purpose = 'debug task manager'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Trigger task manager scheduling"})
|
||||
def get(self, request):
|
||||
TaskManager().schedule()
|
||||
if not settings.AWX_DISABLE_TASK_MANAGERS:
|
||||
@@ -32,9 +29,7 @@ class DependencyManagerDebugView(APIView):
|
||||
exclude_from_schema = True
|
||||
permission_classes = [AllowAny]
|
||||
prefix = 'Dependency'
|
||||
resource_purpose = 'debug dependency manager'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Trigger dependency manager scheduling"})
|
||||
def get(self, request):
|
||||
DependencyManager().schedule()
|
||||
if not settings.AWX_DISABLE_TASK_MANAGERS:
|
||||
@@ -49,9 +44,7 @@ class WorkflowManagerDebugView(APIView):
|
||||
exclude_from_schema = True
|
||||
permission_classes = [AllowAny]
|
||||
prefix = 'Workflow'
|
||||
resource_purpose = 'debug workflow manager'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Trigger workflow manager scheduling"})
|
||||
def get(self, request):
|
||||
WorkflowManager().schedule()
|
||||
if not settings.AWX_DISABLE_TASK_MANAGERS:
|
||||
@@ -65,9 +58,7 @@ class DebugRootView(APIView):
|
||||
_ignore_model_permissions = True
|
||||
exclude_from_schema = True
|
||||
permission_classes = [AllowAny]
|
||||
resource_purpose = 'debug endpoints root'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "List available debug endpoints"})
|
||||
def get(self, request, format=None):
|
||||
'''List of available debug urls'''
|
||||
data = OrderedDict()
|
||||
|
||||
@@ -10,10 +10,9 @@ import time
|
||||
import re
|
||||
|
||||
import asn1
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
from awx.api import serializers
|
||||
from awx.api.generics import GenericAPIView, Response
|
||||
from awx.api.permissions import IsSystemAdmin
|
||||
from awx.api.permissions import IsSystemAdminOrAuditor
|
||||
from awx.main import models
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
@@ -49,10 +48,8 @@ class InstanceInstallBundle(GenericAPIView):
|
||||
name = _('Install Bundle')
|
||||
model = models.Instance
|
||||
serializer_class = serializers.InstanceSerializer
|
||||
permission_classes = (IsSystemAdmin,)
|
||||
resource_purpose = 'install bundle'
|
||||
permission_classes = (IsSystemAdminOrAuditor,)
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Generate and download install bundle for an instance"})
|
||||
def get(self, request, *args, **kwargs):
|
||||
instance_obj = self.get_object()
|
||||
|
||||
@@ -198,8 +195,8 @@ def generate_receptor_tls(instance_obj):
.issuer_name(ca_cert.issuer)
.public_key(csr.public_key())
.serial_number(x509.random_serial_number())
.not_valid_before(datetime.datetime.now(datetime.UTC))
.not_valid_after(datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=3650))
.not_valid_before(datetime.datetime.utcnow())
.not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
.add_extension(
    csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
    critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
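The generate_receptor_tls() hunk above moves the certificate validity window from naive utcnow() timestamps to timezone-aware ones. A minimal standalone comparison of the two calls (assumes Python 3.11+, where datetime.UTC is available; not AWX code):

import datetime

naive = datetime.datetime.utcnow()           # no tzinfo attached; deprecated on recent Python versions
aware = datetime.datetime.now(datetime.UTC)  # carries UTC explicitly

assert naive.tzinfo is None
assert aware.tzinfo is datetime.UTC

# A ten-year validity window expressed with aware datetimes, as in the hunk above.
not_after = aware + datetime.timedelta(days=3650)
print(aware.isoformat(), not_after.isoformat())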
@@ -19,8 +19,6 @@ from rest_framework import serializers
|
||||
# AWX
|
||||
from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
|
||||
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
from awx.api.generics import (
|
||||
ListCreateAPIView,
|
||||
RetrieveUpdateDestroyAPIView,
|
||||
@@ -45,6 +43,7 @@ from awx.api.views.mixin import RelatedJobsPreventDeleteMixin
|
||||
|
||||
from awx.api.pagination import UnifiedJobEventPagination
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.api.views.organization')
|
||||
|
||||
|
||||
@@ -56,7 +55,6 @@ class InventoryUpdateEventsList(SubListAPIView):
|
||||
name = _('Inventory Update Events List')
|
||||
search_fields = ('stdout',)
|
||||
pagination_class = UnifiedJobEventPagination
|
||||
resource_purpose = 'events of an inventory update'
|
||||
|
||||
def get_queryset(self):
|
||||
iu = self.get_parent_object()
|
||||
@@ -71,17 +69,11 @@ class InventoryUpdateEventsList(SubListAPIView):
|
||||
class InventoryList(ListCreateAPIView):
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
resource_purpose = 'inventories'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "A list of inventories."})
|
||||
def get(self, request, *args, **kwargs):
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
resource_purpose = 'inventory detail'
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
@@ -108,39 +100,33 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
|
||||
|
||||
class ConstructedInventoryDetail(InventoryDetail):
|
||||
serializer_class = ConstructedInventorySerializer
|
||||
resource_purpose = 'constructed inventory detail'
|
||||
|
||||
|
||||
class ConstructedInventoryList(InventoryList):
|
||||
serializer_class = ConstructedInventorySerializer
|
||||
resource_purpose = 'constructed inventories'
|
||||
|
||||
def get_queryset(self):
|
||||
r = super().get_queryset()
|
||||
return r.filter(kind='constructed')
|
||||
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get or create input inventory inventory"})
|
||||
class InventoryInputInventoriesList(SubListAttachDetachAPIView):
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'input_inventories'
|
||||
resource_purpose = 'input inventories of a constructed inventory'
|
||||
|
||||
def is_valid_relation(self, parent, sub, created=False):
|
||||
if sub.kind == 'constructed':
|
||||
raise serializers.ValidationError({'error': 'You cannot add a constructed inventory to another constructed inventory.'})
|
||||
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get activity stream for an inventory"})
|
||||
class InventoryActivityStreamList(SubListAPIView):
|
||||
model = ActivityStream
|
||||
serializer_class = ActivityStreamSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'activitystream_set'
|
||||
search_fields = ('changes',)
|
||||
resource_purpose = 'activity stream for an inventory'
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
@@ -154,13 +140,11 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
serializer_class = InstanceGroupSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'instance_groups'
|
||||
resource_purpose = 'instance groups of an inventory'
|
||||
|
||||
|
||||
class InventoryAccessList(ResourceAccessList):
|
||||
model = User # needs to be User for AccessLists's
|
||||
parent_model = Inventory
|
||||
resource_purpose = 'users who can access the inventory'
|
||||
|
||||
|
||||
class InventoryObjectRolesList(SubListAPIView):
|
||||
@@ -169,7 +153,6 @@ class InventoryObjectRolesList(SubListAPIView):
|
||||
parent_model = Inventory
|
||||
search_fields = ('role_field', 'content_type__model')
|
||||
deprecated = True
|
||||
resource_purpose = 'roles of an inventory'
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
@@ -182,7 +165,6 @@ class InventoryJobTemplateList(SubListAPIView):
|
||||
serializer_class = JobTemplateSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'jobtemplates'
|
||||
resource_purpose = 'job templates using an inventory'
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
@@ -193,10 +175,8 @@ class InventoryJobTemplateList(SubListAPIView):
|
||||
|
||||
class InventoryLabelList(LabelSubListCreateAttachDetachView):
|
||||
parent_model = Inventory
|
||||
resource_purpose = 'labels of an inventory'
|
||||
|
||||
|
||||
class InventoryCopy(CopyAPIView):
|
||||
model = Inventory
|
||||
copy_return_serializer_class = InventorySerializer
|
||||
resource_purpose = 'copy of an inventory'
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
from awx.api.generics import SubListCreateAttachDetachAPIView, RetrieveUpdateAPIView, ListCreateAPIView
|
||||
from awx.main.models import Label
|
||||
from awx.api.serializers import LabelSerializer
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
# Django
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -25,10 +24,9 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
|
||||
model = Label
|
||||
serializer_class = LabelSerializer
|
||||
relationship = 'labels'
|
||||
resource_purpose = 'labels of a resource'
|
||||
|
||||
def unattach(self, request, *args, **kwargs):
|
||||
sub_id, res = super().unattach_validate(request)
|
||||
(sub_id, res) = super().unattach_validate(request)
|
||||
if res:
|
||||
return res
|
||||
|
||||
@@ -41,7 +39,6 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
|
||||
|
||||
return res
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Create or attach a label to a resource"})
|
||||
def post(self, request, *args, **kwargs):
|
||||
# If a label already exists in the database, attach it instead of erroring out
|
||||
# that it already exists
|
||||
@@ -64,11 +61,9 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
|
||||
class LabelDetail(RetrieveUpdateAPIView):
|
||||
model = Label
|
||||
serializer_class = LabelSerializer
|
||||
resource_purpose = 'label detail'
|
||||
|
||||
|
||||
class LabelList(ListCreateAPIView):
|
||||
name = _("Labels")
|
||||
model = Label
|
||||
serializer_class = LabelSerializer
|
||||
resource_purpose = 'labels'
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
from awx.api.generics import APIView, Response
|
||||
from awx.api.permissions import IsSystemAdminOrAuditor
|
||||
@@ -14,9 +13,7 @@ class MeshVisualizer(APIView):
|
||||
name = _("Mesh Visualizer")
|
||||
permission_classes = (IsSystemAdminOrAuditor,)
|
||||
swagger_topic = "System Configuration"
|
||||
resource_purpose = 'mesh network topology visualization data'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get mesh network topology visualization data"})
|
||||
def get(self, request, format=None):
|
||||
data = {
|
||||
'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
|
||||
|
||||
@@ -7,13 +7,13 @@ import logging
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
|
||||
# AWX
|
||||
# from awx.main.analytics import collectors
|
||||
import awx.main.analytics.subsystem_metrics as s_metrics
|
||||
@@ -22,13 +22,13 @@ from awx.api import renderers
|
||||
|
||||
from awx.api.generics import APIView
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.analytics')
|
||||
|
||||
|
||||
class MetricsView(APIView):
|
||||
name = _('Metrics')
|
||||
swagger_topic = 'Metrics'
|
||||
resource_purpose = 'prometheus metrics data'
|
||||
|
||||
renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
|
||||
|
||||
@@ -37,7 +37,6 @@ class MetricsView(APIView):
|
||||
self.permission_classes = (AllowAny,)
|
||||
return super(APIView, self).initialize_request(request, *args, **kwargs)
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get Prometheus metrics data"})
|
||||
def get(self, request):
|
||||
'''Show Metrics Details'''
|
||||
if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
|
||||
|
||||
@@ -53,20 +53,21 @@ from awx.api.serializers import (
|
||||
CredentialSerializer,
|
||||
)
|
||||
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin, OrganizationInstanceGroupMembershipMixin
|
||||
from awx.api.views import immutablesharedfields
|
||||
|
||||
logger = logging.getLogger('awx.api.views.organization')
|
||||
|
||||
|
||||
@immutablesharedfields
|
||||
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
||||
model = Organization
|
||||
serializer_class = OrganizationSerializer
|
||||
resource_purpose = 'organizations'
|
||||
|
||||
|
||||
@immutablesharedfields
|
||||
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||
model = Organization
|
||||
serializer_class = OrganizationSerializer
|
||||
resource_purpose = 'organization detail'
|
||||
|
||||
def get_serializer_context(self, *args, **kwargs):
|
||||
full_context = super(OrganizationDetail, self).get_serializer_context(*args, **kwargs)
|
||||
@@ -104,25 +105,24 @@ class OrganizationInventoriesList(SubListAPIView):
|
||||
serializer_class = InventorySerializer
|
||||
parent_model = Organization
|
||||
relationship = 'inventories'
|
||||
resource_purpose = 'inventories of an organization'
|
||||
|
||||
|
||||
@immutablesharedfields
|
||||
class OrganizationUsersList(BaseUsersList):
|
||||
model = User
|
||||
serializer_class = UserSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'member_role.members'
|
||||
ordering = ('username',)
|
||||
resource_purpose = 'users of an organization'
|
||||
|
||||
|
||||
@immutablesharedfields
|
||||
class OrganizationAdminsList(BaseUsersList):
|
||||
model = User
|
||||
serializer_class = UserSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'admin_role.members'
|
||||
ordering = ('username',)
|
||||
resource_purpose = 'administrators of an organization'
|
||||
|
||||
|
||||
class OrganizationProjectsList(SubListCreateAPIView):
|
||||
@@ -130,7 +130,6 @@ class OrganizationProjectsList(SubListCreateAPIView):
|
||||
serializer_class = ProjectSerializer
|
||||
parent_model = Organization
|
||||
parent_key = 'organization'
|
||||
resource_purpose = 'projects of an organization'
|
||||
|
||||
|
||||
class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
|
||||
@@ -140,7 +139,6 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
|
||||
relationship = 'executionenvironments'
|
||||
parent_key = 'organization'
|
||||
swagger_topic = "Execution Environments"
|
||||
resource_purpose = 'execution environments of an organization'
|
||||
|
||||
|
||||
class OrganizationJobTemplatesList(SubListCreateAPIView):
|
||||
@@ -148,7 +146,6 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):
|
||||
serializer_class = JobTemplateSerializer
|
||||
parent_model = Organization
|
||||
parent_key = 'organization'
|
||||
resource_purpose = 'job templates of an organization'
|
||||
|
||||
|
||||
class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
|
||||
@@ -156,16 +153,15 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
|
||||
serializer_class = WorkflowJobTemplateSerializer
|
||||
parent_model = Organization
|
||||
parent_key = 'organization'
|
||||
resource_purpose = 'workflow job templates of an organization'
|
||||
|
||||
|
||||
@immutablesharedfields
|
||||
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
||||
model = Team
|
||||
serializer_class = TeamSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'teams'
|
||||
parent_key = 'organization'
|
||||
resource_purpose = 'teams of an organization'
|
||||
|
||||
|
||||
class OrganizationActivityStreamList(SubListAPIView):
|
||||
@@ -174,7 +170,6 @@ class OrganizationActivityStreamList(SubListAPIView):
|
||||
parent_model = Organization
|
||||
relationship = 'activitystream_set'
|
||||
search_fields = ('changes',)
|
||||
resource_purpose = 'activity stream for an organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
|
||||
@@ -183,34 +178,28 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates'
|
||||
parent_key = 'organization'
|
||||
resource_purpose = 'notification templates of an organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
resource_purpose = 'base view for notification templates of an organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):
|
||||
relationship = 'notification_templates_started'
|
||||
resource_purpose = 'notification templates for job started events of an organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):
|
||||
relationship = 'notification_templates_error'
|
||||
resource_purpose = 'notification templates for job error events of an organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):
|
||||
relationship = 'notification_templates_success'
|
||||
resource_purpose = 'notification templates for job success events of an organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
|
||||
relationship = 'notification_templates_approvals'
|
||||
resource_purpose = 'notification templates for workflow approval events of an organization'
|
||||
|
||||
|
||||
class OrganizationInstanceGroupsList(OrganizationInstanceGroupMembershipMixin, SubListAttachDetachAPIView):
|
||||
@@ -219,7 +208,6 @@ class OrganizationInstanceGroupsList(OrganizationInstanceGroupMembershipMixin, S
|
||||
parent_model = Organization
|
||||
relationship = 'instance_groups'
|
||||
filter_read_permission = False
|
||||
resource_purpose = 'instance groups of an organization'
|
||||
|
||||
|
||||
class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
|
||||
@@ -228,7 +216,6 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
|
||||
parent_model = Organization
|
||||
relationship = 'galaxy_credentials'
|
||||
filter_read_permission = False
|
||||
resource_purpose = 'galaxy credentials of an organization'
|
||||
|
||||
def is_valid_relation(self, parent, sub, created=False):
|
||||
if sub.kind != 'galaxy_api_token':
|
||||
@@ -238,7 +225,6 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
|
||||
class OrganizationAccessList(ResourceAccessList):
|
||||
model = User # needs to be User for AccessLists's
|
||||
parent_model = Organization
|
||||
resource_purpose = 'users who can access the organization'
|
||||
|
||||
|
||||
class OrganizationObjectRolesList(SubListAPIView):
|
||||
@@ -247,7 +233,6 @@ class OrganizationObjectRolesList(SubListAPIView):
|
||||
parent_model = Organization
|
||||
search_fields = ('role_field', 'content_type__model')
|
||||
deprecated = True
|
||||
resource_purpose = 'roles of an organization'
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
|
||||
@@ -8,8 +8,6 @@ import operator
|
||||
from collections import OrderedDict
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.db import connection
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.csrf import ensure_csrf_cookie
|
||||
@@ -23,21 +21,17 @@ from rest_framework import status
|
||||
|
||||
import requests
|
||||
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
from awx import MODE
|
||||
from awx.api.generics import APIView
|
||||
from awx.conf.registry import settings_registry
|
||||
from awx.main.analytics import all_collectors
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.main.tasks.system import clear_setting_cache
|
||||
from awx.main.utils import get_awx_version, get_custom_venv_choices
|
||||
from awx.main.utils.licensing import validate_entitlement_manifest
|
||||
from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
|
||||
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
|
||||
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
|
||||
from awx.main.utils import set_environ
|
||||
from awx.main.utils.analytics_proxy import TokenError
|
||||
from awx.main.utils.licensing import get_licenser
|
||||
|
||||
logger = logging.getLogger('awx.api.views.root')
|
||||
@@ -48,10 +42,8 @@ class ApiRootView(APIView):
|
||||
name = _('REST API')
|
||||
versioning_class = URLPathVersioning
|
||||
swagger_topic = 'Versioning'
|
||||
resource_purpose = 'api root and version information'
|
||||
|
||||
@method_decorator(ensure_csrf_cookie)
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "List supported API versions"})
|
||||
def get(self, request, format=None):
|
||||
'''List supported API versions'''
|
||||
v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
|
||||
@@ -63,16 +55,14 @@ class ApiRootView(APIView):
|
||||
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
||||
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
|
||||
if MODE == 'development':
|
||||
data['docs'] = drf_reverse('api:schema-swagger-ui')
|
||||
data['swagger'] = drf_reverse('api:schema-swagger-ui')
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiVersionRootView(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
swagger_topic = 'Versioning'
|
||||
resource_purpose = 'api top-level resources'
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "List top-level API resources"})
|
||||
def get(self, request, format=None):
|
||||
'''List top level resources'''
|
||||
data = OrderedDict()
|
||||
@@ -132,7 +122,6 @@ class ApiVersionRootView(APIView):
|
||||
|
||||
class ApiV2RootView(ApiVersionRootView):
|
||||
name = _('Version 2')
|
||||
resource_purpose = 'api v2 root'
|
||||
|
||||
|
||||
class ApiV2PingView(APIView):
|
||||
@@ -144,11 +133,7 @@ class ApiV2PingView(APIView):
|
||||
authentication_classes = ()
|
||||
name = _('Ping')
|
||||
swagger_topic = 'System Configuration'
|
||||
resource_purpose = 'basic instance information'
|
||||
|
||||
@extend_schema_if_available(
|
||||
extensions={'x-ai-description': 'Return basic information about this instance'},
|
||||
)
|
||||
def get(self, request, format=None):
|
||||
"""Return some basic information about this instance
|
||||
|
||||
@@ -183,64 +168,27 @@ class ApiV2SubscriptionView(APIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
name = _('Subscriptions')
|
||||
swagger_topic = 'System Configuration'
|
||||
resource_purpose = 'aap subscription validation'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV2SubscriptionView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
@extend_schema_if_available(
|
||||
extensions={'x-ai-description': 'List valid AAP subscriptions'},
|
||||
)
|
||||
def post(self, request):
|
||||
data = request.data.copy()
|
||||
|
||||
if data.get('subscriptions_password') == '$encrypted$':
|
||||
data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
|
||||
try:
|
||||
user = None
|
||||
pw = None
|
||||
basic_auth = False
|
||||
# determine if the credentials are for basic auth or not
|
||||
if data.get('subscriptions_client_id'):
|
||||
user, pw = data.get('subscriptions_client_id'), data.get('subscriptions_client_secret')
|
||||
if pw == '$encrypted$':
|
||||
pw = settings.SUBSCRIPTIONS_CLIENT_SECRET
|
||||
elif data.get('subscriptions_username'):
|
||||
user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
|
||||
if pw == '$encrypted$':
|
||||
pw = settings.SUBSCRIPTIONS_PASSWORD
|
||||
basic_auth = True
|
||||
|
||||
if not user or not pw:
|
||||
return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
|
||||
with set_environ(**settings.AWX_TASK_ENV):
|
||||
validated = get_licenser().validate_rh(user, pw, basic_auth)
|
||||
|
||||
# update settings if the credentials were valid
|
||||
if basic_auth:
|
||||
if user:
|
||||
settings.SUBSCRIPTIONS_USERNAME = user
|
||||
if pw:
|
||||
settings.SUBSCRIPTIONS_PASSWORD = pw
|
||||
# mutual exclusion for basic auth and service account
|
||||
# only one should be set at a given time so that
|
||||
# config/attach/ knows which credentials to use
|
||||
settings.SUBSCRIPTIONS_CLIENT_ID = ""
|
||||
settings.SUBSCRIPTIONS_CLIENT_SECRET = ""
|
||||
else:
|
||||
if user:
|
||||
settings.SUBSCRIPTIONS_CLIENT_ID = user
|
||||
if pw:
|
||||
settings.SUBSCRIPTIONS_CLIENT_SECRET = pw
|
||||
# mutual exclusion for basic auth and service account
|
||||
settings.SUBSCRIPTIONS_USERNAME = ""
|
||||
settings.SUBSCRIPTIONS_PASSWORD = ""
|
||||
validated = get_licenser().validate_rh(user, pw)
|
||||
if user:
|
||||
settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
|
||||
if pw:
|
||||
settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
|
||||
except Exception as exc:
|
||||
msg = _("Invalid Subscription")
|
||||
if isinstance(exc, TokenError) or (
|
||||
isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401
|
||||
):
|
||||
if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
|
||||
msg = _("The provided credentials are invalid (HTTP 401).")
|
||||
elif isinstance(exc, requests.exceptions.ProxyError):
|
||||
msg = _("Unable to connect to proxy server.")
|
||||
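The subscription hunk above distinguishes service-account credentials (client id/secret) from basic-auth credentials (username/password) and keeps only one pair stored at a time. A compact sketch of the selection step (hypothetical helper, not the AWX view):

def pick_subscription_credentials(data):
    # Returns (user, secret, basic_auth); service-account credentials take priority.
    if data.get('subscriptions_client_id'):
        return data['subscriptions_client_id'], data.get('subscriptions_client_secret'), False
    if data.get('subscriptions_username'):
        return data['subscriptions_username'], data.get('subscriptions_password'), True
    return None, None, False

user, pw, basic_auth = pick_subscription_credentials({'subscriptions_username': 'rh-user', 'subscriptions_password': 's3cret'})
print(user, basic_auth)  # rh-user True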
@@ -259,37 +207,24 @@ class ApiV2AttachView(APIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
name = _('Attach Subscription')
|
||||
swagger_topic = 'System Configuration'
|
||||
resource_purpose = 'subscription attachment'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV2AttachView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
@extend_schema_if_available(
|
||||
extensions={'x-ai-description': 'Attach a subscription'},
|
||||
)
|
||||
def post(self, request):
|
||||
data = request.data.copy()
|
||||
subscription_id = data.get('subscription_id', None)
|
||||
if not subscription_id:
|
||||
return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
# Ensure we always use the latest subscription credentials
|
||||
cache.delete_many(['SUBSCRIPTIONS_CLIENT_ID', 'SUBSCRIPTIONS_CLIENT_SECRET', 'SUBSCRIPTIONS_USERNAME', 'SUBSCRIPTIONS_PASSWORD'])
|
||||
user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
|
||||
pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
|
||||
basic_auth = False
|
||||
if not (user and pw):
|
||||
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
|
||||
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
|
||||
basic_auth = True
|
||||
if not (user and pw):
|
||||
return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
if subscription_id and user and pw:
|
||||
pool_id = data.get('pool_id', None)
|
||||
if not pool_id:
|
||||
return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
|
||||
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
|
||||
if pool_id and user and pw:
|
||||
data = request.data.copy()
|
||||
try:
|
||||
with set_environ(**settings.AWX_TASK_ENV):
|
||||
validated = get_licenser().validate_rh(user, pw, basic_auth)
|
||||
validated = get_licenser().validate_rh(user, pw)
|
||||
except Exception as exc:
|
||||
msg = _("Invalid Subscription")
|
||||
if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
|
||||
@@ -303,12 +238,10 @@ class ApiV2AttachView(APIView):
|
||||
else:
|
||||
logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
|
||||
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
for sub in validated:
|
||||
if sub['subscription_id'] == subscription_id:
|
||||
if sub['pool_id'] == pool_id:
|
||||
sub['valid_key'] = True
|
||||
settings.LICENSE = sub
|
||||
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
|
||||
return Response(sub)
|
||||
|
||||
return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -318,20 +251,17 @@ class ApiV2ConfigView(APIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
name = _('Configuration')
|
||||
swagger_topic = 'System Configuration'
|
||||
resource_purpose = 'system configuration and license management'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV2ConfigView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
@extend_schema_if_available(
|
||||
extensions={'x-ai-description': 'Return various configuration settings'},
|
||||
)
|
||||
def get(self, request, format=None):
|
||||
'''Return various sitewide configuration settings'''
|
||||
|
||||
license_data = get_licenser().validate()
|
||||
|
||||
if not license_data.get('valid_key', False):
|
||||
license_data = {}
|
||||
|
||||
@@ -366,7 +296,6 @@ class ApiV2ConfigView(APIView):
|
||||
|
||||
return Response(data)
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Add or update a subscription manifest license"})
|
||||
def post(self, request):
|
||||
if not isinstance(request.data, dict):
|
||||
return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -396,7 +325,6 @@ class ApiV2ConfigView(APIView):
|
||||
|
||||
try:
|
||||
license_data_validated = get_licenser().license_from_manifest(license_data)
|
||||
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
|
||||
except Exception:
|
||||
logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
@@ -412,13 +340,9 @@ class ApiV2ConfigView(APIView):
|
||||
logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@extend_schema_if_available(
|
||||
extensions={'x-ai-description': 'Remove the current subscription'},
|
||||
)
|
||||
def delete(self, request):
|
||||
try:
|
||||
settings.LICENSE = {}
|
||||
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
except Exception:
|
||||
# FIX: Log
|
||||
|
||||
@@ -11,7 +11,6 @@ from rest_framework import status
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
from awx.api import serializers
|
||||
from awx.api.generics import APIView, GenericAPIView
|
||||
@@ -25,7 +24,6 @@ logger = logging.getLogger('awx.api.views.webhooks')
|
||||
class WebhookKeyView(GenericAPIView):
|
||||
serializer_class = serializers.EmptySerializer
|
||||
permission_classes = (WebhookKeyPermission,)
|
||||
resource_purpose = 'webhook key management'
|
||||
|
||||
def get_queryset(self):
|
||||
qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
|
||||
@@ -33,13 +31,11 @@ class WebhookKeyView(GenericAPIView):
|
||||
|
||||
return super().get_queryset()
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Get the webhook key for a template"})
|
||||
def get(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
|
||||
return Response({'webhook_key': obj.webhook_key})
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Rotate the webhook key for a template"})
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
obj.rotate_webhook_key()
|
||||
@@ -56,7 +52,6 @@ class WebhookReceiverBase(APIView):
|
||||
authentication_classes = ()
|
||||
|
||||
ref_keys = {}
|
||||
resource_purpose = 'webhook receiver for triggering jobs'
|
||||
|
||||
def get_queryset(self):
|
||||
qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
|
||||
@@ -132,8 +127,7 @@ class WebhookReceiverBase(APIView):
|
||||
raise PermissionDenied
|
||||
|
||||
@csrf_exempt
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Receive a webhook event and trigger a job"})
|
||||
def post(self, request, *args, **kwargs_in):
|
||||
def post(self, request, *args, **kwargs):
|
||||
# Ensure that the full contents of the request are captured for multiple uses.
|
||||
request.body
|
||||
|
||||
@@ -181,7 +175,6 @@ class WebhookReceiverBase(APIView):
|
||||
|
||||
class GithubWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'github'
|
||||
resource_purpose = 'github webhook receiver'
|
||||
|
||||
ref_keys = {
|
||||
'pull_request': 'pull_request.head.sha',
|
||||
@@ -219,7 +212,6 @@ class GithubWebhookReceiver(WebhookReceiverBase):
|
||||
|
||||
class GitlabWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'gitlab'
|
||||
resource_purpose = 'gitlab webhook receiver'
|
||||
|
||||
ref_keys = {'Push Hook': 'checkout_sha', 'Tag Push Hook': 'checkout_sha', 'Merge Request Hook': 'object_attributes.last_commit.id'}
|
||||
|
||||
@@ -258,7 +250,6 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
|
||||
|
||||
class BitbucketDcWebhookReceiver(WebhookReceiverBase):
|
||||
service = 'bitbucket_dc'
|
||||
resource_purpose = 'bitbucket data center webhook receiver'
|
||||
|
||||
ref_keys = {
|
||||
'repo:refs_changed': 'changes.0.toHash',
|
||||
|
||||
@@ -6,11 +6,11 @@ import urllib.parse as urlparse
|
||||
from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
from django.core.validators import URLValidator, DomainNameValidator, _lazy_re_compile
|
||||
from django.core.validators import URLValidator, _lazy_re_compile
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, FloatField # noqa
|
||||
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField # noqa
|
||||
from rest_framework.serializers import PrimaryKeyRelatedField # noqa
|
||||
|
||||
# AWX
|
||||
@@ -160,11 +160,10 @@ class StringListIsolatedPathField(StringListField):
|
||||
class URLField(CharField):
|
||||
# these lines set up a custom regex that allow numbers in the
|
||||
# top-level domain
|
||||
|
||||
tld_re = (
|
||||
r'\.' # dot
|
||||
r'(?!-)' # can't start with a dash
|
||||
r'(?:[a-z' + DomainNameValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
|
||||
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
|
||||
r'|xn--[a-z0-9]{1,59})' # or punycode label
|
||||
r'(?<!-)' # can't end with a dash
|
||||
r'\.?' # may have a trailing dot
|
||||
@@ -208,8 +207,7 @@ class URLField(CharField):
if self.allow_plain_hostname:
    try:
        url_parts = urlparse.urlsplit(value)
        looks_like_ipv6 = bool(url_parts.netloc and url_parts.netloc.startswith('[') and url_parts.netloc.endswith(']'))
        if not looks_like_ipv6 and url_parts.hostname and '.' not in url_parts.hostname:
        if url_parts.hostname and '.' not in url_parts.hostname:
            netloc = '{}.local'.format(url_parts.hostname)
            if url_parts.port:
                netloc = '{}:{}'.format(netloc, url_parts.port)
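The allow_plain_hostname hunk above skips the '.local' rewrite when the netloc is a bracketed IPv6 literal. A standalone sketch of that heuristic (illustrative helper only, not the AWX field):

import urllib.parse as urlparse

def looks_like_ipv6(url):
    # Bracketed hosts such as https://[2001:db8::1]/ are IPv6 literals and should not
    # be rewritten to '<hostname>.local' the way bare single-label hostnames are.
    netloc = urlparse.urlsplit(url).netloc
    return bool(netloc and netloc.startswith('[') and netloc.endswith(']'))

print(looks_like_ipv6('https://[2001:db8::1]/'))  # True
print(looks_like_ipv6('https://towerhost/'))      # False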
@@ -27,5 +27,5 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
|
||||
|
||||
|
||||
def prefill_rh_credentials(apps, schema_editor):
|
||||
_migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_CLIENT_ID', encrypted=False)
|
||||
_migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_CLIENT_SECRET', encrypted=True)
|
||||
_migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
|
||||
_migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)
|
||||
|
||||
@@ -38,7 +38,6 @@ class SettingsRegistry(object):
|
||||
if setting in self._registry:
|
||||
raise ImproperlyConfigured('Setting "{}" is already registered.'.format(setting))
|
||||
category = kwargs.setdefault('category', None)
|
||||
kwargs.setdefault('required', False) # No setting is ordinarily required
|
||||
category_slug = kwargs.setdefault('category_slug', slugify(category or '') or None)
|
||||
if category_slug in {'all', 'changed', 'user-defaults'}:
|
||||
raise ImproperlyConfigured('"{}" is a reserved category slug.'.format(category_slug))
|
||||
|
||||
@@ -128,41 +128,3 @@ class TestURLField:
|
||||
else:
|
||||
with pytest.raises(ValidationError):
|
||||
field.run_validators(url)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"url, expect_error",
|
||||
[
|
||||
("https://[1:2:3]", True),
|
||||
("http://[1:2:3]", True),
|
||||
("https://[2001:db8:3333:4444:5555:6666:7777:8888", True),
|
||||
("https://2001:db8:3333:4444:5555:6666:7777:8888", True),
|
||||
("https://[2001:db8:3333:4444:5555:6666:7777:8888]", False),
|
||||
("https://[::1]", False),
|
||||
("https://[::]", False),
|
||||
("https://[2001:db8::1]", False),
|
||||
("https://[2001:db8:0:0:0:0:1:1]", False),
|
||||
("https://[fe80::2%eth0]", True), # ipv6 scope identifier
|
||||
("https://[fe80:0:0:0:200:f8ff:fe21:67cf]", False),
|
||||
("https://[::ffff:192.168.1.10]", False),
|
||||
("https://[0:0:0:0:0:ffff:c000:0201]", False),
|
||||
("https://[2001:0db8:000a:0001:0000:0000:0000:0000]", False),
|
||||
("https://[2001:db8:a:1::]", False),
|
||||
("https://[ff02::1]", False),
|
||||
("https://[ff02:0:0:0:0:0:0:1]", False),
|
||||
("https://[fc00::1]", False),
|
||||
("https://[fd12:3456:789a:1::1]", False),
|
||||
("https://[2001:db8::abcd:ef12:3456:7890]", False),
|
||||
("https://[2001:db8:0000:abcd:0000:ef12:0000:3456]", False),
|
||||
("https://[::ffff:10.0.0.1]", False),
|
||||
("https://[2001:db8:cafe::]", False),
|
||||
("https://[2001:db8:cafe:0:0:0:0:0]", False),
|
||||
("https://[fe80::210:f3ff:fedf:4567%3]", True), # ipv6 scope identifier, numerical interface
|
||||
],
|
||||
)
|
||||
def test_ipv6_urls(self, url, expect_error):
|
||||
field = URLField()
|
||||
if expect_error:
|
||||
with pytest.raises(ValidationError, match="Enter a valid URL"):
|
||||
field.run_validators(url)
|
||||
else:
|
||||
field.run_validators(url)
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.urls import re_path
|
||||
|
||||
from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
re_path(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
|
||||
re_path(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
|
||||
|
||||
@@ -31,7 +31,7 @@ from awx.conf.models import Setting
|
||||
from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
|
||||
from awx.conf import settings_registry
|
||||
from awx.main.utils.external_logging import reconfigure_rsyslog
|
||||
from ansible_base.lib.utils.schema import extend_schema_if_available
|
||||
|
||||
|
||||
SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'name'))
|
||||
|
||||
@@ -42,10 +42,6 @@ class SettingCategoryList(ListAPIView):
|
||||
filter_backends = []
|
||||
name = _('Setting Categories')
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "A list of additional API endpoints related to settings."})
|
||||
def get(self, request, *args, **kwargs):
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
def get_queryset(self):
|
||||
setting_categories = []
|
||||
categories = settings_registry.get_registered_categories()
|
||||
@@ -67,10 +63,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
|
||||
filter_backends = []
|
||||
name = _('Setting Detail')
|
||||
|
||||
@extend_schema_if_available(extensions={"x-ai-description": "Update system settings."})
|
||||
def patch(self, request, *args, **kwargs):
|
||||
return super().patch(request, *args, **kwargs)
|
||||
|
||||
def get_queryset(self):
|
||||
self.category_slug = self.kwargs.get('category_slug', 'all')
|
||||
all_category_slugs = list(settings_registry.get_registered_categories().keys())
|
||||
|
||||
@@ -639,9 +639,7 @@ class UserAccess(BaseAccess):
|
||||
prefetch_related = ('resource',)
|
||||
|
||||
def filtered_queryset(self):
|
||||
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (
|
||||
Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
|
||||
):
|
||||
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
|
||||
qs = User.objects.all()
|
||||
else:
|
||||
qs = (
|
||||
@@ -1226,9 +1224,7 @@ class TeamAccess(BaseAccess):
|
||||
)
|
||||
|
||||
def filtered_queryset(self):
|
||||
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (
|
||||
Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
|
||||
):
|
||||
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
|
||||
return self.model.objects.all()
|
||||
return self.model.objects.filter(
|
||||
Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) | Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
|
||||
@@ -2568,7 +2564,7 @@ class NotificationTemplateAccess(BaseAccess):
|
||||
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
|
||||
return self.model.access_qs(self.user, 'view')
|
||||
return self.model.objects.filter(
|
||||
Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=Organization.access_qs(self.user, 'audit'))
|
||||
Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=self.user.auditor_of_organizations)
|
||||
).distinct()
|
||||
|
||||
@check_superuser
|
||||
@@ -2603,7 +2599,7 @@ class NotificationAccess(BaseAccess):
|
||||
def filtered_queryset(self):
|
||||
return self.model.objects.filter(
|
||||
Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate'))
|
||||
| Q(notification_template__organization__in=Organization.access_qs(self.user, 'audit'))
|
||||
| Q(notification_template__organization__in=self.user.auditor_of_organizations)
|
||||
).distinct()
|
||||
|
||||
def can_delete(self, obj):
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
# Python
|
||||
import logging
|
||||
|
||||
# Dispatcherd
|
||||
from dispatcherd.publish import task
|
||||
|
||||
# AWX
|
||||
from awx.main.analytics.subsystem_metrics import DispatcherMetrics, CallbackReceiverMetrics
|
||||
from awx.main.dispatch.publish import task
|
||||
from awx.main.dispatch import get_task_queuename
|
||||
|
||||
logger = logging.getLogger('awx.main.scheduler')
|
||||
|
||||
|
||||
@task(queue=get_task_queuename, timeout=300, on_duplicate='discard')
|
||||
@task(queue=get_task_queuename)
|
||||
def send_subsystem_metrics():
|
||||
DispatcherMetrics().send_metrics()
|
||||
CallbackReceiverMetrics().send_metrics()
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import datetime
|
||||
import asyncio
|
||||
import logging
|
||||
import redis
|
||||
import redis.asyncio
|
||||
import re
|
||||
|
||||
from prometheus_client import (
|
||||
@@ -13,7 +15,7 @@ from prometheus_client import (
|
||||
)
|
||||
|
||||
from django.conf import settings
|
||||
from awx.main.utils.redis import get_redis_client, get_redis_client_async
|
||||
|
||||
|
||||
BROADCAST_WEBSOCKET_REDIS_KEY_NAME = 'broadcast_websocket_stats'
|
||||
|
||||
@@ -64,8 +66,6 @@ class FixedSlidingWindow:
|
||||
|
||||
|
||||
class RelayWebsocketStatsManager:
|
||||
_redis_client = None # Cached Redis client for get_stats_sync()
|
||||
|
||||
def __init__(self, local_hostname):
|
||||
self._local_hostname = local_hostname
|
||||
self._stats = dict()
|
||||
@@ -80,7 +80,7 @@ class RelayWebsocketStatsManager:
|
||||
|
||||
async def run_loop(self):
|
||||
try:
|
||||
redis_conn = get_redis_client_async()
|
||||
redis_conn = await redis.asyncio.Redis.from_url(settings.BROKER_URL)
|
||||
while True:
|
||||
stats_data_str = ''.join(stat.serialize() for stat in self._stats.values())
|
||||
await redis_conn.set(self._redis_key, stats_data_str)
|
||||
@@ -103,10 +103,8 @@ class RelayWebsocketStatsManager:
"""
Stringified version of all the stats
"""
# Reuse cached Redis client to avoid creating new connection pools on every call
if cls._redis_client is None:
    cls._redis_client = get_redis_client()
stats_str = cls._redis_client.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
redis_conn = redis.Redis.from_url(settings.BROKER_URL)
stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))
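The get_stats_sync() hunk above replaces a per-call redis.Redis.from_url(...) with a client cached on the class. A minimal sketch of the same pattern, assuming the AWX get_redis_client helper imported elsewhere in this diff:

from awx.main.utils.redis import get_redis_client  # AWX helper, as imported in this diff

BROADCAST_WEBSOCKET_REDIS_KEY_NAME = 'broadcast_websocket_stats'

class StatsReader:
    _redis_client = None  # created once, then reused across calls

    @classmethod
    def get_stats_sync(cls):
        # Lazily build the client the first time so repeated calls do not open
        # a fresh connection pool the way a per-call from_url() would.
        if cls._redis_client is None:
            cls._redis_client = get_redis_client()
        raw = cls._redis_client.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
        return raw.decode('UTF-8')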
@@ -142,7 +142,7 @@ def config(since, **kwargs):
|
||||
return {
|
||||
'platform': {
|
||||
'system': platform.system(),
|
||||
'dist': (distro.name(), distro.version(), distro.codename()),
|
||||
'dist': distro.linux_distribution(),
|
||||
'release': platform.release(),
|
||||
'type': install_type,
|
||||
},
|
||||
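The config() hunk above swaps distro.linux_distribution() for the per-field accessors. A one-line sketch of the replacement call pattern (distro is the PyPI package already used here; linux_distribution() was deprecated and dropped from newer releases of the package):

import distro

dist_info = (distro.name(), distro.version(), distro.codename())  # e.g. ('Fedora Linux', '40', '')
print(dist_info)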
@@ -487,7 +487,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
|
||||
OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}'))
|
||||
AND main_unifiedjob.launch_type != 'sync'
|
||||
ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER
|
||||
'''.format(since.isoformat(), until.isoformat())
|
||||
'''.format(
|
||||
since.isoformat(), until.isoformat()
|
||||
)
|
||||
return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
|
||||
|
||||
|
||||
@@ -548,7 +550,9 @@ def workflow_job_node_table(since, full_path, until, **kwargs):
|
||||
) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
|
||||
WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}')
|
||||
ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER
|
||||
'''.format(since.isoformat(), until.isoformat())
|
||||
'''.format(
|
||||
since.isoformat(), until.isoformat()
|
||||
)
|
||||
return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)
|
||||
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from ansible_base.lib.utils.db import advisory_lock
|
||||
from awx.main.models import Job
|
||||
from awx.main.access import access_registry
|
||||
from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
|
||||
from awx.main.utils.analytics_proxy import OIDCClient
|
||||
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
|
||||
|
||||
__all__ = ['register', 'gather', 'ship']
|
||||
|
||||
@@ -186,7 +186,7 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
|
||||
|
||||
if not (
|
||||
settings.AUTOMATION_ANALYTICS_URL
|
||||
and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_CLIENT_ID and settings.SUBSCRIPTIONS_CLIENT_SECRET))
|
||||
and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_USERNAME and settings.SUBSCRIPTIONS_PASSWORD))
|
||||
):
|
||||
logger.log(log_level, "Not gathering analytics, configuration is invalid. Use --dry-run to gather locally without sending.")
|
||||
return None
|
||||
@@ -324,10 +324,10 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
|
||||
settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
|
||||
|
||||
if collection_type != 'dry-run':
|
||||
for fpath in tarfiles:
|
||||
if os.path.exists(fpath):
|
||||
os.remove(fpath)
|
||||
|
||||
if succeeded:
|
||||
for fpath in tarfiles:
|
||||
if os.path.exists(fpath):
|
||||
os.remove(fpath)
|
||||
with disable_activity_stream():
|
||||
if not settings.AUTOMATION_ANALYTICS_LAST_GATHER or until > settings.AUTOMATION_ANALYTICS_LAST_GATHER:
|
||||
# `AUTOMATION_ANALYTICS_LAST_GATHER` is set whether collection succeeds or fails;
|
||||
@@ -368,20 +368,8 @@ def ship(path):
|
||||
logger.error('AUTOMATION_ANALYTICS_URL is not set')
|
||||
return False
|
||||
|
||||
rh_id = getattr(settings, 'REDHAT_USERNAME', None)
|
||||
rh_secret = getattr(settings, 'REDHAT_PASSWORD', None)
|
||||
|
||||
if not (rh_id and rh_secret):
|
||||
rh_id = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
|
||||
rh_secret = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
|
||||
|
||||
if not rh_id:
|
||||
logger.error('Neither REDHAT_USERNAME nor SUBSCRIPTIONS_CLIENT_ID are set')
|
||||
return False
|
||||
|
||||
if not rh_secret:
|
||||
logger.error('Neither REDHAT_PASSWORD nor SUBSCRIPTIONS_CLIENT_SECRET are set')
|
||||
return False
|
||||
rh_user = getattr(settings, 'REDHAT_USERNAME', None)
|
||||
rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
|
||||
|
||||
with open(path, 'rb') as f:
|
||||
files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
|
||||
@@ -389,13 +377,25 @@ def ship(path):
|
||||
s.headers = get_awx_http_client_headers()
|
||||
s.headers.pop('Content-Type')
|
||||
with set_environ(**settings.AWX_TASK_ENV):
|
||||
try:
|
||||
client = OIDCClient(rh_id, rh_secret)
|
||||
response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
|
||||
except requests.RequestException:
|
||||
logger.error("Automation Analytics API request failed, trying base auth method")
|
||||
response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_id, rh_secret), headers=s.headers, timeout=(31, 31))
|
||||
|
||||
if rh_user and rh_password:
|
||||
try:
|
||||
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
|
||||
response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
|
||||
except requests.RequestException:
|
||||
logger.error("Automation Analytics API request failed, trying base auth method")
|
||||
response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
|
||||
elif not rh_user or not rh_password:
|
||||
logger.info('REDHAT_USERNAME and REDHAT_PASSWORD are not set, using SUBSCRIPTIONS_USERNAME and SUBSCRIPTIONS_PASSWORD')
|
||||
rh_user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
|
||||
rh_password = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
|
||||
if rh_user and rh_password:
|
||||
response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
|
||||
elif not rh_user:
|
||||
logger.error('REDHAT_USERNAME and SUBSCRIPTIONS_USERNAME are not set')
|
||||
return False
|
||||
elif not rh_password:
|
||||
logger.error('REDHAT_PASSWORD and SUBSCRIPTIONS_USERNAME are not set')
|
||||
return False
|
||||
# Accept 2XX status_codes
|
||||
if response.status_code >= 300:
|
||||
logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))
|
||||
|
||||
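The ship() hunk above now prefers the service-account upload path and only falls back to basic auth when that request fails. A simplified sketch of the control flow; OIDCClient is the AWX class imported earlier in this diff, everything else is a placeholder:

import requests

def upload(url, files, headers, rh_id, rh_secret, timeout=(31, 31)):
    # Try the token-based client first; on any requests-level failure,
    # retry the same upload with HTTP basic auth.
    try:
        client = OIDCClient(rh_id, rh_secret)
        return client.make_request("POST", url, headers=headers, files=files, timeout=timeout)
    except requests.RequestException:
        return requests.post(url, files=files, auth=(rh_id, rh_secret), headers=headers, timeout=timeout)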
@@ -1,41 +0,0 @@
|
||||
import http.client
|
||||
import socket
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
import logging
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_dispatcherd_metrics(request):
|
||||
metrics_cfg = settings.METRICS_SUBSYSTEM_CONFIG.get('server', {}).get(settings.METRICS_SERVICE_DISPATCHER, {})
|
||||
host = metrics_cfg.get('host', 'localhost')
|
||||
port = metrics_cfg.get('port', 8015)
|
||||
metrics_filter = []
|
||||
if request is not None and hasattr(request, "query_params"):
|
||||
try:
|
||||
nodes_filter = request.query_params.getlist("node")
|
||||
except Exception:
|
||||
nodes_filter = []
|
||||
if nodes_filter and settings.CLUSTER_HOST_ID not in nodes_filter:
|
||||
return ''
|
||||
try:
|
||||
metrics_filter = request.query_params.getlist("metric")
|
||||
except Exception:
|
||||
metrics_filter = []
|
||||
if metrics_filter:
|
||||
# Right now we have no way of filtering the dispatcherd metrics
|
||||
# so just avoid getting in the way if another metric is filtered for
|
||||
return ''
|
||||
url = f"http://{host}:{port}/metrics"
|
||||
try:
|
||||
with urllib.request.urlopen(url, timeout=1.0) as response:
|
||||
payload = response.read()
|
||||
if not payload:
|
||||
return ''
|
||||
return payload.decode('utf-8')
|
||||
except (urllib.error.URLError, UnicodeError, socket.timeout, TimeoutError, http.client.HTTPException) as exc:
|
||||
logger.debug(f"Failed to collect dispatcherd metrics from {url}: {exc}")
|
||||
return ''
|
||||
@@ -128,7 +128,6 @@ def metrics():
|
||||
registry=REGISTRY,
|
||||
)
|
||||
|
||||
LICENSE_EXPIRY = Gauge('awx_license_expiry', 'Time before license expires', registry=REGISTRY)
|
||||
LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license', registry=REGISTRY)
|
||||
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license', registry=REGISTRY)
|
||||
|
||||
@@ -149,7 +148,6 @@ def metrics():
|
||||
}
|
||||
)
|
||||
|
||||
LICENSE_EXPIRY.set(str(license_info.get('time_remaining', 0)))
|
||||
LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
|
||||
LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))
|
||||
|
||||
|
||||
@@ -14,8 +14,6 @@ from rest_framework.request import Request
|
||||
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
from awx.main.utils import is_testing
|
||||
from awx.main.utils.redis import get_redis_client
|
||||
from .dispatcherd_metrics import get_dispatcherd_metrics
|
||||
|
||||
root_key = settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX
|
||||
logger = logging.getLogger('awx.main.analytics')
|
||||
@@ -46,12 +44,11 @@ class MetricsServer(MetricsServerSettings):
|
||||
|
||||
|
||||
class BaseM:
|
||||
def __init__(self, field, help_text, labels=None):
|
||||
def __init__(self, field, help_text):
|
||||
self.field = field
|
||||
self.help_text = help_text
|
||||
self.current_value = 0
|
||||
self.metric_has_changed = False
|
||||
self.labels = labels or {}
|
||||
|
||||
def reset_value(self, conn):
|
||||
conn.hset(root_key, self.field, 0)
|
||||
@@ -72,16 +69,12 @@ class BaseM:
value = conn.hget(root_key, self.field)
return self.decode_value(value)

def to_prometheus(self, instance_data, namespace=None):
def to_prometheus(self, instance_data):
    output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} gauge\n"
    for instance in instance_data:
        if self.field in instance_data[instance]:
            # Build label string
            labels = f'node="{instance}"'
            if namespace:
                labels += f',subsystem="{namespace}"'
            # on upgrade, if there are stale instances, we can end up with issues where new metrics are not present
            output_text += f'{self.field}{{{labels}}} {instance_data[instance][self.field]}\n'
            output_text += f'{self.field}{{node="{instance}"}} {instance_data[instance][self.field]}\n'
    return output_text
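The to_prometheus() change above appends an optional subsystem label next to the node label for the operational metrics. A small standalone illustration of the resulting exposition-format lines (hypothetical helper, not the AWX class):

def gauge_line(field, node, value, subsystem=None):
    labels = f'node="{node}"'
    if subsystem:
        labels += f',subsystem="{subsystem}"'
    # f'{field}{{{labels}}}' renders as field{labels}
    return f'{field}{{{labels}}} {value}'

print(gauge_line('subsystem_metrics_pipe_execute_calls', 'awx-1', 3, subsystem='dispatcher'))
# subsystem_metrics_pipe_execute_calls{node="awx-1",subsystem="dispatcher"} 3
print(gauge_line('callback_receiver_events_insert_db', 'awx-1', 42))
# callback_receiver_events_insert_db{node="awx-1"} 42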
@@ -174,17 +167,14 @@ class HistogramM(BaseM):
|
||||
self.sum.store_value(conn)
|
||||
self.inf.store_value(conn)
|
||||
|
||||
def to_prometheus(self, instance_data, namespace=None):
|
||||
def to_prometheus(self, instance_data):
|
||||
output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} histogram\n"
|
||||
for instance in instance_data:
|
||||
# Build label string
|
||||
node_label = f'node="{instance}"'
|
||||
subsystem_label = f',subsystem="{namespace}"' if namespace else ''
|
||||
for i, b in enumerate(self.buckets):
|
||||
output_text += f'{self.field}_bucket{{le="{b}",{node_label}{subsystem_label}}} {sum(instance_data[instance][self.field]["counts"][0:i+1])}\n'
|
||||
output_text += f'{self.field}_bucket{{le="+Inf",{node_label}{subsystem_label}}} {instance_data[instance][self.field]["inf"]}\n'
|
||||
output_text += f'{self.field}_count{{{node_label}{subsystem_label}}} {instance_data[instance][self.field]["inf"]}\n'
|
||||
output_text += f'{self.field}_sum{{{node_label}{subsystem_label}}} {instance_data[instance][self.field]["sum"]}\n'
|
||||
output_text += f'{self.field}_bucket{{le="{b}",node="{instance}"}} {sum(instance_data[instance][self.field]["counts"][0:i+1])}\n'
|
||||
output_text += f'{self.field}_bucket{{le="+Inf",node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n'
|
||||
output_text += f'{self.field}_count{{node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n'
|
||||
output_text += f'{self.field}_sum{{node="{instance}"}} {instance_data[instance][self.field]["sum"]}\n'
|
||||
return output_text
|
||||
|
||||
|
||||
@@ -200,8 +190,8 @@ class Metrics(MetricsNamespace):
    def __init__(self, namespace, auto_pipe_execute=False, instance_name=None, metrics_have_changed=True, **kwargs):
        MetricsNamespace.__init__(self, namespace)

        self.conn = get_redis_client()
        self.pipe = self.conn.pipeline()
        self.pipe = redis.Redis.from_url(settings.BROKER_URL).pipeline()
        self.conn = redis.Redis.from_url(settings.BROKER_URL)
        self.last_pipe_execute = time.time()
        # track if metrics have been modified since last saved to redis
        # start with True so that we get an initial save to redis
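An aside, not part of the diff: a minimal sketch of the Redis pipeline pattern both variants above set up (the connection URL, hash key, and field names are illustrative, not AWX settings).

import redis

# Minimal sketch, assuming a local Redis; key and fields are illustrative.
conn = redis.Redis.from_url("redis://localhost:6379/0")
pipe = conn.pipeline()
pipe.hset("awx_metrics_example", "some_counter", 3)
pipe.hset("awx_metrics_example", "some_gauge", 1.5)
pipe.execute()  # both HSETs are sent to Redis in a single round trip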
@@ -283,22 +273,20 @@ class Metrics(MetricsNamespace):

    def pipe_execute(self):
        if self.metrics_have_changed is True:
            duration_pipe_exec = time.perf_counter()
            duration_to_save = time.perf_counter()
            for m in self.METRICS:
                self.METRICS[m].store_value(self.pipe)
            self.pipe.execute()
            self.last_pipe_execute = time.time()
            self.metrics_have_changed = False
            duration_pipe_exec = time.perf_counter() - duration_pipe_exec

            duration_send_metrics = time.perf_counter()
            self.send_metrics()
            duration_send_metrics = time.perf_counter() - duration_send_metrics

            # Increment operational metrics
            self.METRICS['subsystem_metrics_pipe_execute_seconds'].inc(duration_pipe_exec)
            duration_to_save = time.perf_counter() - duration_to_save
            self.METRICS['subsystem_metrics_pipe_execute_seconds'].inc(duration_to_save)
            self.METRICS['subsystem_metrics_pipe_execute_calls'].inc(1)
            self.METRICS['subsystem_metrics_send_metrics_seconds'].inc(duration_send_metrics)

            duration_to_save = time.perf_counter()
            self.send_metrics()
            duration_to_save = time.perf_counter() - duration_to_save
            self.METRICS['subsystem_metrics_send_metrics_seconds'].inc(duration_to_save)

    def send_metrics(self):
        # more than one thread could be calling this at the same time, so should
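An aside, not part of the diff: a minimal sketch of the time.perf_counter() idiom used throughout pipe_execute above; the timed step is a stand-in, not AWX code.

import time

# Minimal sketch: record a start mark, do the work, then overwrite the mark
# with (now - start) so the same variable ends up holding the duration.
duration_pipe_exec = time.perf_counter()
sum(range(100_000))  # stand-in for self.pipe.execute()
duration_pipe_exec = time.perf_counter() - duration_pipe_exec
print(f"pipe execute took {duration_pipe_exec:.6f}s")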
@@ -364,13 +352,7 @@ class Metrics(MetricsNamespace):
        if instance_data:
            for field in self.METRICS:
                if len(metrics_filter) == 0 or field in metrics_filter:
                    # Add subsystem label only for operational metrics
                    namespace = (
                        self._namespace
                        if field in ['subsystem_metrics_pipe_execute_seconds', 'subsystem_metrics_pipe_execute_calls', 'subsystem_metrics_send_metrics_seconds']
                        else None
                    )
                    output_text += self.METRICS[field].to_prometheus(instance_data, namespace)
                    output_text += self.METRICS[field].to_prometheus(instance_data)
        return output_text


@@ -399,6 +381,11 @@ class DispatcherMetrics(Metrics):
        SetFloatM('workflow_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
        SetFloatM('workflow_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow tasks'),
        SetFloatM('workflow_manager_get_tasks_seconds', 'Time spent loading workflow tasks from db'),
        # dispatcher subsystem metrics
        SetIntM('dispatcher_pool_scale_up_events', 'Number of times local dispatcher scaled up a worker since startup'),
        SetIntM('dispatcher_pool_active_task_count', 'Number of active tasks in the worker pool when last task was submitted'),
        SetIntM('dispatcher_pool_max_worker_count', 'Highest number of workers in worker pool in last collection interval, about 20s'),
        SetFloatM('dispatcher_availability', 'Fraction of time (in last collection interval) dispatcher was able to receive messages'),
    ]

    def __init__(self, *args, **kwargs):
@@ -426,12 +413,8 @@ class CallbackReceiverMetrics(Metrics):

def metrics(request):
    output_text = ''
    output_text += DispatcherMetrics().generate_metrics(request)
    output_text += CallbackReceiverMetrics().generate_metrics(request)

    dispatcherd_metrics = get_dispatcherd_metrics(request)
    if dispatcherd_metrics:
        output_text += dispatcherd_metrics
    for m in [DispatcherMetrics(), CallbackReceiverMetrics()]:
        output_text += m.generate_metrics(request)
    return output_text


@@ -457,10 +440,7 @@ class CustomToPrometheusMetricsCollector(prometheus_client.registry.Collector):
            logger.debug(f"No metric data not found in redis for metric namespace '{self._metrics._namespace}'")
            return None

        if not (host_metrics := instance_data.get(my_hostname)):
            logger.debug(f"Metric data for this node '{my_hostname}' not found in redis for metric namespace '{self._metrics._namespace}'")
            return None

        host_metrics = instance_data.get(my_hostname)
        for _, metric in self._metrics.METRICS.items():
            entry = host_metrics.get(metric.field)
            if not entry:
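An aside, not part of the diff: a minimal sketch of the assignment-expression guard used above (Python 3.8+), which binds and tests the lookup in one step; hostnames and data are illustrative.

# Minimal sketch with illustrative data.
instance_data = {"awx-node-1": {"example_metric": 1.0}}
my_hostname = "awx-node-2"
if not (host_metrics := instance_data.get(my_hostname)):
    print(f"no metrics recorded for {my_hostname}")  # host_metrics is None here
else:
    print(host_metrics["example_metric"])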
@@ -481,6 +461,13 @@ class CallbackReceiverMetricsServer(MetricsServer):
        super().__init__(settings.METRICS_SERVICE_CALLBACK_RECEIVER, registry)


class DispatcherMetricsServer(MetricsServer):
    def __init__(self):
        registry = CollectorRegistry(auto_describe=True)
        registry.register(CustomToPrometheusMetricsCollector(DispatcherMetrics(metrics_have_changed=False)))
        super().__init__(settings.METRICS_SERVICE_DISPATCHER, registry)


class WebsocketsMetricsServer(MetricsServer):
    def __init__(self):
        registry = CollectorRegistry(auto_describe=True)

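An aside, not part of the diff: a minimal sketch of the prometheus_client custom-collector pattern the MetricsServer classes above build on, assuming a recent prometheus_client; the collector and metric names are illustrative.

import prometheus_client
from prometheus_client import CollectorRegistry
from prometheus_client.core import GaugeMetricFamily

# Minimal sketch: a Collector exposes collect(); a registry created with
# auto_describe=True calls it to describe metrics when the collector is registered.
class ExampleCollector(prometheus_client.registry.Collector):
    def collect(self):
        yield GaugeMetricFamily("example_gauge", "An illustrative gauge", value=1.0)

registry = CollectorRegistry(auto_describe=True)
registry.register(ExampleCollector())
print(prometheus_client.generate_latest(registry).decode())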
@@ -1,9 +1,6 @@
import os

from dispatcherd.config import setup as dispatcher_setup

from django.apps import AppConfig
from django.db import connection
from django.utils.translation import gettext_lazy as _
from awx.main.utils.common import bypass_in_test, load_all_entry_points_for
from awx.main.utils.migration import is_database_synchronized
@@ -79,28 +76,9 @@ class MainConfig(AppConfig):
            cls = entry_point.load()
            InventorySourceOptions.injectors[entry_point_name] = cls

    def configure_dispatcherd(self):
        """This implements the default configuration for dispatcherd

        If running the tasking service like awx-manage dispatcherd,
        some additional config will be applied on top of this.
        This configuration provides the minimum such that code can submit
        tasks to pg_notify to run those tasks.
        """
        from awx.main.dispatch.config import get_dispatcherd_config

        if connection.vendor != 'postgresql':
            config_dict = get_dispatcherd_config(mock_publish=True)
        else:
            config_dict = get_dispatcherd_config()

        dispatcher_setup(config_dict)

    def ready(self):
        super().ready()

        self.configure_dispatcherd()

        """
        Credential loading triggers database operations. There are cases we want to call
        awx-manage collectstatic without a database. All management commands invoke the ready() code
