Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 03:24:50 -03:30)

Compare commits: 1 commit
dependabot ... thedoubl3j

| Author | SHA1 | Date |
|---|---|---|
| | 6389316206 | |
2  .github/actions/awx_devel_image/action.yml (vendored)
@@ -11,6 +11,8 @@ inputs:
runs:
  using: composite
  steps:
    - uses: ./.github/actions/setup-python

    - name: Set lower case owner name
      shell: bash
      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
2  .github/actions/run_awx_devel/action.yml (vendored)

@@ -36,7 +36,7 @@ runs:

    - name: Upgrade ansible-core
      shell: bash
      run: python -m pip install --upgrade ansible-core
      run: python3 -m pip install --upgrade ansible-core

    - name: Install system deps
      shell: bash
12  .github/workflows/api_schema_check.yml (vendored)

@@ -47,7 +47,7 @@ jobs:

      - name: Add schema diff to job summary
        if: always()
        # show text and if for some reason, it can't be generated, state that it can't be.
        run: |
          echo "## API Schema Change Detection Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
@@ -55,18 +55,12 @@
          if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
            echo "### Schema changes detected" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            # Truncate to first 1000 lines to stay under GitHub's 1MB summary limit
            TOTAL_LINES=$(wc -l < schema-diff.txt)
            if [ $TOTAL_LINES -gt 1000 ]; then
              echo "_Showing first 1000 of ${TOTAL_LINES} lines. See job logs or download artifact for full diff._" >> $GITHUB_STEP_SUMMARY
              echo "" >> $GITHUB_STEP_SUMMARY
            fi
            echo '```diff' >> $GITHUB_STEP_SUMMARY
            head -n 1000 schema-diff.txt >> $GITHUB_STEP_SUMMARY
            cat schema-diff.txt >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
          else
            echo "### No schema changes detected" >> $GITHUB_STEP_SUMMARY
          fi
        else
          echo "### Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
        fi
59  .github/workflows/ci.yml (vendored)

@@ -32,6 +32,9 @@ jobs:
          - name: api-lint
            command: /var/lib/awx/venv/awx/bin/tox -e linters
            coverage-upload-name: ""
          - name: api-swagger
            command: /start_tests.sh swagger
            coverage-upload-name: ""
          - name: awx-collection
            command: /start_tests.sh test_collection_all
            coverage-upload-name: "awx-collection"
@@ -54,17 +57,6 @@ jobs:
          AWX_DOCKER_CMD='${{ matrix.tests.command }}'
          make docker-runner

      - name: Inject PR number into coverage.xml
        if: >-
          !cancelled()
          && github.event_name == 'pull_request'
          && steps.make-run.outputs.cov-report-files != ''
        run: |
          if [ -f "reports/coverage.xml" ]; then
            sed -i '2i<!-- PR ${{ github.event.pull_request.number }} -->' reports/coverage.xml
            echo "Injected PR number ${{ github.event.pull_request.number }} into coverage.xml"
          fi

      - name: Upload test coverage to Codecov
        if: >-
          !cancelled()
@@ -104,14 +96,6 @@ jobs:
          }}
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: Upload test artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tests.name }}-artifacts
          path: reports/coverage.xml
          retention-days: 5

      - name: Upload awx jUnit test reports
        if: >-
          !cancelled()
@@ -142,7 +126,7 @@ jobs:

      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.13'
          python-version: '3.x'

      - uses: ./.github/actions/run_awx_devel
        id: awx
@@ -185,11 +169,11 @@ jobs:
      - name: Setup python, referencing action at awx relative path
        uses: ./awx/.github/actions/setup-python
        with:
          python-version: '3.13'
          python-version: '3.x'

      - name: Install playbook dependencies
        run: |
          python -m pip install docker
          python3 -m pip install docker

      - name: Build AWX image
        working-directory: awx
@@ -203,8 +187,8 @@ jobs:
      - name: Run test deployment with awx-operator
        working-directory: awx-operator
        run: |
          python -m pip install -r molecule/requirements.txt
          python -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
          python3 -m pip install -r molecule/requirements.txt
          python3 -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
          $(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
          ansible-galaxy collection install -r molecule/requirements.yml
          sudo rm -f $(which kustomize)
@@ -291,11 +275,7 @@ jobs:

      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.13'

      - name: Remove system ansible to avoid conflicts
        run: |
          python -m pip uninstall -y ansible ansible-core || true
          python-version: '3.x'

      - uses: ./.github/actions/run_awx_devel
        id: awx
@@ -306,9 +286,8 @@ jobs:

      - name: Install dependencies for running tests
        run: |
          python -m pip install -e ./awxkit/
          python -m pip install -r awx_collection/requirements.txt
          hash -r # Rehash to pick up newly installed scripts
          python3 -m pip install -e ./awxkit/
          python3 -m pip install -r awx_collection/requirements.txt

      - name: Run integration tests
        id: make-run
@@ -320,7 +299,6 @@ jobs:
          echo 'password = password' >> ~/.tower_cli.cfg
          echo 'verify_ssl = false' >> ~/.tower_cli.cfg
          TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
          export PYTHONPATH="$(python -c 'import site; print(":".join(site.getsitepackages()))')${PYTHONPATH:+:$PYTHONPATH}"
          make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
        env:
          ANSIBLE_TEST_PREFER_PODMAN: 1
@@ -375,14 +353,10 @@ jobs:

      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.13'

      - name: Remove system ansible to avoid conflicts
        run: |
          python -m pip uninstall -y ansible ansible-core || true
          python-version: '3.x'

      - name: Upgrade ansible-core
        run: python -m pip install --upgrade ansible-core
        run: python3 -m pip install --upgrade ansible-core

      - name: Download coverage artifacts
        uses: actions/download-artifact@v4
@@ -397,12 +371,11 @@ jobs:
          mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
          cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
          cd ~/.ansible/collections/ansible_collections/awx/awx
          hash -r # Rehash to pick up newly installed scripts
          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage combine --requirements
          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage html
          ansible-test coverage combine --requirements
          ansible-test coverage html
          echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage report >> $GITHUB_STEP_SUMMARY
          ansible-test coverage report >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          echo >> $GITHUB_STEP_SUMMARY
          echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
257  .github/workflows/sonarcloud_pr.yml (vendored)

@@ -1,232 +1,85 @@
# SonarCloud Analysis Workflow for awx
#
# This workflow runs SonarCloud analysis triggered by CI workflow completion.
# It is split into two separate jobs for clarity and maintainability:
#
# FLOW: CI completes → workflow_run triggers this workflow → appropriate job runs
#
# JOB 1: sonar-pr-analysis (for PRs)
# - Triggered by: workflow_run (CI on pull_request)
# - Steps: Download coverage → Get PR info → Get changed files → Run SonarCloud PR analysis
# - Scans: All changed files in the PR (Python, YAML, JSON, etc.)
# - Quality gate: Focuses on new/changed code in PR only
#
# JOB 2: sonar-branch-analysis (for long-lived branches)
# - Triggered by: workflow_run (CI on push to devel)
# - Steps: Download coverage → Run SonarCloud branch analysis
# - Scans: Full codebase
# - Quality gate: Focuses on overall project health
#
# This ensures coverage data is always available from CI before analysis runs.
#
# What files are scanned:
# - All files in the repository that SonarCloud can analyze
# - Excludes: tests, scripts, dev environments, external collections (see sonar-project.properties)
---
name: SonarQube

# With much help from:
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/30
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/32
name: SonarCloud
on:
  workflow_run: # This is triggered by CI being completed.
  workflow_run:
    workflows:
      - CI
    types:
      - completed

permissions: read-all

jobs:
  sonar-pr-analysis:
    name: SonarCloud PR Analysis
  sonarqube:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.event == 'pull_request' &&
      github.repository == 'ansible/awx'
    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
    steps:
      - uses: actions/checkout@v4

      # Download all individual coverage artifacts from CI workflow
      - name: Download coverage artifacts
        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          workflow: CI
          run_id: ${{ github.event.workflow_run.id }}
          pattern: api-test-artifacts
          fetch-depth: 0
          show-progress: false

      # Extract PR metadata from workflow_run event
      - name: Set PR metadata and prepare files for analysis
        env:
          COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
          REPO_NAME: ${{ github.event.repository.full_name }}
          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Download coverage report artifact
        uses: actions/download-artifact@v4
        with:
          name: coverage.xml
          path: reports/
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ github.event.workflow_run.id }}

      - name: Download PR number artifact
        uses: actions/download-artifact@v4
        with:
          name: pr-number
          path: .
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ github.event.workflow_run.id }}

      - name: Extract PR number
        run: |
          # Find all downloaded coverage XML files
          coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
          echo "Found coverage files: $coverage_files"
          echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
          cat pr-number.txt
          echo "PR_NUMBER=$(cat pr-number.txt)" >> $GITHUB_ENV

          # Extract PR number from first coverage.xml file found
          first_coverage=$(find . -name "coverage.xml" -type f | head -1)
          if [ -f "$first_coverage" ]; then
            PR_NUMBER=$(grep -m 1 '<!-- PR' "$first_coverage" | awk '{print $3}' || echo "")
          else
            PR_NUMBER=""
          fi
      - name: Get PR info
        uses: octokit/request-action@v2.x
        id: pr_info
        with:
          route: GET /repos/{repo}/pulls/{number}
          repo: ${{ github.event.repository.full_name }}
          number: ${{ env.PR_NUMBER }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

          echo "🔍 SonarCloud Analysis Decision Summary"
          echo "========================================"
          echo "├── CI Event: ✅ Pull Request"
          echo "├── PR Number from coverage.xml: #${PR_NUMBER:-<not found>}"

          if [ -z "$PR_NUMBER" ]; then
            echo "##[error]❌ FATAL: PR number not found in coverage.xml"
            echo "##[error]This job requires a PR number to run PR analysis."
            echo "##[error]The ci workflow should have injected the PR number into coverage.xml."
            exit 1
          fi

          # Get PR metadata from GitHub API
          PR_DATA=$(gh api "repos/$REPO_NAME/pulls/$PR_NUMBER")
          PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')
          PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')

          # Print summary
          echo "🔍 SonarCloud Analysis Decision Summary"
          echo "========================================"
          echo "├── CI Event: ✅ Pull Request"
          echo "├── PR Number: #$PR_NUMBER"
          echo "├── Base Branch: $PR_BASE"
          echo "├── Head Branch: $PR_HEAD"
          echo "├── Repo: $REPO_NAME"

          # Export to GitHub env for later steps
          echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
          echo "PR_BASE=$PR_BASE" >> $GITHUB_ENV
          echo "PR_HEAD=$PR_HEAD" >> $GITHUB_ENV
          echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_ENV
          echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV

          # Get all changed files from PR (with error handling)
          files=""
          if [ -n "$PR_NUMBER" ]; then
            if gh api repos/$REPO_NAME/pulls/$PR_NUMBER/files --jq '.[].filename' > /tmp/pr_files.txt 2>/tmp/pr_error.txt; then
              files=$(cat /tmp/pr_files.txt)
            else
              echo "├── Changed Files: ⚠️ Could not fetch (likely test repo or PR not found)"
              if [ -f coverage.xml ] && [ -s coverage.xml ]; then
                echo "├── Coverage Data: ✅ Available"
              else
                echo "├── Coverage Data: ⚠️ Not available"
              fi
              echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
              # No files = no inclusions filter = full scan
              exit 0
            fi
          else
            echo "├── PR Number: ⚠️ Not available"
            if [ -f coverage.xml ] && [ -s coverage.xml ]; then
              echo "├── Coverage Data: ✅ Available"
            else
              echo "├── Coverage Data: ⚠️ Not available"
            fi
            echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
            exit 0
          fi

          # Get file extensions and count for summary
          extensions=$(echo "$files" | sed 's/.*\.//' | sort | uniq | tr '\n' ',' | sed 's/,$//')
          file_count=$(echo "$files" | wc -l)
          echo "├── Changed Files: $file_count file(s) (.${extensions})"

          # Check if coverage.xml exists and has content
          if [ -f coverage.xml ] && [ -s coverage.xml ]; then
            echo "├── Coverage Data: ✅ Available"
          else
            echo "├── Coverage Data: ⚠️ Not available (analysis will proceed without coverage)"
          fi

          # Prepare file list for Sonar
          echo "All changed files in PR:"
          echo "$files"

          # Convert to comma-separated list for sonar.inclusions
          if [ -n "$files" ]; then
            inclusions=$(echo "$files" | tr '\n' ',' | sed 's/,$//')
            echo "SONAR_INCLUSIONS=$inclusions" >> $GITHUB_ENV
            echo "└── Result: ✅ Will scan these files: $inclusions"
          else
            echo "└── Result: ✅ Running SonarCloud analysis"
          fi
      - name: Set PR info into env
        run: |
          echo "PR_BASE=${{ fromJson(steps.pr_info.outputs.data).base.ref }}" >> $GITHUB_ENV
          echo "PR_HEAD=${{ fromJson(steps.pr_info.outputs.data).head.ref }}" >> $GITHUB_ENV

      - name: Add base branch
        if: env.PR_NUMBER != ''
        run: |
          gh pr checkout ${{ env.PR_NUMBER }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: SonarCloud Scan
        uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
      - name: Extract and export repo owner/name
        run: |
          REPO_SLUG="${GITHUB_REPOSITORY}"
          IFS="/" read -r REPO_OWNER REPO_NAME <<< "$REPO_SLUG"
          echo "REPO_OWNER=$REPO_OWNER" >> $GITHUB_ENV
          echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV

      - name: SonarQube scan
        uses: SonarSource/sonarqube-scan-action@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
          SONAR_TOKEN: ${{ secrets[format('{0}', vars.SONAR_TOKEN_SECRET_NAME)] }}
        with:
          args: >
            -Dsonar.scm.revision=${{ env.COMMIT_SHA }}
            -Dsonar.organization=${{ env.REPO_OWNER }}
            -Dsonar.projectKey=${{ env.REPO_OWNER }}_${{ env.REPO_NAME }}
            -Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
            -Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
            -Dsonar.pullrequest.base=${{ env.PR_BASE }}
            -Dsonar.python.coverage.reportPaths=${{ env.COVERAGE_PATHS }}
            ${{ env.SONAR_INCLUSIONS && format('-Dsonar.inclusions={0}', env.SONAR_INCLUSIONS) || '' }}

  sonar-branch-analysis:
    name: SonarCloud Branch Analysis
    runs-on: ubuntu-latest
    if: |
      github.event_name == 'workflow_run' &&
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.event == 'push' &&
      github.repository == 'ansible/awx'
    steps:
      - uses: actions/checkout@v4

      # Download all individual coverage artifacts from CI workflow (optional for branch pushes)
      - name: Download coverage artifacts
        continue-on-error: true
        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          workflow: CI
          run_id: ${{ github.event.workflow_run.id }}
          pattern: api-test-artifacts

      - name: Print SonarCloud Analysis Summary
        env:
          BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
        run: |
          # Find all downloaded coverage XML files
          coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
          echo "Found coverage files: $coverage_files"
          echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV

          echo "🔍 SonarCloud Analysis Summary"
          echo "=============================="
          echo "├── CI Event: ✅ Push (via workflow_run)"
          echo "├── Branch: $BRANCH_NAME"
          echo "├── Coverage Files: ${coverage_files:-none}"
          echo "├── Python Changes: ➖ N/A (Full codebase scan)"
          echo "└── Result: ✅ Proceed - \"Running SonarCloud analysis\""

      - name: SonarCloud Scan
        uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
        with:
          args: >
            -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
            -Dsonar.branch.name=${{ github.event.workflow_run.head_branch }}
            ${{ env.COVERAGE_PATHS && format('-Dsonar.python.coverage.reportPaths={0}', env.COVERAGE_PATHS) || '' }}
1  .gitignore (vendored)

@@ -1,7 +1,6 @@
# Ignore generated schema
swagger.json
schema.json
schema.yaml
reference-schema.json

# Tags
30  Makefile

@@ -27,8 +27,6 @@ TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
PARALLEL_TESTS ?= -n auto
# collection integration test directories (defaults to all)
COLLECTION_TEST_TARGET ?=
# Python version for ansible-test (must be 3.11, 3.12, or 3.13)
ANSIBLE_TEST_PYTHON_VERSION ?= 3.13
# args for collection install
COLLECTION_PACKAGE ?= awx
COLLECTION_NAMESPACE ?= awx
@@ -316,17 +314,20 @@ black: reports
	@echo "fi" >> .git/hooks/pre-commit
	@chmod +x .git/hooks/pre-commit

genschema: awx-link reports
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/awx/bin/activate; \
	fi; \
	$(MANAGEMENT_COMMAND) spectacular --format openapi-json --file schema.json
genschema: reports
	$(MAKE) swagger PYTEST_ADDOPTS="--genschema --create-db "
	mv swagger.json schema.json

genschema-yaml: awx-link reports
swagger: reports
	@if [ "$(VENV_BASE)" ]; then \
	    . $(VENV_BASE)/awx/bin/activate; \
	fi; \
	$(MANAGEMENT_COMMAND) spectacular --format openapi --file schema.yaml
	(set -o pipefail && py.test $(COVERAGE_ARGS) $(PARALLEL_TESTS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
	then \
	    echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
	    echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
	fi

check: black

@@ -430,8 +431,8 @@ test_collection_sanity:

test_collection_integration: install_collection
	cd $(COLLECTION_INSTALL) && \
	    PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test integration --python $(ANSIBLE_TEST_PYTHON_VERSION) --coverage -vvv $(COLLECTION_TEST_TARGET) && \
	    PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test coverage xml --requirements --group-by command --group-by version
	    ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
	    ansible-test coverage xml --requirements --group-by command --group-by version
	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
	then \
	    echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
@@ -536,15 +537,14 @@ docker-compose-test: awx/projects docker-compose-sources
docker-compose-runtest: awx/projects docker-compose-sources
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

docker-compose-build-schema: awx/projects docker-compose-sources
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 make genschema
docker-compose-build-swagger: awx/projects docker-compose-sources
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

SCHEMA_DIFF_BASE_BRANCH ?= devel
detect-schema-change: genschema
	curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
	# Ignore differences in whitespace with -b
	# diff exits with 1 when files differ - capture but don't fail
	-diff -u -b reference-schema.json schema.json
	diff -u -b reference-schema.json schema.json

docker-compose-clean: awx/projects
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
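The detect-schema-change recipe above is the heart of the schema drift check: fetch the published reference schema for the base branch, then compare it with the freshly generated schema.json while ignoring whitespace. The Makefile does this with curl and `diff -u -b`; the snippet below is only an illustrative Python rendering of the same comparison (the `schema_diff` helper and its defaults are hypothetical, not part of the repository).

```python
# Illustrative sketch of the detect-schema-change comparison (the Makefile itself
# shells out to curl and `diff -u -b`; this is not AWX code).
import difflib
import urllib.request

BASE_BRANCH = "devel"  # mirrors SCHEMA_DIFF_BASE_BRANCH
REFERENCE_URL = f"https://s3.amazonaws.com/awx-public-ci-files/{BASE_BRANCH}/schema.json"


def normalized_lines(text: str) -> list[str]:
    # Collapse runs of whitespace so the comparison behaves roughly like diff -b.
    return [" ".join(line.split()) + "\n" for line in text.splitlines()]


def schema_diff(local_path: str = "schema.json") -> str:
    # Download the reference schema published for the base branch.
    with urllib.request.urlopen(REFERENCE_URL) as response:
        reference = response.read().decode()
    # Read the locally generated schema.
    with open(local_path) as handle:
        local = handle.read()
    # Produce a unified diff, whitespace-insensitive per line.
    return "".join(
        difflib.unified_diff(
            normalized_lines(reference),
            normalized_lines(local),
            fromfile="reference-schema.json",
            tofile="schema.json",
        )
    )
```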
@@ -161,14 +161,16 @@ def get_view_description(view, html=False):


def get_default_schema():
    # drf-spectacular is configured via REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS']
    # Just use the DRF default, which will pick up our CustomAutoSchema
    return views.APIView.schema
    if settings.DYNACONF.is_development_mode:
        from awx.api.swagger import schema_view

        return schema_view
    else:
        return views.APIView.schema


class APIView(views.APIView):
    # Schema is inherited from DRF's APIView, which uses DEFAULT_SCHEMA_CLASS
    # No need to override it here - drf-spectacular will handle it
    schema = get_default_schema()
    versioning_class = URLPathVersioning

    def initialize_request(self, request, *args, **kwargs):
@@ -1,17 +1,15 @@
import warnings

from drf_spectacular.openapi import AutoSchema
from drf_spectacular.views import (
    SpectacularAPIView,
    SpectacularSwaggerView,
    SpectacularRedocView,
)
from rest_framework.permissions import AllowAny
from drf_yasg import openapi
from drf_yasg.inspectors import SwaggerAutoSchema
from drf_yasg.views import get_schema_view


class CustomAutoSchema(AutoSchema):
    """Custom AutoSchema to add swagger_topic to tags and handle deprecated endpoints."""
class CustomSwaggerAutoSchema(SwaggerAutoSchema):
    """Custom SwaggerAutoSchema to add swagger_topic to tags."""

    def get_tags(self):
    def get_tags(self, operation_keys=None):
        tags = []
        try:
            if hasattr(self.view, 'get_serializer'):
@@ -23,22 +21,19 @@ class CustomAutoSchema(AutoSchema):
                warnings.warn(
                    '{}.get_serializer() raised an exception during '
                    'schema generation. Serializer fields will not be '
                    'generated for this view.'.format(self.view.__class__.__name__)
                    'generated for {}.'.format(self.view.__class__.__name__, operation_keys)
                )

        if hasattr(self.view, 'swagger_topic'):
            tags.append(str(self.view.swagger_topic).title())
        elif serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
        elif serializer and hasattr(serializer, 'Meta'):
            tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
        elif hasattr(self.view, 'model'):
            tags.append(str(self.view.model._meta.verbose_name_plural).title())
        else:
            tags = super().get_tags() # Use default drf-spectacular behavior
            tags = ['api'] # Fallback to default value

        if not tags:
            warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
            tags = ['api'] # Fallback to default value

        return tags

    def is_deprecated(self):
@@ -46,11 +41,15 @@ class CustomAutoSchema(AutoSchema):
        return getattr(self.view, 'deprecated', False)


# Schema view (returns OpenAPI schema JSON/YAML)
schema_view = SpectacularAPIView.as_view()

# Swagger UI view
swagger_ui_view = SpectacularSwaggerView.as_view(url_name='api:schema-json')

# ReDoc UI view
redoc_view = SpectacularRedocView.as_view(url_name='api:schema-json')
schema_view = get_schema_view(
    openapi.Info(
        title='AWX API',
        default_version='v2',
        description='AWX API Documentation',
        terms_of_service='https://www.google.com/policies/terms/',
        contact=openapi.Contact(email='contact@snippets.local'),
        license=openapi.License(name='Apache License'),
    ),
    public=True,
    permission_classes=[AllowAny],
)
@@ -1,4 +1,4 @@
---
collections:
  - name: ansible.receptor
    version: 2.0.6
    version: 2.0.3
@@ -4,6 +4,7 @@
from __future__ import absolute_import, unicode_literals
from django.urls import include, re_path

from awx import MODE
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views.root import (
    ApiRootView,
@@ -147,21 +148,21 @@ v2_urls = [


app_name = 'api'

# Import schema views (needed for both development and testing)
from awx.api.schema import schema_view, swagger_ui_view, redoc_view

urlpatterns = [
    re_path(r'^$', ApiRootView.as_view(), name='api_root_view'),
    re_path(r'^(?P<version>(v2))/', include(v2_urls)),
    re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
    re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
    # Schema endpoints (available in all modes for API documentation and testing)
    re_path(r'^schema/$', schema_view, name='schema-json'),
    re_path(r'^swagger/$', swagger_ui_view, name='schema-swagger-ui'),
    re_path(r'^redoc/$', redoc_view, name='schema-redoc'),
]
if MODE == 'development':
    # Only include these if we are in the development environment
    from awx.api.swagger import schema_view

    from awx.api.urls.debug import urls as debug_urls

    urlpatterns += [re_path(r'^debug/', include(debug_urls))]
    urlpatterns += [
        re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
        re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
        re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
    ]
@@ -7,6 +7,7 @@ from django.core.serializers.json import DjangoJSONEncoder
from django.utils.functional import Promise
from django.utils.encoding import force_str

from drf_yasg.codecs import OpenAPICodecJson
import pytest

from awx.api.versioning import drf_reverse
@@ -42,10 +43,10 @@ class TestSwaggerGeneration:
    @pytest.fixture(autouse=True, scope='function')
    def _prepare(self, get, admin):
        if not self.__class__.JSON:
            # drf-spectacular returns OpenAPI schema directly from schema endpoint
            url = drf_reverse('api:schema-json') + '?format=json'
            url = drf_reverse('api:schema-swagger-ui') + '?format=openapi'
            response = get(url, user=admin)
            data = response.data
            codec = OpenAPICodecJson([])
            data = codec.generate_swagger_object(response.data)
            if response.has_header('X-Deprecated-Paths'):
                data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])
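The updated fixture above boils down to one fetch-and-decode pattern. Here is a condensed sketch of that pattern, assuming the AWX test fixtures (get, admin) and the URL names registered in awx.api.urls are available; the helper name is illustrative.

```python
# Condensed sketch of the fixture logic shown above (helper name is hypothetical).
import json

from drf_yasg.codecs import OpenAPICodecJson

from awx.api.versioning import drf_reverse


def fetch_swagger_document(get, admin):
    # The swagger UI endpoint returns the schema object when ?format=openapi is requested.
    url = drf_reverse('api:schema-swagger-ui') + '?format=openapi'
    response = get(url, user=admin)

    # Convert the drf-yasg Swagger object into a plain dict, as the fixture does.
    codec = OpenAPICodecJson([])
    data = codec.generate_swagger_object(response.data)

    # Deprecated paths are exposed out-of-band via a response header.
    if response.has_header('X-Deprecated-Paths'):
        data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])
    return data
```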
@@ -1,250 +0,0 @@
|
||||
import warnings
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from awx.api.schema import CustomAutoSchema
|
||||
|
||||
|
||||
class TestCustomAutoSchema:
|
||||
"""Unit tests for CustomAutoSchema class."""
|
||||
|
||||
def test_get_tags_with_swagger_topic(self):
|
||||
"""Test get_tags returns swagger_topic when available."""
|
||||
view = Mock()
|
||||
view.swagger_topic = 'custom_topic'
|
||||
view.get_serializer = Mock(return_value=Mock())
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
assert tags == ['Custom_Topic']
|
||||
|
||||
def test_get_tags_with_serializer_meta_model(self):
|
||||
"""Test get_tags returns model verbose_name_plural from serializer."""
|
||||
# Create a mock model with verbose_name_plural
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'test models'
|
||||
|
||||
# Create a mock serializer with Meta.model
|
||||
mock_serializer = Mock()
|
||||
mock_serializer.Meta.model = mock_model
|
||||
|
||||
view = Mock(spec=[]) # View without swagger_topic
|
||||
view.get_serializer = Mock(return_value=mock_serializer)
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
assert tags == ['Test Models']
|
||||
|
||||
def test_get_tags_with_view_model(self):
|
||||
"""Test get_tags returns model verbose_name_plural from view."""
|
||||
# Create a mock model with verbose_name_plural
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'view models'
|
||||
|
||||
view = Mock(spec=['model']) # View without swagger_topic or get_serializer
|
||||
view.model = mock_model
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
assert tags == ['View Models']
|
||||
|
||||
def test_get_tags_without_get_serializer(self):
|
||||
"""Test get_tags when view doesn't have get_serializer method."""
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'test objects'
|
||||
|
||||
view = Mock(spec=['model'])
|
||||
view.model = mock_model
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
assert tags == ['Test Objects']
|
||||
|
||||
def test_get_tags_serializer_exception_with_warning(self):
|
||||
"""Test get_tags handles exception in get_serializer with warning."""
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'fallback models'
|
||||
|
||||
view = Mock(spec=['get_serializer', 'model', '__class__'])
|
||||
view.__class__.__name__ = 'TestView'
|
||||
view.get_serializer = Mock(side_effect=Exception('Serializer error'))
|
||||
view.model = mock_model
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.simplefilter("always")
|
||||
tags = schema.get_tags()
|
||||
|
||||
# Check that a warning was raised
|
||||
assert len(w) == 1
|
||||
assert 'TestView.get_serializer() raised an exception' in str(w[0].message)
|
||||
|
||||
# Should still get tags from view.model
|
||||
assert tags == ['Fallback Models']
|
||||
|
||||
def test_get_tags_serializer_without_meta_model(self):
|
||||
"""Test get_tags when serializer doesn't have Meta.model."""
|
||||
mock_serializer = Mock(spec=[]) # No Meta attribute
|
||||
|
||||
view = Mock(spec=['get_serializer'])
|
||||
view.__class__.__name__ = 'NoMetaView'
|
||||
view.get_serializer = Mock(return_value=mock_serializer)
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=['Default Tag']) as mock_super:
|
||||
tags = schema.get_tags()
|
||||
mock_super.assert_called_once()
|
||||
assert tags == ['Default Tag']
|
||||
|
||||
def test_get_tags_fallback_to_super(self):
|
||||
"""Test get_tags falls back to parent class method."""
|
||||
view = Mock(spec=['get_serializer'])
|
||||
view.get_serializer = Mock(return_value=Mock(spec=[]))
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=['Super Tag']) as mock_super:
|
||||
tags = schema.get_tags()
|
||||
mock_super.assert_called_once()
|
||||
assert tags == ['Super Tag']
|
||||
|
||||
def test_get_tags_empty_with_warning(self):
|
||||
"""Test get_tags returns 'api' fallback when no tags can be determined."""
|
||||
view = Mock(spec=['get_serializer'])
|
||||
view.__class__.__name__ = 'EmptyView'
|
||||
view.get_serializer = Mock(return_value=Mock(spec=[]))
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=[]):
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.simplefilter("always")
|
||||
tags = schema.get_tags()
|
||||
|
||||
# Check that a warning was raised
|
||||
assert len(w) == 1
|
||||
assert 'Could not determine tags for EmptyView' in str(w[0].message)
|
||||
|
||||
# Should fallback to 'api'
|
||||
assert tags == ['api']
|
||||
|
||||
def test_get_tags_swagger_topic_title_case(self):
|
||||
"""Test that swagger_topic is properly title-cased."""
|
||||
view = Mock()
|
||||
view.swagger_topic = 'multi_word_topic'
|
||||
view.get_serializer = Mock(return_value=Mock())
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
assert tags == ['Multi_Word_Topic']
|
||||
|
||||
def test_is_deprecated_true(self):
|
||||
"""Test is_deprecated returns True when view has deprecated=True."""
|
||||
view = Mock()
|
||||
view.deprecated = True
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
assert schema.is_deprecated() is True
|
||||
|
||||
def test_is_deprecated_false(self):
|
||||
"""Test is_deprecated returns False when view has deprecated=False."""
|
||||
view = Mock()
|
||||
view.deprecated = False
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
assert schema.is_deprecated() is False
|
||||
|
||||
def test_is_deprecated_missing_attribute(self):
|
||||
"""Test is_deprecated returns False when view doesn't have deprecated attribute."""
|
||||
view = Mock(spec=[])
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
assert schema.is_deprecated() is False
|
||||
|
||||
def test_get_tags_serializer_meta_without_model(self):
|
||||
"""Test get_tags when serializer has Meta but no model attribute."""
|
||||
mock_serializer = Mock()
|
||||
mock_serializer.Meta = Mock(spec=[]) # Meta exists but no model
|
||||
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'backup models'
|
||||
|
||||
view = Mock(spec=['get_serializer', 'model'])
|
||||
view.get_serializer = Mock(return_value=mock_serializer)
|
||||
view.model = mock_model
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
# Should fall back to view.model
|
||||
assert tags == ['Backup Models']
|
||||
|
||||
def test_get_tags_complex_scenario_exception_recovery(self):
|
||||
"""Test complex scenario where serializer fails but view.model exists."""
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'recovery models'
|
||||
|
||||
view = Mock(spec=['get_serializer', 'model', '__class__'])
|
||||
view.__class__.__name__ = 'ComplexView'
|
||||
view.get_serializer = Mock(side_effect=ValueError('Invalid serializer'))
|
||||
view.model = mock_model
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.simplefilter("always")
|
||||
tags = schema.get_tags()
|
||||
|
||||
# Should have warned about the exception
|
||||
assert len(w) == 1
|
||||
assert 'ComplexView.get_serializer() raised an exception' in str(w[0].message)
|
||||
|
||||
# But still recovered and got tags from view.model
|
||||
assert tags == ['Recovery Models']
|
||||
|
||||
def test_get_tags_priority_order(self):
|
||||
"""Test that get_tags respects priority: swagger_topic > serializer.Meta.model > view.model."""
|
||||
# Set up a view with all three options
|
||||
mock_model_view = Mock()
|
||||
mock_model_view._meta.verbose_name_plural = 'view models'
|
||||
|
||||
mock_model_serializer = Mock()
|
||||
mock_model_serializer._meta.verbose_name_plural = 'serializer models'
|
||||
|
||||
mock_serializer = Mock()
|
||||
mock_serializer.Meta.model = mock_model_serializer
|
||||
|
||||
view = Mock()
|
||||
view.swagger_topic = 'priority_topic'
|
||||
view.get_serializer = Mock(return_value=mock_serializer)
|
||||
view.model = mock_model_view
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
# swagger_topic should take priority
|
||||
assert tags == ['Priority_Topic']
|
||||
@@ -375,13 +375,15 @@ REST_FRAMEWORK = {
    'VIEW_DESCRIPTION_FUNCTION': 'awx.api.generics.get_view_description',
    'NON_FIELD_ERRORS_KEY': '__all__',
    'DEFAULT_VERSION': 'v2',
    # For OpenAPI schema generation with drf-spectacular
    # For swagger schema generation
    # see https://github.com/encode/django-rest-framework/pull/6532
    'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
    'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.AutoSchema',
    # 'URL_FORMAT_OVERRIDE': None,
}

# SWAGGER_SETTINGS removed - migrated to drf-spectacular (see SPECTACULAR_SETTINGS below)
SWAGGER_SETTINGS = {
    'DEFAULT_AUTO_SCHEMA_CLASS': 'awx.api.swagger.CustomSwaggerAutoSchema',
}

AUTHENTICATION_BACKENDS = ('awx.main.backends.AWXModelBackend',)
@@ -1034,44 +1036,7 @@ ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
ANSIBLE_BASE_PERMISSION_MODEL = 'main.Permission'

# Defaults to be overridden by DAB
SPECTACULAR_SETTINGS = {
    'TITLE': 'AWX API',
    'DESCRIPTION': 'AWX API Documentation',
    'VERSION': 'v2',
    'OAS_VERSION': '3.0.3', # Set OpenAPI Specification version to 3.0.3
    'SERVE_INCLUDE_SCHEMA': False,
    'SCHEMA_PATH_PREFIX': r'/api/v[0-9]',
    'DEFAULT_GENERATOR_CLASS': 'drf_spectacular.generators.SchemaGenerator',
    'SCHEMA_COERCE_PATH_PK_SUFFIX': True,
    'CONTACT': {'email': 'contact@snippets.local'},
    'LICENSE': {'name': 'Apache License'},
    'TERMS_OF_SERVICE': 'https://www.google.com/policies/terms/',
    # Use our custom schema class that handles swagger_topic and deprecated views
    'DEFAULT_SCHEMA_CLASS': 'awx.api.schema.CustomAutoSchema',
    'COMPONENT_SPLIT_REQUEST': True,
    'SWAGGER_UI_SETTINGS': {
        'deepLinking': True,
        'persistAuthorization': True,
        'displayOperationId': True,
    },
    # Resolve enum naming collisions with meaningful names
    'ENUM_NAME_OVERRIDES': {
        # Status field collisions
        'Status4e1Enum': 'UnifiedJobStatusEnum',
        'Status876Enum': 'JobStatusEnum',
        # Job type field collisions
        'JobType8b8Enum': 'JobTemplateJobTypeEnum',
        'JobType95bEnum': 'AdHocCommandJobTypeEnum',
        'JobType963Enum': 'ProjectUpdateJobTypeEnum',
        # Verbosity field collisions
        'Verbosity481Enum': 'JobVerbosityEnum',
        'Verbosity8cfEnum': 'InventoryUpdateVerbosityEnum',
        # Event field collision
        'Event4d3Enum': 'JobEventEnum',
        # Kind field collision
        'Kind362Enum': 'InventoryKindEnum',
    },
}
SPECTACULAR_SETTINGS = {}
OAUTH2_PROVIDER = {}

# Add a postfix to the API URL patterns
@@ -41,14 +41,11 @@ PENDO_TRACKING_STATE = "off"
INSIGHTS_TRACKING_STATE = False

# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
INSTALLED_APPS = "@merge drf_spectacular,debug_toolbar"
INSTALLED_APPS = "@merge drf_yasg,debug_toolbar"
MIDDLEWARE = "@insert 0 debug_toolbar.middleware.DebugToolbarMiddleware"

DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}

# drf-spectacular settings for API schema generation
# SPECTACULAR_SETTINGS moved to defaults.py so it's available in all environments

# Configure a default UUID for development only.
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
@@ -1,30 +0,0 @@
|
||||
Copyright © 2011-present, Encode OSS Ltd.
|
||||
Copyright © 2019-2021, T. Franzel <tfranzel@gmail.com>, Cashlink Technologies GmbH.
|
||||
Copyright © 2021-present, T. Franzel <tfranzel@gmail.com>.
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -1,23 +0,0 @@
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. The name of the author may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
||||
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
||||
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGE.
|
||||
10  pytest.ini

@@ -23,8 +23,7 @@ filterwarnings =

    # NOTE: the following are present using python 3.11
    # FIXME: Set `USE_TZ` to `True`.
    # Note: RemovedInDjango50Warning may not exist in newer Django versions
    ignore:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.
    once:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.:django.utils.deprecation.RemovedInDjango50Warning:django.conf

    # FIXME: Delete this entry once `pyparsing` is updated.
    once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite
@@ -47,12 +46,11 @@ filterwarnings =
    once:DateTimeField User.date_joined received a naive datetime .2020-01-01 00.00.00. while time zone support is active.:RuntimeWarning:django.db.models.fields

    # FIXME: Delete this entry once the deprecation is acted upon.
    # Note: RemovedInDjango51Warning may not exist in newer Django versions
    ignore:'index_together' is deprecated. Use 'Meta.indexes' in 'main.\w+' instead.
    once:'index_together' is deprecated. Use 'Meta.indexes' in 'main.\w+' instead.:django.utils.deprecation.RemovedInDjango51Warning:django.db.models.options

    # FIXME: Update `awx.main.migrations._dab_rbac` and delete this entry.
    # Note: RemovedInDjango50Warning may not exist in newer Django versions
    ignore:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.
    # once:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.:django.utils.deprecation.RemovedInDjango50Warning:django.db.models.query
    once:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.:django.utils.deprecation.RemovedInDjango50Warning:awx.main.migrations._dab_rbac

    # FIXME: Delete this entry once the **broken** always-true assertions in the
    # FIXME: following tests are fixed:
@@ -22,7 +22,6 @@ django-polymorphic
django-solo
djangorestframework==3.15.2 # upgrading to 3.16+ throws NOT_REQUIRED_DEFAULT error on required fields in serializer that have no default
djangorestframework-yaml
drf-spectacular>=0.27.0
dynaconf
filelock
GitPython>=3.1.37 # CVE-2023-41040
@@ -8,7 +8,7 @@ aiohappyeyeballs==2.6.1
|
||||
# via aiohttp
|
||||
aiohttp[speedups]==3.13.0
|
||||
# via
|
||||
# -r requirements.in
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# aiohttp-retry
|
||||
# opa-python-client
|
||||
# twilio
|
||||
@@ -17,18 +17,21 @@ aiohttp-retry==2.9.1
|
||||
aiosignal==1.4.0
|
||||
# via aiohttp
|
||||
ansi2html==1.9.2
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
# git+https://github.com/ansible/ansible-runner.git@devel # git requirements installed separately
|
||||
# via -r /awx_devel/requirements/requirements_git.txt
|
||||
asciichartpy==1.5.25
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
asgiref==3.10.0
|
||||
# via
|
||||
# channels
|
||||
# channels-redis
|
||||
# daphne
|
||||
# django
|
||||
# django-ansible-base
|
||||
# django-cors-headers
|
||||
asn1==3.1.0
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
attrs==25.4.0
|
||||
# via
|
||||
# aiohttp
|
||||
@@ -42,6 +45,12 @@ autocommand==2.2.2
|
||||
# via jaraco-text
|
||||
automat==25.4.16
|
||||
# via twisted
|
||||
# awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel # git requirements installed separately
|
||||
# via -r /awx_devel/requirements/requirements_git.txt
|
||||
awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements_git.txt
|
||||
# awx-plugins-core
|
||||
azure-core==1.35.1
|
||||
# via
|
||||
# azure-identity
|
||||
@@ -50,28 +59,31 @@ azure-core==1.35.1
|
||||
# azure-keyvault-secrets
|
||||
# msrest
|
||||
azure-identity==1.25.1
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
azure-keyvault==4.2.0
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
azure-keyvault-certificates==4.10.0
|
||||
# via azure-keyvault
|
||||
azure-keyvault-keys==4.11.0
|
||||
# via azure-keyvault
|
||||
azure-keyvault-secrets==4.10.0
|
||||
# via azure-keyvault
|
||||
backports-tarfile==1.2.0
|
||||
# via jaraco-context
|
||||
boto3==1.40.46
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
botocore==1.40.46
|
||||
# via
|
||||
# -r requirements.in
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# boto3
|
||||
# s3transfer
|
||||
brotli==1.1.0
|
||||
# via aiohttp
|
||||
cachetools==6.2.0
|
||||
# via google-auth
|
||||
certifi==2025.10.5
|
||||
# git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements_git.txt
|
||||
# kubernetes
|
||||
# msrest
|
||||
# requests
|
||||
@@ -82,10 +94,10 @@ cffi==2.0.0
|
||||
# pynacl
|
||||
channels==4.3.1
|
||||
# via
|
||||
# -r requirements.in
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# channels-redis
|
||||
channels-redis==4.3.0
|
||||
# via -r requirements.in
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
charset-normalizer==3.4.3
|
||||
# via requests
|
||||
click==8.1.8
|
||||
@@ -94,27 +106,29 @@ constantly==23.10.4
|
||||
# via twisted
|
||||
cryptography==46.0.2
|
||||
# via
|
||||
# -r requirements.in
|
||||
# -r /awx_devel/requirements/requirements.in
# adal
# autobahn
# azure-identity
# azure-keyvault-keys
# django-ansible-base
# msal
# pyjwt
# pyopenssl
# service-identity
cython==3.1.3
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
daphne==4.2.1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
dispatcherd==2025.5.21
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
distro==1.9.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
django==4.2.21
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# channels
# django-ansible-base
# django-cors-headers
# django-crum
# django-extensions
@@ -123,37 +137,42 @@ django==4.2.21
# django-polymorphic
# django-solo
# djangorestframework
# drf-spectacular
# django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
# via -r /awx_devel/requirements/requirements_git.txt
django-cors-headers==4.9.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
django-crum==0.7.9
# via -r requirements.in
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
django-extensions==4.1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
django-flags==5.0.14
# via -r requirements.in
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
django-guid==3.5.2
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
django-polymorphic==4.1.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
django-solo==2.4.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
djangorestframework==3.15.2
# via
# -r requirements.in
# drf-spectacular
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
djangorestframework-yaml==2.0.0
# via -r requirements.in
drf-spectacular==0.29.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
durationpy==0.10
# via kubernetes
dynaconf==3.2.11
# via -r requirements.in
# via
# -r /awx_devel/requirements/requirements.in
# django-ansible-base
enum-compat==0.0.3
# via asn1
filelock==3.19.1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
frozenlist==1.8.0
# via
# aiohttp
@@ -161,7 +180,7 @@ frozenlist==1.8.0
gitdb==4.0.12
# via gitpython
gitpython==3.1.45
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
google-auth==2.41.1
# via kubernetes
googleapis-common-protos==1.70.0
@@ -170,7 +189,7 @@ googleapis-common-protos==1.70.0
# opentelemetry-exporter-otlp-proto-http
grpcio==1.75.1
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
hiredis==3.2.1
# via redis
@@ -180,19 +199,21 @@ hyperlink==21.0.0
# twisted
idna==3.10
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# hyperlink
# requests
# twisted
# yarl
importlib-metadata==8.7.0
# via opentelemetry-api
importlib-resources==6.5.2
# via irc
incremental==24.7.2
# via twisted
inflection==0.5.1
# via drf-spectacular
# via django-ansible-base
irc==20.5.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
isodate==0.7.2
# via
# azure-keyvault-certificates
@@ -217,19 +238,17 @@ jaraco-text==4.0.0
# irc
# jaraco-collections
jinja2==3.1.6
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
jmespath==1.0.1
# via
# boto3
# botocore
jq==1.10.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
json-log-formatter==1.1.1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
jsonschema==4.25.1
# via
# -r requirements.in
# drf-spectacular
# via -r /awx_devel/requirements/requirements.in
jsonschema-specifications==2025.9.1
# via jsonschema
kubernetes==34.1.0
@@ -237,11 +256,11 @@ kubernetes==34.1.0
lockfile==0.12.2
# via python-daemon
markdown==3.9
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
markupsafe==3.0.3
# via jinja2
maturin==1.9.6
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
more-itertools==10.8.0
# via
# irc
@@ -256,12 +275,12 @@ msal-extensions==1.3.1
# via azure-identity
msgpack==1.1.1
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# channels-redis
msrest==0.7.1
# via msrestazure
msrestazure==0.6.4.post1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
multidict==6.7.0
# via
# aiohttp
@@ -269,12 +288,12 @@ multidict==6.7.0
oauthlib==3.3.1
# via requests-oauthlib
opa-python-client==2.0.2
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
openshift==0.13.2
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
opentelemetry-api==1.37.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-instrumentation
@@ -282,7 +301,7 @@ opentelemetry-api==1.37.0
# opentelemetry-sdk
# opentelemetry-semantic-conventions
opentelemetry-exporter-otlp==1.37.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
opentelemetry-exporter-otlp-proto-common==1.37.0
# via
# opentelemetry-exporter-otlp-proto-grpc
@@ -294,7 +313,7 @@ opentelemetry-exporter-otlp-proto-http==1.37.0
opentelemetry-instrumentation==0.58b0
# via opentelemetry-instrumentation-logging
opentelemetry-instrumentation-logging==0.58b0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
opentelemetry-proto==1.37.0
# via
# opentelemetry-exporter-otlp-proto-common
@@ -302,7 +321,7 @@ opentelemetry-proto==1.37.0
# opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.37.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.58b0
@@ -311,30 +330,33 @@ opentelemetry-semantic-conventions==0.58b0
# opentelemetry-sdk
packaging==25.0
# via
# ansible-runner
# django-guid
# opentelemetry-instrumentation
# setuptools-scm
pbr==7.0.1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
pexpect==4.9.0
# via -r requirements.in
# via
# -r /awx_devel/requirements/requirements.in
# ansible-runner
pkgconfig==1.5.5
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
prometheus-client==0.23.1
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
propcache==0.4.0
# via
# aiohttp
# yarl
protobuf==6.33.0
protobuf==6.32.1
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# googleapis-common-protos
# opentelemetry-proto
psutil==7.1.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
psycopg==3.2.10
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
ptyprocess==0.7.0
# via pexpect
pyasn1==0.6.1
@@ -351,12 +373,15 @@ pycares==4.11.0
pycparser==2.23
# via cffi
pygerduty==0.38.3
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
pygithub==2.8.1
# via -r requirements.in
# via
# -r /awx_devel/requirements/requirements.in
# awx-plugins-core
pyjwt[crypto]==2.10.1
# via
# adal
# django-ansible-base
# msal
# pygithub
# twilio
@@ -364,12 +389,14 @@ pynacl==1.6.0
# via pygithub
pyopenssl==25.3.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# twisted
pyparsing==2.4.7
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
python-daemon==3.1.2
# via -r requirements.in
# via
# -r /awx_devel/requirements/requirements.in
# ansible-runner
python-dateutil==2.9.0.post0
# via
# adal
@@ -378,28 +405,28 @@ python-dateutil==2.9.0.post0
# receptorctl
# tempora
python-dsv-sdk==1.0.4
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
python-string-utils==1.0.0
# via openshift
python-tss-sdk==2.0.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
pytz==2025.2
# via irc
pyyaml==6.0.3
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# ansible-runner
# dispatcherd
# djangorestframework-yaml
# drf-spectacular
# kubernetes
# receptorctl
pyzstd==0.18.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
receptorctl==1.6.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
redis[hiredis]==6.4.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# channels-redis
referencing==0.36.2
# via
@@ -407,9 +434,10 @@ referencing==0.36.2
# jsonschema-specifications
requests==2.32.5
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# adal
# azure-core
# django-ansible-base
# kubernetes
# msal
# msrest
@@ -437,9 +465,9 @@ semantic-version==2.10.0
service-identity==24.2.0
# via twisted
setuptools-rust==1.10.2
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
setuptools-scm[toml]==8.1.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
six==1.17.0
# via
# azure-core
@@ -449,27 +477,29 @@ six==1.17.0
# pygerduty
# python-dateutil
slack-sdk==3.37.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
smmap==5.0.2
# via gitdb
sqlparse==0.5.3
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# django
# django-ansible-base
tempora==5.8.1
# via
# irc
# jaraco-logging
twilio==9.8.3
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
twisted[tls]==25.5.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# daphne
txaio==25.9.2
# via autobahn
typing-extensions==4.15.0
# via
# aiosignal
# azure-core
# azure-identity
# azure-keyvault-certificates
@@ -481,25 +511,28 @@ typing-extensions==4.15.0
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-sdk
# opentelemetry-semantic-conventions
# psycopg
# pygithub
# pyopenssl
# pyzstd
# referencing
# twisted
uritemplate==4.2.0
# via drf-spectacular
urllib3==2.3.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# botocore
# django-ansible-base
# kubernetes
# pygithub
# requests
uwsgi==2.0.30
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
uwsgitop==0.12
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
websocket-client==1.8.0
# via kubernetes
wheel==0.42.0
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
wrapt==1.17.3
# via opentelemetry-instrumentation
yarl==1.22.0
@@ -508,13 +541,15 @@ zipp==3.23.0
# via importlib-metadata
zope-interface==8.0.1
# via twisted
zstandard==0.25.0
# via aiohttp

# The following packages are considered to be unsafe in a requirements file:
pip==21.2.4
# via -r requirements.in
# via -r /awx_devel/requirements/requirements.in
setuptools==80.9.0
# via
# -r requirements.in
# -r /awx_devel/requirements/requirements.in
# asciichartpy
# autobahn
# incremental

@@ -1,7 +1,7 @@
build
django-debug-toolbar==3.2.4
django-test-migrations
drf-spectacular>=0.27.0 # Modern OpenAPI 3.0 schema generator
drf-yasg<1.21.10 # introduces new DeprecationWarning that is turned into error
# pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed
ipython>=7.31.1 # https://github.com/ansible/awx/security/dependabot/30
unittest2