Mirror of https://github.com/ansible/awx.git (synced 2026-02-04 19:18:13 -03:30)

Compare commits: 17 commits (thedoubl3j ... dependabot)
| Author | SHA1 | Date |
|---|---|---|
| | 5264e4a12b | |
| | 2b2f2b73ac | |
| | e03beb4d54 | |
| | 4db52e074b | |
| | 4e1911f7c4 | |
| | b02117979d | |
| | 2fa2cd8beb | |
| | f81859510c | |
| | 335a4bbbc6 | |
| | 5ea2fe65b0 | |
| | f3f10ae9ce | |
| | 5be4462395 | |
| | d1d3a3471b | |
| | a53fdaddae | |
| | f72591195e | |
| | 0d9483b54c | |
| | f3fd9945d6 | |
.github/actions/awx_devel_image/action.yml (vendored): 2 changes

@@ -11,8 +11,6 @@ inputs:
 runs:
   using: composite
   steps:
     - uses: ./.github/actions/setup-python

     - name: Set lower case owner name
       shell: bash
       run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV

.github/actions/run_awx_devel/action.yml (vendored): 2 changes

@@ -36,7 +36,7 @@ runs:
     - name: Upgrade ansible-core
       shell: bash
-      run: python3 -m pip install --upgrade ansible-core
+      run: python -m pip install --upgrade ansible-core

     - name: Install system deps
       shell: bash

.github/workflows/api_schema_check.yml (vendored, new file): 72 additions

@@ -0,0 +1,72 @@
---
name: API Schema Change Detection
env:
  LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  DEV_DOCKER_OWNER: ${{ github.repository_owner }}
  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
  UPSTREAM_REPOSITORY_ID: 91594105

on:
  pull_request:
    branches:
      - devel
      - release_**
      - feature_**
      - stable-**

jobs:
  api-schema-detection:
    name: Detect API Schema Changes
    runs-on: ubuntu-latest
    timeout-minutes: 30
    permissions:
      packages: write
      contents: read

    steps:
      - uses: actions/checkout@v4
        with:
          show-progress: false
          fetch-depth: 0

      - name: Build awx_devel image for schema check
        uses: ./.github/actions/awx_devel_image
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

      - name: Detect API schema changes
        id: schema-check
        continue-on-error: true
        run: |
          AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
          AWX_DOCKER_CMD='make detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}' \
          make docker-runner 2>&1 | tee schema-diff.txt
          exit ${PIPESTATUS[0]}

      - name: Add schema diff to job summary
        if: always()
        # Show the diff text; if for some reason it can't be generated, state that it can't be.
        run: |
          echo "## API Schema Change Detection Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if [ -f schema-diff.txt ]; then
            if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
              echo "### Schema changes detected" >> $GITHUB_STEP_SUMMARY
              echo "" >> $GITHUB_STEP_SUMMARY
              # Truncate to first 1000 lines to stay under GitHub's 1MB summary limit
              TOTAL_LINES=$(wc -l < schema-diff.txt)
              if [ $TOTAL_LINES -gt 1000 ]; then
                echo "_Showing first 1000 of ${TOTAL_LINES} lines. See job logs or download artifact for full diff._" >> $GITHUB_STEP_SUMMARY
                echo "" >> $GITHUB_STEP_SUMMARY
              fi
              echo '```diff' >> $GITHUB_STEP_SUMMARY
              head -n 1000 schema-diff.txt >> $GITHUB_STEP_SUMMARY
              echo '```' >> $GITHUB_STEP_SUMMARY
            else
              echo "### No schema changes detected" >> $GITHUB_STEP_SUMMARY
            fi
          else
            echo "### Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
          fi

.github/workflows/ci.yml (vendored): 65 changes

@@ -32,18 +32,9 @@ jobs:
           - name: api-lint
             command: /var/lib/awx/venv/awx/bin/tox -e linters
             coverage-upload-name: ""
-          - name: api-swagger
-            command: /start_tests.sh swagger
-            coverage-upload-name: ""
           - name: awx-collection
             command: /start_tests.sh test_collection_all
             coverage-upload-name: "awx-collection"
-          - name: api-schema
-            command: >-
-              /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
-              github.event.pull_request.base.ref || github.ref_name
-              }}
-            coverage-upload-name: ""

     steps:
       - uses: actions/checkout@v4

@@ -63,6 +54,17 @@ jobs:
             AWX_DOCKER_CMD='${{ matrix.tests.command }}'
           make docker-runner

+      - name: Inject PR number into coverage.xml
+        if: >-
+          !cancelled()
+          && github.event_name == 'pull_request'
+          && steps.make-run.outputs.cov-report-files != ''
+        run: |
+          if [ -f "reports/coverage.xml" ]; then
+            sed -i '2i<!-- PR ${{ github.event.pull_request.number }} -->' reports/coverage.xml
+            echo "Injected PR number ${{ github.event.pull_request.number }} into coverage.xml"
+          fi
+
       - name: Upload test coverage to Codecov
         if: >-
           !cancelled()

@@ -102,6 +104,14 @@ jobs:
           }}
           token: ${{ secrets.CODECOV_TOKEN }}

+      - name: Upload test artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ matrix.tests.name }}-artifacts
+          path: reports/coverage.xml
+          retention-days: 5
+
       - name: Upload awx jUnit test reports
         if: >-
           !cancelled()

@@ -132,7 +142,7 @@ jobs:
       - uses: ./.github/actions/setup-python
         with:
-          python-version: '3.x'
+          python-version: '3.13'

       - uses: ./.github/actions/run_awx_devel
         id: awx

@@ -175,11 +185,11 @@ jobs:
       - name: Setup python, referencing action at awx relative path
         uses: ./awx/.github/actions/setup-python
         with:
-          python-version: '3.x'
+          python-version: '3.13'

       - name: Install playbook dependencies
         run: |
-          python3 -m pip install docker
+          python -m pip install docker

       - name: Build AWX image
         working-directory: awx

@@ -193,8 +203,8 @@ jobs:
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
         run: |
-          python3 -m pip install -r molecule/requirements.txt
-          python3 -m pip install PyYAML  # for awx/tools/scripts/rewrite-awx-operator-requirements.py
+          python -m pip install -r molecule/requirements.txt
+          python -m pip install PyYAML  # for awx/tools/scripts/rewrite-awx-operator-requirements.py
           $(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)

@@ -281,7 +291,11 @@ jobs:
       - uses: ./.github/actions/setup-python
         with:
-          python-version: '3.x'
+          python-version: '3.13'
+
+      - name: Remove system ansible to avoid conflicts
+        run: |
+          python -m pip uninstall -y ansible ansible-core || true

       - uses: ./.github/actions/run_awx_devel
         id: awx

@@ -292,8 +306,9 @@ jobs:
       - name: Install dependencies for running tests
         run: |
-          python3 -m pip install -e ./awxkit/
-          python3 -m pip install -r awx_collection/requirements.txt
+          python -m pip install -e ./awxkit/
+          python -m pip install -r awx_collection/requirements.txt
+          hash -r  # Rehash to pick up newly installed scripts

       - name: Run integration tests
         id: make-run

@@ -305,6 +320,7 @@ jobs:
           echo 'password = password' >> ~/.tower_cli.cfg
           echo 'verify_ssl = false' >> ~/.tower_cli.cfg
           TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
+          export PYTHONPATH="$(python -c 'import site; print(":".join(site.getsitepackages()))')${PYTHONPATH:+:$PYTHONPATH}"
           make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
         env:
           ANSIBLE_TEST_PREFER_PODMAN: 1

@@ -359,10 +375,14 @@ jobs:
       - uses: ./.github/actions/setup-python
         with:
-          python-version: '3.x'
+          python-version: '3.13'
+
+      - name: Remove system ansible to avoid conflicts
+        run: |
+          python -m pip uninstall -y ansible ansible-core || true

       - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
+        run: python -m pip install --upgrade ansible-core

       - name: Download coverage artifacts
         uses: actions/download-artifact@v4

@@ -377,11 +397,12 @@ jobs:
           mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
           cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
           cd ~/.ansible/collections/ansible_collections/awx/awx
-          ansible-test coverage combine --requirements
-          ansible-test coverage html
+          hash -r  # Rehash to pick up newly installed scripts
+          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage combine --requirements
+          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage html
           echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
           echo '```' >> $GITHUB_STEP_SUMMARY
-          ansible-test coverage report >> $GITHUB_STEP_SUMMARY
+          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage report >> $GITHUB_STEP_SUMMARY
           echo '```' >> $GITHUB_STEP_SUMMARY
           echo >> $GITHUB_STEP_SUMMARY
           echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY

.github/workflows/sonarcloud_pr.yml (vendored): 271 changes

@@ -1,85 +1,248 @@
 ---
-name: SonarQube
+# SonarCloud Analysis Workflow for awx
+#
+# This workflow runs SonarCloud analysis triggered by CI workflow completion.
+# It is split into two separate jobs for clarity and maintainability:
+#
+# FLOW: CI completes → workflow_run triggers this workflow → appropriate job runs
+#
+# JOB 1: sonar-pr-analysis (for PRs)
+#   - Triggered by: workflow_run (CI on pull_request)
+#   - Steps: Download coverage → Get PR info → Get changed files → Run SonarCloud PR analysis
+#   - Scans: All changed files in the PR (Python, YAML, JSON, etc.)
+#   - Quality gate: Focuses on new/changed code in PR only
+#
+# JOB 2: sonar-branch-analysis (for long-lived branches)
+#   - Triggered by: workflow_run (CI on push to devel)
+#   - Steps: Download coverage → Run SonarCloud branch analysis
+#   - Scans: Full codebase
+#   - Quality gate: Focuses on overall project health
+#
+# This ensures coverage data is always available from CI before analysis runs.
+#
+# What files are scanned:
+# - All files in the repository that SonarCloud can analyze
+# - Excludes: tests, scripts, dev environments, external collections (see sonar-project.properties)

 # With much help from:
 # https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/30
 # https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/32
+name: SonarCloud
 on:
-  workflow_run:
+  workflow_run:  # This is triggered by CI being completed.
     workflows:
       - CI
     types:
      - completed

 permissions: read-all

 jobs:
-  sonarqube:
+  sonar-pr-analysis:
+    name: SonarCloud PR Analysis
     runs-on: ubuntu-latest
-    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
+    if: |
+      github.event.workflow_run.conclusion == 'success' &&
+      github.event.workflow_run.event == 'pull_request' &&
+      github.repository == 'ansible/awx'
     steps:
-      - name: Checkout Code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          show-progress: false
+      - uses: actions/checkout@v4

-      - name: Download coverage report artifact
-        uses: actions/download-artifact@v4
+      # Download all individual coverage artifacts from CI workflow
+      - name: Download coverage artifacts
+        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
         with:
-          name: coverage-report
-          path: reports/
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          run-id: ${{ github.event.workflow_run.id }}
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          workflow: CI
+          run_id: ${{ github.event.workflow_run.id }}
+          pattern: api-test-artifacts

-      - name: Download PR number artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: pr-number
-          path: .
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          run-id: ${{ github.event.workflow_run.id }}
-
-      - name: Extract PR number
-        run: |
-          cat pr-number.txt
-          echo "PR_NUMBER=$(cat pr-number.txt)" >> $GITHUB_ENV
-
-      - name: Get PR info
-        uses: octokit/request-action@v2.x
-        id: pr_info
-        with:
-          route: GET /repos/{repo}/pulls/{number}
-          repo: ${{ github.event.repository.full_name }}
-          number: ${{ env.PR_NUMBER }}
-
-      - name: Set PR info into env
+      # Extract PR metadata from workflow_run event
+      - name: Set PR metadata and prepare files for analysis
+        env:
+          COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
+          REPO_NAME: ${{ github.event.repository.full_name }}
+          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          echo "PR_BASE=${{ fromJson(steps.pr_info.outputs.data).base.ref }}" >> $GITHUB_ENV
-          echo "PR_HEAD=${{ fromJson(steps.pr_info.outputs.data).head.ref }}" >> $GITHUB_ENV
+          # Find all downloaded coverage XML files
+          coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
+          echo "Found coverage files: $coverage_files"
+          echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
+
+          # Extract PR number from first coverage.xml file found
+          first_coverage=$(find . -name "coverage.xml" -type f | head -1)
+          if [ -f "$first_coverage" ]; then
+            PR_NUMBER=$(grep -m 1 '<!-- PR' "$first_coverage" | awk '{print $3}' || echo "")
+          else
+            PR_NUMBER=""
+          fi
+
+          echo "🔍 SonarCloud Analysis Decision Summary"
+          echo "========================================"
+          echo "├── CI Event: ✅ Pull Request"
+          echo "├── PR Number from coverage.xml: #${PR_NUMBER:-<not found>}"
+
+          if [ -z "$PR_NUMBER" ]; then
+            echo "##[error]❌ FATAL: PR number not found in coverage.xml"
+            echo "##[error]This job requires a PR number to run PR analysis."
+            echo "##[error]The ci workflow should have injected the PR number into coverage.xml."
+            exit 1
+          fi
+
+          # Get PR metadata from GitHub API
+          PR_DATA=$(gh api "repos/$REPO_NAME/pulls/$PR_NUMBER")
+          PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')
+          PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')
+
+          # Print summary
+          echo "🔍 SonarCloud Analysis Decision Summary"
+          echo "========================================"
+          echo "├── CI Event: ✅ Pull Request"
+          echo "├── PR Number: #$PR_NUMBER"
+          echo "├── Base Branch: $PR_BASE"
+          echo "├── Head Branch: $PR_HEAD"
+          echo "├── Repo: $REPO_NAME"
+
+          # Export to GitHub env for later steps
+          echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
+          echo "PR_BASE=$PR_BASE" >> $GITHUB_ENV
+          echo "PR_HEAD=$PR_HEAD" >> $GITHUB_ENV
+          echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_ENV
+          echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
+
+          # Get all changed files from PR (with error handling)
+          files=""
+          if [ -n "$PR_NUMBER" ]; then
+            if gh api repos/$REPO_NAME/pulls/$PR_NUMBER/files --jq '.[].filename' > /tmp/pr_files.txt 2>/tmp/pr_error.txt; then
+              files=$(cat /tmp/pr_files.txt)
+            else
+              echo "├── Changed Files: ⚠️ Could not fetch (likely test repo or PR not found)"
+              if [ -f coverage.xml ] && [ -s coverage.xml ]; then
+                echo "├── Coverage Data: ✅ Available"
+              else
+                echo "├── Coverage Data: ⚠️ Not available"
+              fi
+              echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
+              # No files = no inclusions filter = full scan
+              exit 0
+            fi
+          else
+            echo "├── PR Number: ⚠️ Not available"
+            if [ -f coverage.xml ] && [ -s coverage.xml ]; then
+              echo "├── Coverage Data: ✅ Available"
+            else
+              echo "├── Coverage Data: ⚠️ Not available"
+            fi
+            echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
+            exit 0
+          fi
+
+          # Get file extensions and count for summary
+          extensions=$(echo "$files" | sed 's/.*\.//' | sort | uniq | tr '\n' ',' | sed 's/,$//')
+          file_count=$(echo "$files" | wc -l)
+          echo "├── Changed Files: $file_count file(s) (.${extensions})"
+
+          # Check if coverage.xml exists and has content
+          if [ -f coverage.xml ] && [ -s coverage.xml ]; then
+            echo "├── Coverage Data: ✅ Available"
+          else
+            echo "├── Coverage Data: ⚠️ Not available (analysis will proceed without coverage)"
+          fi
+
+          # Prepare file list for Sonar
+          echo "All changed files in PR:"
+          echo "$files"
+
+          # Filter out files that are excluded by .coveragerc to avoid coverage conflicts
+          # This prevents SonarCloud from analyzing files that have no coverage data
+          if [ -n "$files" ]; then
+            # Filter out files matching .coveragerc omit patterns
+            filtered_files=$(echo "$files" | grep -v "settings/.*_defaults\.py$" | grep -v "settings/defaults\.py$" | grep -v "main/migrations/")
+
+            # Show which files were filtered out for transparency
+            excluded_files=$(echo "$files" | grep -E "(settings/.*_defaults\.py$|settings/defaults\.py$|main/migrations/)" || true)
+            if [ -n "$excluded_files" ]; then
+              echo "├── Filtered out (coverage-excluded): $(echo "$excluded_files" | wc -l) file(s)"
+              echo "$excluded_files" | sed 's/^/│   - /'
+            fi
+
+            if [ -n "$filtered_files" ]; then
+              inclusions=$(echo "$filtered_files" | tr '\n' ',' | sed 's/,$//')
+              echo "SONAR_INCLUSIONS=$inclusions" >> $GITHUB_ENV
+              echo "└── Result: ✅ Will scan these files (excluding coverage-omitted files): $inclusions"
+            else
+              echo "└── Result: ✅ All changed files are excluded by coverage config, running full SonarCloud analysis"
+              # Don't set SONAR_INCLUSIONS, let it scan everything per sonar-project.properties
+            fi
+          else
+            echo "└── Result: ✅ Running SonarCloud analysis"
+          fi

       - name: Add base branch
         if: env.PR_NUMBER != ''
         run: |
           gh pr checkout ${{ env.PR_NUMBER }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

       - name: Extract and export repo owner/name
         run: |
           REPO_SLUG="${GITHUB_REPOSITORY}"
           IFS="/" read -r REPO_OWNER REPO_NAME <<< "$REPO_SLUG"
           echo "REPO_OWNER=$REPO_OWNER" >> $GITHUB_ENV
           echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV

-      - name: SonarQube scan
-        uses: SonarSource/sonarqube-scan-action@v5
+      - name: SonarCloud Scan
+        uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602  # v6
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          SONAR_TOKEN: ${{ secrets[format('{0}', vars.SONAR_TOKEN_SECRET_NAME)] }}
+          SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
         with:
           args: >
             -Dsonar.organization=${{ env.REPO_OWNER }}
             -Dsonar.projectKey=${{ env.REPO_OWNER }}_${{ env.REPO_NAME }}
+            -Dsonar.scm.revision=${{ env.COMMIT_SHA }}
             -Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
             -Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
             -Dsonar.pullrequest.base=${{ env.PR_BASE }}
+            -Dsonar.python.coverage.reportPaths=${{ env.COVERAGE_PATHS }}
+            ${{ env.SONAR_INCLUSIONS && format('-Dsonar.inclusions={0}', env.SONAR_INCLUSIONS) || '' }}
+
+  sonar-branch-analysis:
+    name: SonarCloud Branch Analysis
+    runs-on: ubuntu-latest
+    if: |
+      github.event_name == 'workflow_run' &&
+      github.event.workflow_run.conclusion == 'success' &&
+      github.event.workflow_run.event == 'push' &&
+      github.repository == 'ansible/awx'
+    steps:
+      - uses: actions/checkout@v4
+
+      # Download all individual coverage artifacts from CI workflow (optional for branch pushes)
+      - name: Download coverage artifacts
+        continue-on-error: true
+        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          workflow: CI
+          run_id: ${{ github.event.workflow_run.id }}
+          pattern: api-test-artifacts
+
+      - name: Print SonarCloud Analysis Summary
+        env:
+          BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
+        run: |
+          # Find all downloaded coverage XML files
+          coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
+          echo "Found coverage files: $coverage_files"
+          echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
+
+          echo "🔍 SonarCloud Analysis Summary"
+          echo "=============================="
+          echo "├── CI Event: ✅ Push (via workflow_run)"
+          echo "├── Branch: $BRANCH_NAME"
+          echo "├── Coverage Files: ${coverage_files:-none}"
+          echo "├── Python Changes: ➖ N/A (Full codebase scan)"
+          echo "└── Result: ✅ Proceed - \"Running SonarCloud analysis\""
+
+      - name: SonarCloud Scan
+        uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602  # v6
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
+        with:
+          args: >
+            -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
+            -Dsonar.branch.name=${{ github.event.workflow_run.head_branch }}
+            ${{ env.COVERAGE_PATHS && format('-Dsonar.python.coverage.reportPaths={0}', env.COVERAGE_PATHS) || '' }}

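The hand-off between CI and this workflow hinges on the PR marker: the sed step in ci.yml writes an XML comment as the second line of coverage.xml, and the job above greps it back out. A small Python sketch of that round trip under the marker format shown above (the function names are hypothetical):

    import re
    from pathlib import Path

    def inject_pr_marker(coverage_xml: str, pr_number: int) -> None:
        # Equivalent of: sed -i '2i<!-- PR N -->' reports/coverage.xml
        lines = Path(coverage_xml).read_text().splitlines(keepends=True)
        lines.insert(1, f"<!-- PR {pr_number} -->\n")
        Path(coverage_xml).write_text("".join(lines))

    def extract_pr_marker(coverage_xml: str) -> str | None:
        # Equivalent of: grep -m 1 '<!-- PR' coverage.xml | awk '{print $3}'
        match = re.search(r"<!-- PR (\d+) -->", Path(coverage_xml).read_text())
        return match.group(1) if match else None
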
.gitignore (vendored): 1 change

@@ -1,6 +1,7 @@
 # Ignore generated schema
 swagger.json
 schema.json
+schema.yaml
 reference-schema.json

 # Tags

Makefile: 32 changes

@@ -27,6 +27,8 @@ TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
 PARALLEL_TESTS ?= -n auto
 # collection integration test directories (defaults to all)
 COLLECTION_TEST_TARGET ?=
+# Python version for ansible-test (must be 3.11, 3.12, or 3.13)
+ANSIBLE_TEST_PYTHON_VERSION ?= 3.13
 # args for collection install
 COLLECTION_PACKAGE ?= awx
 COLLECTION_NAMESPACE ?= awx

@@ -314,20 +316,17 @@ black: reports
 	@echo "fi" >> .git/hooks/pre-commit
 	@chmod +x .git/hooks/pre-commit

-genschema: reports
-	$(MAKE) swagger PYTEST_ADDOPTS="--genschema --create-db "
-	mv swagger.json schema.json
-
-swagger: reports
+genschema: awx-link reports
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	(set -o pipefail && py.test $(COVERAGE_ARGS) $(PARALLEL_TESTS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
-	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
-	then \
-		echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
-		echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
-	fi
+	$(MANAGEMENT_COMMAND) spectacular --format openapi-json --file schema.json
+
+genschema-yaml: awx-link reports
+	@if [ "$(VENV_BASE)" ]; then \
+		. $(VENV_BASE)/awx/bin/activate; \
+	fi; \
+	$(MANAGEMENT_COMMAND) spectacular --format openapi --file schema.yaml

 check: black

@@ -431,8 +430,8 @@ test_collection_sanity:

 test_collection_integration: install_collection
 	cd $(COLLECTION_INSTALL) && \
-	ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
-	ansible-test coverage xml --requirements --group-by command --group-by version
+	PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test integration --python $(ANSIBLE_TEST_PYTHON_VERSION) --coverage -vvv $(COLLECTION_TEST_TARGET) && \
+	PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test coverage xml --requirements --group-by command --group-by version
 	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
 	then \
 		echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \

@@ -537,14 +536,15 @@ docker-compose-test: awx/projects docker-compose-sources
 docker-compose-runtest: awx/projects docker-compose-sources
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

-docker-compose-build-swagger: awx/projects docker-compose-sources
-	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
+docker-compose-build-schema: awx/projects docker-compose-sources
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 make genschema

 SCHEMA_DIFF_BASE_BRANCH ?= devel
 detect-schema-change: genschema
 	curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
 	# Ignore differences in whitespace with -b
-	diff -u -b reference-schema.json schema.json
+	# diff exits with 1 when files differ - capture but don't fail
+	-diff -u -b reference-schema.json schema.json

 docker-compose-clean: awx/projects
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf

@@ -161,16 +161,14 @@ def get_view_description(view, html=False):
|
||||
|
||||
|
||||
def get_default_schema():
|
||||
if settings.DYNACONF.is_development_mode:
|
||||
from awx.api.swagger import schema_view
|
||||
|
||||
return schema_view
|
||||
else:
|
||||
return views.APIView.schema
|
||||
# drf-spectacular is configured via REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS']
|
||||
# Just use the DRF default, which will pick up our CustomAutoSchema
|
||||
return views.APIView.schema
|
||||
|
||||
|
||||
class APIView(views.APIView):
|
||||
schema = get_default_schema()
|
||||
# Schema is inherited from DRF's APIView, which uses DEFAULT_SCHEMA_CLASS
|
||||
# No need to override it here - drf-spectacular will handle it
|
||||
versioning_class = URLPathVersioning
|
||||
|
||||
def initialize_request(self, request, *args, **kwargs):
|
||||
|
||||
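The replacement comment points at DRF's schema hook rather than a per-view override. A minimal sketch of that wiring, assuming the usual drf-spectacular setup (the exact AWX settings module is not shown in this diff):

    # Assumed settings excerpt: drf-spectacular reads DEFAULT_SCHEMA_CLASS,
    # so views.APIView.schema resolves to CustomAutoSchema with no override needed.
    REST_FRAMEWORK = {
        'DEFAULT_SCHEMA_CLASS': 'awx.api.schema.CustomAutoSchema',
    }
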
awx/api/schema.py (new file): 75 additions

@@ -0,0 +1,75 @@
import warnings

from rest_framework.permissions import IsAuthenticated
from drf_spectacular.openapi import AutoSchema
from drf_spectacular.views import (
    SpectacularAPIView,
    SpectacularSwaggerView,
    SpectacularRedocView,
)


class CustomAutoSchema(AutoSchema):
    """Custom AutoSchema to add swagger_topic to tags and handle deprecated endpoints."""

    def get_tags(self):
        tags = []
        try:
            if hasattr(self.view, 'get_serializer'):
                serializer = self.view.get_serializer()
            else:
                serializer = None
        except Exception:
            serializer = None
            warnings.warn(
                '{}.get_serializer() raised an exception during '
                'schema generation. Serializer fields will not be '
                'generated for this view.'.format(self.view.__class__.__name__)
            )

        if hasattr(self.view, 'swagger_topic'):
            tags.append(str(self.view.swagger_topic).title())
        elif serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
            tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
        elif hasattr(self.view, 'model'):
            tags.append(str(self.view.model._meta.verbose_name_plural).title())
        else:
            tags = super().get_tags()  # Use default drf-spectacular behavior

        if not tags:
            warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
            tags = ['api']  # Fallback to default value

        return tags

    def is_deprecated(self):
        """Return `True` if this operation is to be marked as deprecated."""
        return getattr(self.view, 'deprecated', False)


class AuthenticatedSpectacularAPIView(SpectacularAPIView):
    """SpectacularAPIView that requires authentication."""

    permission_classes = [IsAuthenticated]


class AuthenticatedSpectacularSwaggerView(SpectacularSwaggerView):
    """SpectacularSwaggerView that requires authentication."""

    permission_classes = [IsAuthenticated]


class AuthenticatedSpectacularRedocView(SpectacularRedocView):
    """SpectacularRedocView that requires authentication."""

    permission_classes = [IsAuthenticated]


# Schema view (returns OpenAPI schema JSON/YAML)
schema_view = AuthenticatedSpectacularAPIView.as_view()

# Swagger UI view
swagger_ui_view = AuthenticatedSpectacularSwaggerView.as_view(url_name='api:schema-json')

# ReDoc UI view
redoc_view = AuthenticatedSpectacularRedocView.as_view(url_name='api:schema-json')

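A short sketch of how a view opts into a tag under this scheme (the view class here is hypothetical): get_tags() prefers swagger_topic, then the serializer's Meta.model, then view.model, and only then falls back to drf-spectacular's default.

    from rest_framework import views

    class ExampleMetricsView(views.APIView):
        # 'metrics' is title-cased to the tag 'Metrics' in the generated schema
        swagger_topic = 'metrics'
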
@@ -1,55 +0,0 @@
-import warnings
-
-from rest_framework.permissions import AllowAny
-from drf_yasg import openapi
-from drf_yasg.inspectors import SwaggerAutoSchema
-from drf_yasg.views import get_schema_view
-
-
-class CustomSwaggerAutoSchema(SwaggerAutoSchema):
-    """Custom SwaggerAutoSchema to add swagger_topic to tags."""
-
-    def get_tags(self, operation_keys=None):
-        tags = []
-        try:
-            if hasattr(self.view, 'get_serializer'):
-                serializer = self.view.get_serializer()
-            else:
-                serializer = None
-        except Exception:
-            serializer = None
-            warnings.warn(
-                '{}.get_serializer() raised an exception during '
-                'schema generation. Serializer fields will not be '
-                'generated for {}.'.format(self.view.__class__.__name__, operation_keys)
-            )
-        if hasattr(self.view, 'swagger_topic'):
-            tags.append(str(self.view.swagger_topic).title())
-        elif serializer and hasattr(serializer, 'Meta'):
-            tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
-        elif hasattr(self.view, 'model'):
-            tags.append(str(self.view.model._meta.verbose_name_plural).title())
-        else:
-            tags = ['api']  # Fallback to default value
-
-        if not tags:
-            warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
-        return tags
-
-    def is_deprecated(self):
-        """Return `True` if this operation is to be marked as deprecated."""
-        return getattr(self.view, 'deprecated', False)
-
-
-schema_view = get_schema_view(
-    openapi.Info(
-        title='AWX API',
-        default_version='v2',
-        description='AWX API Documentation',
-        terms_of_service='https://www.google.com/policies/terms/',
-        contact=openapi.Contact(email='contact@snippets.local'),
-        license=openapi.License(name='Apache License'),
-    ),
-    public=True,
-    permission_classes=[AllowAny],
-)

@@ -1,4 +1,4 @@
 ---
 collections:
   - name: ansible.receptor
-    version: 2.0.3
+    version: 2.0.6

@@ -4,7 +4,6 @@
 from __future__ import absolute_import, unicode_literals
 from django.urls import include, re_path

 from awx import MODE
 from awx.api.generics import LoggedLoginView, LoggedLogoutView
 from awx.api.views.root import (
     ApiRootView,

@@ -148,21 +147,21 @@ v2_urls = [


 app_name = 'api'

+# Import schema views (needed for both development and testing)
+from awx.api.schema import schema_view, swagger_ui_view, redoc_view
+
 urlpatterns = [
     re_path(r'^$', ApiRootView.as_view(), name='api_root_view'),
     re_path(r'^(?P<version>(v2))/', include(v2_urls)),
     re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
     re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
+    # Schema endpoints (available in all modes for API documentation and testing)
+    re_path(r'^schema/$', schema_view, name='schema-json'),
+    re_path(r'^docs/$', swagger_ui_view, name='schema-swagger-ui'),
+    re_path(r'^redoc/$', redoc_view, name='schema-redoc'),
 ]
 if MODE == 'development':
     # Only include these if we are in the development environment
-    from awx.api.swagger import schema_view
-
     from awx.api.urls.debug import urls as debug_urls

-    urlpatterns += [
-        re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
-        re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
-        re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
-    ]
-    urlpatterns += [re_path(r'^debug/', include(debug_urls))]
+    urlpatterns += [re_path(r'^debug/', include(debug_urls))]

@@ -59,7 +59,7 @@ class ApiRootView(APIView):
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
         data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
         if MODE == 'development':
-            data['swagger'] = drf_reverse('api:schema-swagger-ui')
+            data['docs'] = drf_reverse('api:schema-swagger-ui')
         return Response(data)

@@ -9,7 +9,7 @@ from awx.main.dispatch import get_task_queuename
 logger = logging.getLogger('awx.main.scheduler')


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=300, on_duplicate='discard')
 def send_subsystem_metrics():
     DispatcherMetrics().send_metrics()
     CallbackReceiverMetrics().send_metrics()

@@ -5,6 +5,7 @@ import time
 from uuid import uuid4

 from dispatcherd.publish import submit_task
+from dispatcherd.processors.blocker import Blocker
 from dispatcherd.utils import resolve_callable

 from django_guid import get_guid

@@ -60,13 +61,17 @@ class task:
         print(f"Time I was dispatched: {dispatch_time}")
     """

-    def __init__(self, queue=None, bind_kwargs=None):
+    def __init__(self, queue=None, bind_kwargs=None, timeout=None, on_duplicate=None):
         self.queue = queue
         self.bind_kwargs = bind_kwargs
+        self.timeout = timeout
+        self.on_duplicate = on_duplicate

     def __call__(self, fn=None):
         queue = self.queue
         bind_kwargs = self.bind_kwargs
+        timeout = self.timeout
+        on_duplicate = self.on_duplicate

         class PublisherMixin(object):
             queue = None

@@ -102,7 +107,19 @@ class task:
             if flag_enabled('FEATURE_DISPATCHERD_ENABLED'):
                 # At this point we have the import string, and submit_task wants the method, so back to that
                 actual_task = resolve_callable(cls.name)
-                return submit_task(actual_task, args=args, kwargs=kwargs, queue=queue, uuid=uuid, **kw)
+                processor_options = ()
+                if on_duplicate is not None:
+                    processor_options = (Blocker.Params(on_duplicate=on_duplicate),)
+                return submit_task(
+                    actual_task,
+                    args=args,
+                    kwargs=kwargs,
+                    queue=queue,
+                    uuid=uuid,
+                    timeout=timeout,
+                    processor_options=processor_options,
+                    **kw,
+                )
         except Exception:
             logger.exception(f"[DISPATCHER] Failed to check for alternative dispatcherd implementation for {cls.name}")
             # Continue with original implementation if anything fails

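For illustration, a minimal sketch of the extended decorator in use, matching the call sites in this changeset (timeout is in seconds; 'discard' and 'queue_one' are the on_duplicate values used here; the task body and the task_awx alias are assumptions):

    from awx.main.dispatch import get_task_queuename
    from awx.main.dispatch.publish import task as task_awx  # assumed alias, matching the call sites

    @task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
    def example_cleanup():
        # Hypothetical task body: duplicate submissions are handled by the
        # Blocker processor, and the run is bounded by the one-hour timeout.
        ...
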
@@ -38,13 +38,13 @@ class Migration(migrations.Migration):
     ]

     operations = [
-        migrations.RunPython(rename_jts, migrations.RunPython.noop),
-        migrations.RunPython(rename_projects, migrations.RunPython.noop),
         migrations.AddField(
             model_name='unifiedjobtemplate',
             name='org_unique',
             field=models.BooleanField(blank=True, default=True, editable=False, help_text='Used internally to selectively enforce database constraint on name'),
         ),
+        migrations.RunPython(rename_jts, migrations.RunPython.noop),
+        migrations.RunPython(rename_projects, migrations.RunPython.noop),
         migrations.RunPython(rename_wfjt, migrations.RunPython.noop),
         migrations.RunPython(change_inventory_source_org_unique, migrations.RunPython.noop),
         migrations.AddConstraint(

@@ -159,7 +159,7 @@ def cleanup_old_indirect_host_entries() -> None:
     IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete()


-@task(queue=get_task_queuename)
+@task(queue=get_task_queuename, timeout=3600 * 5)
 def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
     try:
         job = Job.objects.get(id=job_id)

@@ -201,7 +201,7 @@ def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
         logger.exception(f'Error processing indirect host data for job_id={job_id}')


-@task(queue=get_task_queuename)
+@task(queue=get_task_queuename, timeout=3600 * 5)
 def cleanup_and_save_indirect_host_entries_fallback() -> None:
     if not flag_enabled("FEATURE_INDIRECT_NODE_COUNTING_ENABLED"):
         return

@@ -1321,7 +1321,7 @@ class RunProjectUpdate(BaseTask):

         galaxy_creds_are_defined = project_update.project.organization and project_update.project.organization.galaxy_credentials.exists()
         if not galaxy_creds_are_defined and (settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED):
-            logger.warning('Galaxy role/collection syncing is enabled, but no credentials are configured for {project_update.project.organization}.')
+            logger.warning(f'Galaxy role/collection syncing is enabled, but no credentials are configured for {project_update.project.organization}.')

         extra_vars.update(
             {

@@ -1346,7 +1346,7 @@ class RunProjectUpdate(BaseTask):
             extra_vars['scm_refspec'] = project_update.scm_refspec
         elif project_update.project.allow_override:
             # If branch is override-able, do extra fetch for all branches
-            extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*'
+            extra_vars['scm_refspec'] = '+refs/heads/*:refs/remotes/origin/*'

         if project_update.scm_type == 'archive':
             # for raw archive, prevent error moving files between volumes

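The leading + on the new refspec marks the fetch as forced, so remote-tracking refs still update when an upstream branch is rewritten (a non-fast-forward change such as an amended commit, which is exactly what the new test_amend_commit live test later in this changeset exercises).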
@@ -852,7 +852,7 @@ def reload_receptor():
         raise RuntimeError("Receptor reload failed")


-@task_awx()
+@task_awx(on_duplicate='queue_one')
 def write_receptor_config():
     """
     This task runs async on each control node, K8S only.

@@ -875,7 +875,7 @@ def write_receptor_config():
     reload_receptor()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, on_duplicate='discard')
 def remove_deprovisioned_node(hostname):
     InstanceLink.objects.filter(source__hostname=hostname).update(link_state=InstanceLink.States.REMOVING)
     InstanceLink.objects.filter(target__instance__hostname=hostname).update(link_state=InstanceLink.States.REMOVING)

@@ -184,7 +184,7 @@ def inform_cluster_of_shutdown():
     logger.warning("Normal shutdown processed for instance %s; instance removed from capacity pool.", inst.hostname)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5)
 def migrate_jsonfield(table, pkfield, columns):
     batchsize = 10000
     with advisory_lock(f'json_migration_{table}', wait=False) as acquired:

@@ -230,7 +230,7 @@ def migrate_jsonfield(table, pkfield, columns):
     logger.warning(f"Migration of {table} to jsonb is finished.")


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
 def apply_cluster_membership_policies():
     from awx.main.signals import disable_activity_stream

@@ -342,7 +342,7 @@ def apply_cluster_membership_policies():
     logger.debug('Cluster policy computation finished in {} seconds'.format(time.time() - started_compute))


-@task_awx(queue='tower_settings_change')
+@task_awx(queue='tower_settings_change', timeout=600)
 def clear_setting_cache(setting_keys):
     # log that cache is being cleared
     logger.info(f"clear_setting_cache of keys {setting_keys}")

@@ -355,7 +355,7 @@ def clear_setting_cache(setting_keys):
     cache.delete_many(cache_keys)


-@task_awx(queue='tower_broadcast_all')
+@task_awx(queue='tower_broadcast_all', timeout=600)
 def delete_project_files(project_path):
     # TODO: possibly implement some retry logic
     lock_file = project_path + '.lock'

@@ -383,7 +383,7 @@ def profile_sql(threshold=1, minutes=1):
     logger.error('SQL QUERIES >={}s ENABLED FOR {} MINUTE(S)'.format(threshold, minutes))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=1800)
 def send_notifications(notification_list, job_id=None):
     if not isinstance(notification_list, list):
         raise TypeError("notification_list should be of type list")

@@ -428,13 +428,13 @@ def events_processed_hook(unified_job):
     save_indirect_host_entries.delay(unified_job.id)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5, on_duplicate='discard')
 def gather_analytics():
     if is_run_threshold_reached(getattr(settings, 'AUTOMATION_ANALYTICS_LAST_GATHER', None), settings.AUTOMATION_ANALYTICS_GATHER_INTERVAL):
         analytics.gather()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=600, on_duplicate='queue_one')
 def purge_old_stdout_files():
     nowtime = time.time()
     for f in os.listdir(settings.JOBOUTPUT_ROOT):

@@ -496,37 +496,18 @@ class CleanupImagesAndFiles:
         cls.run_remote(this_inst, **kwargs)


-@task_awx(queue='tower_broadcast_all')
+@task_awx(queue='tower_broadcast_all', timeout=3600)
 def handle_removed_image(remove_images=None):
     """Special broadcast invocation of this method to handle case of deleted EE"""
     CleanupImagesAndFiles.run(remove_images=remove_images, file_pattern='')


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
 def cleanup_images_and_files():
     CleanupImagesAndFiles.run(image_prune=True)


-@task_awx(queue=get_task_queuename)
-def cluster_node_health_check(node):
-    """
-    Used for the health check endpoint, refreshes the status of the instance, but must be ran on target node
-    """
-    if node == '':
-        logger.warning('Local health check incorrectly called with blank string')
-        return
-    elif node != settings.CLUSTER_HOST_ID:
-        logger.warning(f'Local health check for {node} incorrectly sent to {settings.CLUSTER_HOST_ID}')
-        return
-    try:
-        this_inst = Instance.objects.me()
-    except Instance.DoesNotExist:
-        logger.warning(f'Instance record for {node} missing, could not check capacity.')
-        return
-    this_inst.local_health_check()
-
-
-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=600, on_duplicate='queue_one')
 def execution_node_health_check(node):
     if node == '':
         logger.warning('Remote health check incorrectly called with blank string')

@@ -850,7 +831,7 @@ def _heartbeat_handle_lost_instances(lost_instances, this_inst):
     logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=1800, on_duplicate='queue_one')
 def awx_receptor_workunit_reaper():
     """
     When an AWX job is launched via receptor, files such as status, stdin, and stdout are created

@@ -896,7 +877,7 @@ def awx_receptor_workunit_reaper():
     administrative_workunit_reaper(receptor_work_list)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=1800, on_duplicate='queue_one')
 def awx_k8s_reaper():
     if not settings.RECEPTOR_RELEASE_WORK:
         return

@@ -919,7 +900,7 @@ def awx_k8s_reaper():
     logger.exception("Failed to delete orphaned pod {} from {}".format(job.log_format, group))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5, on_duplicate='discard')
 def awx_periodic_scheduler():
     lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
     with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:

@@ -978,7 +959,7 @@ def awx_periodic_scheduler():
     emit_channel_notification('schedules-changed', dict(id=schedule.id, group_name="schedules"))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600)
 def handle_failure_notifications(task_ids):
     """A task-ified version of the method that sends notifications."""
     found_task_ids = set()

@@ -993,7 +974,7 @@ def handle_failure_notifications(task_ids):
     logger.warning(f'Could not send notifications for {deleted_tasks} because they were not found in the database')


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5)
 def update_inventory_computed_fields(inventory_id):
     """
     Signal handler and wrapper around inventory.update_computed_fields to

@@ -1043,7 +1024,7 @@ def update_smart_memberships_for_inventory(smart_inventory):
     return False


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
 def update_host_smart_inventory_memberships():
     smart_inventories = Inventory.objects.filter(kind='smart', host_filter__isnull=False, pending_deletion=False)
     changed_inventories = set([])

@@ -1059,7 +1040,7 @@ def update_host_smart_inventory_memberships():
         smart_inventory.update_computed_fields()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5)
 def delete_inventory(inventory_id, user_id, retries=5):
     # Delete inventory as user
     if user_id is None:

@@ -1121,7 +1102,7 @@ def _reconstruct_relationships(copy_mapping):
     new_obj.save()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=600)
 def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, permission_check_func=None):
     logger.debug('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))

@@ -1176,7 +1157,7 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
     update_inventory_computed_fields.delay(new_obj.id)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='discard')
 def periodic_resource_sync():
     if not getattr(settings, 'RESOURCE_SERVER', None):
         logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")

@@ -7,7 +7,6 @@ from django.core.serializers.json import DjangoJSONEncoder
 from django.utils.functional import Promise
 from django.utils.encoding import force_str

-from drf_yasg.codecs import OpenAPICodecJson
 import pytest

 from awx.api.versioning import drf_reverse

@@ -43,10 +42,10 @@ class TestSwaggerGeneration:
     @pytest.fixture(autouse=True, scope='function')
     def _prepare(self, get, admin):
         if not self.__class__.JSON:
-            url = drf_reverse('api:schema-swagger-ui') + '?format=openapi'
+            # drf-spectacular returns OpenAPI schema directly from schema endpoint
+            url = drf_reverse('api:schema-json') + '?format=json'
             response = get(url, user=admin)
-            codec = OpenAPICodecJson([])
-            data = codec.generate_swagger_object(response.data)
+            data = response.data
             if response.has_header('X-Deprecated-Paths'):
                 data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])

@@ -49,7 +49,7 @@ def test_metrics_counts(organization_factory, job_template_factory, workflow_job
     for gauge in gauges:
         for sample in gauge.samples:
             # name, label, value, timestamp, exemplar
-            name, _, value, _, _ = sample
+            name, _, value, _, _, _ = sample
             assert EXPECTED_VALUES[name] == value

@@ -1,36 +1,30 @@
 import pytest
-from django.test import override_settings

 from flags.state import get_flags, flag_state
+from ansible_base.feature_flags.models import AAPFlag
+from ansible_base.feature_flags.utils import create_initial_data as seed_feature_flags
+from django.conf import settings
 from awx.main.models import User


-@override_settings(FLAGS={})
 @pytest.mark.django_db
 def test_feature_flags_list_endpoint(get):
-    bob = User.objects.create(username='bob', password='test_user', is_superuser=False)
-
-    url = "/api/v2/feature_flags_state/"
+    bob = User.objects.create(username='bob', password='test_user', is_superuser=True)
+    url = "/api/v2/feature_flags/states/"
     response = get(url, user=bob, expect=200)
-    assert len(response.data) == 0
+    assert len(get_flags()) > 0
+    assert len(response.data["results"]) == len(get_flags())


-@override_settings(
-    FLAGS={
-        "FEATURE_SOME_PLATFORM_FLAG_ENABLED": [
-            {"condition": "boolean", "value": False},
-            {"condition": "before date", "value": "2022-06-01T12:00Z"},
-        ],
-        "FEATURE_SOME_PLATFORM_FLAG_FOO_ENABLED": [
-            {"condition": "boolean", "value": True},
-        ],
-    }
-)
 @pytest.mark.django_db
-def test_feature_flags_list_endpoint_override(get):
-    bob = User.objects.create(username='bob', password='test_user', is_superuser=False)
-
-    url = "/api/v2/feature_flags_state/"
+@pytest.mark.parametrize('flag_val', (True, False))
+def test_feature_flags_list_endpoint_override(get, flag_val):
+    bob = User.objects.create(username='bob', password='test_user', is_superuser=True)
+
+    AAPFlag.objects.all().delete()
+    flag_name = "FEATURE_DISPATCHERD_ENABLED"
+    setattr(settings, flag_name, flag_val)
+    seed_feature_flags()
+    url = "/api/v2/feature_flags/states/"
     response = get(url, user=bob, expect=200)
-    assert len(response.data) == 2
-    assert response.data["FEATURE_SOME_PLATFORM_FLAG_ENABLED"] is False
-    assert response.data["FEATURE_SOME_PLATFORM_FLAG_FOO_ENABLED"] is True
+    assert len(response.data["results"]) == 6
+    assert flag_state(flag_name) == flag_val

@@ -93,6 +93,7 @@ def test_default_cred_types():
     'gpg_public_key',
     'hashivault_kv',
     'hashivault_ssh',
+    'hcp_terraform',
     'insights',
     'kubernetes_bearer_token',
     'net',

@@ -5,11 +5,8 @@ import signal
 import time
 import yaml
 from unittest import mock
-from copy import deepcopy

+from flags.state import disable_flag, enable_flag
 from django.utils.timezone import now as tz_now
-from django.conf import settings
-from django.test.utils import override_settings
 import pytest

 from awx.main.models import Job, WorkflowJob, Instance

@@ -302,13 +299,14 @@ class TestTaskDispatcher:
         assert str(result) == "No module named 'awx.foo'"  # noqa


+@pytest.mark.django_db
 class TestTaskPublisher:
     @pytest.fixture(autouse=True)
     def _disable_dispatcherd(self):
-        ffs = deepcopy(settings.FLAGS)
-        ffs['FEATURE_DISPATCHERD_ENABLED'][0]['value'] = False
-        with override_settings(FLAGS=ffs):
-            yield
+        flag_name = "FEATURE_DISPATCHERD_ENABLED"
+        disable_flag(flag_name)
+        yield
+        enable_flag(flag_name)

     def test_function_callable(self):
         assert add(2, 2) == 4

@@ -139,7 +139,7 @@ def podman_image_generator():

 @pytest.fixture
 def project_factory(post, default_org, admin):
-    def _rf(scm_url=None, local_path=None):
+    def _rf(scm_url=None, local_path=None, **extra_kwargs):
         proj_kwargs = {}
         if local_path:
             # manual path

@@ -153,6 +153,9 @@ def project_factory(post, default_org, admin):
         else:
             raise RuntimeError('Need to provide scm_url or local_path')

+        if extra_kwargs:
+            proj_kwargs.update(extra_kwargs)
+
         proj_kwargs['name'] = project_name
         proj_kwargs['organization'] = default_org.id

@@ -1,2 +1,25 @@
+import os
+import subprocess
+
+import pytest
+
+from awx.main.tests.live.tests.conftest import wait_for_job
+
+
 def test_git_file_project(live_tmp_folder, run_job_from_playbook):
     run_job_from_playbook('test_git_file_project', 'debug.yml', scm_url=f'file://{live_tmp_folder}/debug')
+
+
+@pytest.mark.parametrize('allow_override', [True, False])
+def test_amend_commit(live_tmp_folder, project_factory, allow_override):
+    proj = project_factory(scm_url=f'file://{live_tmp_folder}/debug', allow_override=allow_override)
+    assert proj.current_job
+    wait_for_job(proj.current_job)
+    assert proj.allow_override is allow_override
+
+    source_dir = os.path.join(live_tmp_folder, 'debug')
+    subprocess.run('git commit --amend --no-edit', cwd=source_dir, shell=True)
+
+    update = proj.update()
+    update.signal_start()
+    wait_for_job(update)

273
awx/main/tests/unit/api/test_schema.py
Normal file
273
awx/main/tests/unit/api/test_schema.py
Normal file
@@ -0,0 +1,273 @@
import warnings
from unittest.mock import Mock, patch

from rest_framework.permissions import IsAuthenticated

from awx.api.schema import (
    CustomAutoSchema,
    AuthenticatedSpectacularAPIView,
    AuthenticatedSpectacularSwaggerView,
    AuthenticatedSpectacularRedocView,
)


class TestCustomAutoSchema:
    """Unit tests for CustomAutoSchema class."""

    def test_get_tags_with_swagger_topic(self):
        """Test get_tags returns swagger_topic when available."""
        view = Mock()
        view.swagger_topic = 'custom_topic'
        view.get_serializer = Mock(return_value=Mock())

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Custom_Topic']

    def test_get_tags_with_serializer_meta_model(self):
        """Test get_tags returns model verbose_name_plural from serializer."""
        # Create a mock model with verbose_name_plural
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'test models'

        # Create a mock serializer with Meta.model
        mock_serializer = Mock()
        mock_serializer.Meta.model = mock_model

        view = Mock(spec=[])  # View without swagger_topic
        view.get_serializer = Mock(return_value=mock_serializer)

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Test Models']

    def test_get_tags_with_view_model(self):
        """Test get_tags returns model verbose_name_plural from view."""
        # Create a mock model with verbose_name_plural
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'view models'

        view = Mock(spec=['model'])  # View without swagger_topic or get_serializer
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['View Models']

    def test_get_tags_without_get_serializer(self):
        """Test get_tags when view doesn't have get_serializer method."""
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'test objects'

        view = Mock(spec=['model'])
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Test Objects']
    def test_get_tags_serializer_exception_with_warning(self):
        """Test get_tags handles exception in get_serializer with warning."""
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'fallback models'

        view = Mock(spec=['get_serializer', 'model', '__class__'])
        view.__class__.__name__ = 'TestView'
        view.get_serializer = Mock(side_effect=Exception('Serializer error'))
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            tags = schema.get_tags()

        # Check that a warning was raised
        assert len(w) == 1
        assert 'TestView.get_serializer() raised an exception' in str(w[0].message)

        # Should still get tags from view.model
        assert tags == ['Fallback Models']

    def test_get_tags_serializer_without_meta_model(self):
        """Test get_tags when serializer doesn't have Meta.model."""
        mock_serializer = Mock(spec=[])  # No Meta attribute

        view = Mock(spec=['get_serializer'])
        view.__class__.__name__ = 'NoMetaView'
        view.get_serializer = Mock(return_value=mock_serializer)

        schema = CustomAutoSchema()
        schema.view = view

        with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=['Default Tag']) as mock_super:
            tags = schema.get_tags()
            mock_super.assert_called_once()
        assert tags == ['Default Tag']

    def test_get_tags_fallback_to_super(self):
        """Test get_tags falls back to parent class method."""
        view = Mock(spec=['get_serializer'])
        view.get_serializer = Mock(return_value=Mock(spec=[]))

        schema = CustomAutoSchema()
        schema.view = view

        with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=['Super Tag']) as mock_super:
            tags = schema.get_tags()
            mock_super.assert_called_once()
        assert tags == ['Super Tag']

    def test_get_tags_empty_with_warning(self):
        """Test get_tags returns 'api' fallback when no tags can be determined."""
        view = Mock(spec=['get_serializer'])
        view.__class__.__name__ = 'EmptyView'
        view.get_serializer = Mock(return_value=Mock(spec=[]))

        schema = CustomAutoSchema()
        schema.view = view

        with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=[]):
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                tags = schema.get_tags()

        # Check that a warning was raised
        assert len(w) == 1
        assert 'Could not determine tags for EmptyView' in str(w[0].message)

        # Should fallback to 'api'
        assert tags == ['api']
    def test_get_tags_swagger_topic_title_case(self):
        """Test that swagger_topic is properly title-cased."""
        view = Mock()
        view.swagger_topic = 'multi_word_topic'
        view.get_serializer = Mock(return_value=Mock())

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Multi_Word_Topic']

    def test_is_deprecated_true(self):
        """Test is_deprecated returns True when view has deprecated=True."""
        view = Mock()
        view.deprecated = True

        schema = CustomAutoSchema()
        schema.view = view

        assert schema.is_deprecated() is True

    def test_is_deprecated_false(self):
        """Test is_deprecated returns False when view has deprecated=False."""
        view = Mock()
        view.deprecated = False

        schema = CustomAutoSchema()
        schema.view = view

        assert schema.is_deprecated() is False

    def test_is_deprecated_missing_attribute(self):
        """Test is_deprecated returns False when view doesn't have deprecated attribute."""
        view = Mock(spec=[])

        schema = CustomAutoSchema()
        schema.view = view

        assert schema.is_deprecated() is False

    def test_get_tags_serializer_meta_without_model(self):
        """Test get_tags when serializer has Meta but no model attribute."""
        mock_serializer = Mock()
        mock_serializer.Meta = Mock(spec=[])  # Meta exists but no model

        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'backup models'

        view = Mock(spec=['get_serializer', 'model'])
        view.get_serializer = Mock(return_value=mock_serializer)
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        # Should fall back to view.model
        assert tags == ['Backup Models']
    def test_get_tags_complex_scenario_exception_recovery(self):
        """Test complex scenario where serializer fails but view.model exists."""
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'recovery models'

        view = Mock(spec=['get_serializer', 'model', '__class__'])
        view.__class__.__name__ = 'ComplexView'
        view.get_serializer = Mock(side_effect=ValueError('Invalid serializer'))
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            tags = schema.get_tags()

        # Should have warned about the exception
        assert len(w) == 1
        assert 'ComplexView.get_serializer() raised an exception' in str(w[0].message)

        # But still recovered and got tags from view.model
        assert tags == ['Recovery Models']

    def test_get_tags_priority_order(self):
        """Test that get_tags respects priority: swagger_topic > serializer.Meta.model > view.model."""
        # Set up a view with all three options
        mock_model_view = Mock()
        mock_model_view._meta.verbose_name_plural = 'view models'

        mock_model_serializer = Mock()
        mock_model_serializer._meta.verbose_name_plural = 'serializer models'

        mock_serializer = Mock()
        mock_serializer.Meta.model = mock_model_serializer

        view = Mock()
        view.swagger_topic = 'priority_topic'
        view.get_serializer = Mock(return_value=mock_serializer)
        view.model = mock_model_view

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        # swagger_topic should take priority
        assert tags == ['Priority_Topic']


class TestAuthenticatedSchemaViews:
    """Unit tests for authenticated schema view classes."""

    def test_authenticated_spectacular_api_view_requires_authentication(self):
        """Test that AuthenticatedSpectacularAPIView requires authentication."""
        assert IsAuthenticated in AuthenticatedSpectacularAPIView.permission_classes

    def test_authenticated_spectacular_swagger_view_requires_authentication(self):
        """Test that AuthenticatedSpectacularSwaggerView requires authentication."""
        assert IsAuthenticated in AuthenticatedSpectacularSwaggerView.permission_classes

    def test_authenticated_spectacular_redoc_view_requires_authentication(self):
        """Test that AuthenticatedSpectacularRedocView requires authentication."""
        assert IsAuthenticated in AuthenticatedSpectacularRedocView.permission_classes
@@ -9,6 +9,9 @@ LOCAL_SETTINGS = (
    'DEBUG',
    'NAMED_URL_GRAPH',
    'DISPATCHER_MOCK_PUBLISH',
    # Platform flags are managed by the platform flags system and have environment-specific defaults
    'FEATURE_DISPATCHERD_ENABLED',
    'FEATURE_INDIRECT_NODE_COUNTING_ENABLED',
)


@@ -28,7 +31,7 @@ def test_default_settings():
            continue
        default_val = getattr(settings.default_settings, k, None)
        snapshot_val = settings.DEFAULTS_SNAPSHOT[k]
        assert default_val == snapshot_val, f'Setting for {k} does not match shapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'
        assert default_val == snapshot_val, f'Setting for {k} does not match snapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'


def test_django_conf_settings_is_awx_settings():
@@ -69,3 +72,27 @@ def test_merge_application_name():
    result = merge_application_name(settings)["DATABASES__default__OPTIONS__application_name"]
    assert result.startswith("awx-")
    assert "test-cluster" in result


def test_development_defaults_feature_flags(monkeypatch):
    """Ensure that development_defaults.py sets the correct feature flags."""
    monkeypatch.setenv('AWX_MODE', 'development')

    # Import the development_defaults module directly to trigger coverage of the new lines
    import importlib.util
    import os

    spec = importlib.util.spec_from_file_location("development_defaults", os.path.join(os.path.dirname(__file__), "../../../settings/development_defaults.py"))
    development_defaults = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(development_defaults)

    # Also import through the development settings to ensure both paths are tested
    from awx.settings.development import FEATURE_INDIRECT_NODE_COUNTING_ENABLED, FEATURE_DISPATCHERD_ENABLED

    # Verify the feature flags are set correctly in both the module and settings
    assert hasattr(development_defaults, 'FEATURE_INDIRECT_NODE_COUNTING_ENABLED')
    assert development_defaults.FEATURE_INDIRECT_NODE_COUNTING_ENABLED is True
    assert hasattr(development_defaults, 'FEATURE_DISPATCHERD_ENABLED')
    assert development_defaults.FEATURE_DISPATCHERD_ENABLED is True
    assert FEATURE_INDIRECT_NODE_COUNTING_ENABLED is True
    assert FEATURE_DISPATCHERD_ENABLED is True

@@ -461,6 +461,7 @@ class TestExtraVarSanitation(TestJobExecution):


class TestGenericRun:
    @pytest.mark.django_db(reset_sequences=True)
    def test_generic_failure(self, patch_Job, execution_environment, mock_me, mock_create_partition):
        job = Job(status='running', inventory=Inventory(), project=Project(local_path='/projects/_23_foo'))
        job.websocket_emit_status = mock.Mock()
@@ -545,6 +546,7 @@ class TestGenericRun:
        private_data_dir, extra_vars, safe_dict = call_args
        assert extra_vars['super_secret'] == "CLASSIFIED"

    @pytest.mark.django_db
    def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment, mock_me):
        job = Job(project=Project(), inventory=Inventory())
        job.execution_environment = execution_environment
@@ -845,6 +847,7 @@ class TestJobCredentials(TestJobExecution):
            [None, '0'],
        ],
    )
    @pytest.mark.django_db
    def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir, mock_me):
        task = jobs.RunJob()
        task.instance = job
@@ -901,6 +904,7 @@ class TestJobCredentials(TestJobExecution):

        assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD

    @pytest.mark.django_db
    def test_awx_task_env(self, settings, private_data_dir, job, mock_me):
        settings.AWX_TASK_ENV = {'FOO': 'BAR'}
        task = jobs.RunJob()
@@ -1086,6 +1090,70 @@ class TestProjectUpdateCredentials(TestJobExecution):
        assert env['FOO'] == 'BAR'


@pytest.mark.django_db
class TestProjectUpdateRefspec(TestJobExecution):
    @pytest.fixture
    def project_update(self, execution_environment):
        org = Organization(pk=1)
        proj = Project(pk=1, organization=org, allow_override=True)
        project_update = ProjectUpdate(pk=1, project=proj, scm_type='git')
        project_update.websocket_emit_status = mock.Mock()
        project_update.execution_environment = execution_environment
        return project_update

    def test_refspec_with_allow_override_includes_plus_prefix(self, project_update, private_data_dir, mock_me):
        """Test that refspec includes + prefix to allow non-fast-forward updates when allow_override is True"""
        task = jobs.RunProjectUpdate()
        task.instance = project_update

        # Call build_extra_vars_file which sets the refspec
        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
            task.build_extra_vars_file(project_update, private_data_dir)

        # Read the extra vars file to check the refspec
        with open(os.path.join(private_data_dir, 'env', 'extravars')) as fd:
            extra_vars = yaml.load(fd, Loader=SafeLoader)

        # Verify the refspec includes the + prefix for force updates
        assert 'scm_refspec' in extra_vars
        assert extra_vars['scm_refspec'] == '+refs/heads/*:refs/remotes/origin/*'

    def test_custom_refspec_not_overridden(self, project_update, private_data_dir, mock_me):
        """Test that custom user-provided refspec is not overridden"""
        task = jobs.RunProjectUpdate()
        task.instance = project_update
        project_update.scm_refspec = 'refs/pull/*/head:refs/remotes/origin/pr/*'

        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
            task.build_extra_vars_file(project_update, private_data_dir)

        with open(os.path.join(private_data_dir, 'env', 'extravars')) as fd:
            extra_vars = yaml.load(fd, Loader=SafeLoader)

        # Custom refspec should be preserved
        assert extra_vars['scm_refspec'] == 'refs/pull/*/head:refs/remotes/origin/pr/*'

    def test_no_refspec_without_allow_override(self, execution_environment, private_data_dir, mock_me):
        """Test that no refspec is set when allow_override is False"""
        org = Organization(pk=1)
        proj = Project(pk=1, organization=org, allow_override=False)
        project_update = ProjectUpdate(pk=1, project=proj, scm_type='git')
        project_update.websocket_emit_status = mock.Mock()
        project_update.execution_environment = execution_environment

        task = jobs.RunProjectUpdate()
        task.instance = project_update

        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
            task.build_extra_vars_file(project_update, private_data_dir)

        with open(os.path.join(private_data_dir, 'env', 'extravars')) as fd:
            extra_vars = yaml.load(fd, Loader=SafeLoader)

        # No refspec should be set
        assert 'scm_refspec' not in extra_vars


class TestInventoryUpdateCredentials(TestJobExecution):
    @pytest.fixture
    def inventory_update(self, execution_environment):

@@ -139,7 +139,7 @@ def construct_rsyslog_conf_template(settings=settings):
    return tmpl


@task_awx(queue='rsyslog_configurer')
@task_awx(queue='rsyslog_configurer', timeout=600, on_duplicate='queue_one')
def reconfigure_rsyslog():
    tmpl = construct_rsyslog_conf_template()
    # Write config to a temp file then move it to preserve atomicity

@@ -1,8 +1,14 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
from ansible_base.rbac.models import RoleDefinition
from ansible_base.resource_registry.shared_types import RoleDefinitionType

from ansible_base.resource_registry.shared_types import (
    FeatureFlagType,
    RoleDefinitionType,
    OrganizationType,
    TeamType,
    UserType,
)
from ansible_base.feature_flags.models import AAPFlag
from awx.main import models


@@ -15,7 +21,11 @@ RESOURCE_LIST = (
        models.Organization,
        shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
    ),
    ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
    ResourceConfig(
        models.User,
        shared_resource=SharedResource(serializer=UserType, is_provider=False),
        name_field="username",
    ),
    ResourceConfig(
        models.Team,
        shared_resource=SharedResource(serializer=TeamType, is_provider=False),
@@ -25,4 +35,8 @@ RESOURCE_LIST = (
        RoleDefinition,
        shared_resource=SharedResource(serializer=RoleDefinitionType, is_provider=False),
    ),
    ResourceConfig(
        AAPFlag,
        shared_resource=SharedResource(serializer=FeatureFlagType, is_provider=False),
    ),
)

@@ -8,7 +8,6 @@ from ansible_base.lib.dynamic_config import (
    load_envvars,
    load_python_file_with_injected_context,
    load_standard_settings_files,
    toggle_feature_flags,
)
from .functions import (
    assert_production_settings,
@@ -71,12 +70,5 @@ DYNACONF.update(
    merge=True,
)

# Toggle feature flags based on installer settings
DYNACONF.update(
    toggle_feature_flags(DYNACONF),
    loader_identifier="awx.settings:toggle_feature_flags",
    merge=True,
)

# Update django.conf.settings with DYNACONF values
export(__name__, DYNACONF)

@@ -83,7 +83,7 @@ USE_I18N = True
USE_TZ = True

STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'ui', 'build', 'static'),
    os.path.join(BASE_DIR, 'ui', 'build'),
    os.path.join(BASE_DIR, 'static'),
]

@@ -375,15 +375,13 @@ REST_FRAMEWORK = {
    'VIEW_DESCRIPTION_FUNCTION': 'awx.api.generics.get_view_description',
    'NON_FIELD_ERRORS_KEY': '__all__',
    'DEFAULT_VERSION': 'v2',
    # For swagger schema generation
    # For OpenAPI schema generation with drf-spectacular
    # see https://github.com/encode/django-rest-framework/pull/6532
    'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.AutoSchema',
    'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
    # 'URL_FORMAT_OVERRIDE': None,
}

SWAGGER_SETTINGS = {
    'DEFAULT_AUTO_SCHEMA_CLASS': 'awx.api.swagger.CustomSwaggerAutoSchema',
}
# SWAGGER_SETTINGS removed - migrated to drf-spectacular (see SPECTACULAR_SETTINGS below)

AUTHENTICATION_BACKENDS = ('awx.main.backends.AWXModelBackend',)

@@ -1036,7 +1034,44 @@ ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
ANSIBLE_BASE_PERMISSION_MODEL = 'main.Permission'

# Defaults to be overridden by DAB
SPECTACULAR_SETTINGS = {}
SPECTACULAR_SETTINGS = {
    'TITLE': 'AWX API',
    'DESCRIPTION': 'AWX API Documentation',
    'VERSION': 'v2',
    'OAS_VERSION': '3.0.3',  # Set OpenAPI Specification version to 3.0.3
    'SERVE_INCLUDE_SCHEMA': False,
    'SCHEMA_PATH_PREFIX': r'/api/v[0-9]',
    'DEFAULT_GENERATOR_CLASS': 'drf_spectacular.generators.SchemaGenerator',
    'SCHEMA_COERCE_PATH_PK_SUFFIX': True,
    'CONTACT': {'email': 'controller-eng@redhat.com'},
    'LICENSE': {'name': 'Apache License'},
    'TERMS_OF_SERVICE': 'https://www.google.com/policies/terms/',
    # Use our custom schema class that handles swagger_topic and deprecated views
    'DEFAULT_SCHEMA_CLASS': 'awx.api.schema.CustomAutoSchema',
    'COMPONENT_SPLIT_REQUEST': True,
    'SWAGGER_UI_SETTINGS': {
        'deepLinking': True,
        'persistAuthorization': True,
        'displayOperationId': True,
    },
    # Resolve enum naming collisions with meaningful names
    'ENUM_NAME_OVERRIDES': {
        # Status field collisions
        'Status4e1Enum': 'UnifiedJobStatusEnum',
        'Status876Enum': 'JobStatusEnum',
        # Job type field collisions
        'JobType8b8Enum': 'JobTemplateJobTypeEnum',
        'JobType95bEnum': 'AdHocCommandJobTypeEnum',
        'JobType963Enum': 'ProjectUpdateJobTypeEnum',
        # Verbosity field collisions
        'Verbosity481Enum': 'JobVerbosityEnum',
        'Verbosity8cfEnum': 'InventoryUpdateVerbosityEnum',
        # Event field collision
        'Event4d3Enum': 'JobEventEnum',
        # Kind field collision
        'Kind362Enum': 'InventoryKindEnum',
    },
}
OAUTH2_PROVIDER = {}

# Add a postfix to the API URL patterns
@@ -1113,11 +1148,8 @@ OPA_REQUEST_TIMEOUT = 1.5  # The number of seconds after which the connection to
OPA_REQUEST_RETRIES = 2  # The number of retry attempts for connecting to the OPA server. Default is 2.

# feature flags
FLAG_SOURCES = ('flags.sources.SettingsFlagsSource',)
FLAGS = {
    'FEATURE_INDIRECT_NODE_COUNTING_ENABLED': [{'condition': 'boolean', 'value': False}],
    'FEATURE_DISPATCHERD_ENABLED': [{'condition': 'boolean', 'value': False}],
}
FEATURE_INDIRECT_NODE_COUNTING_ENABLED = False
FEATURE_DISPATCHERD_ENABLED = False

# Dispatcher worker lifetime. If set to None, workers will never be retired
# based on age. Note workers will finish their last task before retiring if

@@ -11,8 +11,6 @@ import socket
# /usr/lib64/python/mimetypes.py
import mimetypes

from dynaconf import post_hook

# awx-manage shell_plus --notebook
NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '9888', '--allow-root', '--no-browser']

@@ -41,11 +39,14 @@ PENDO_TRACKING_STATE = "off"
INSIGHTS_TRACKING_STATE = False

# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
INSTALLED_APPS = "@merge drf_yasg,debug_toolbar"
INSTALLED_APPS = "@merge drf_spectacular,debug_toolbar"
MIDDLEWARE = "@insert 0 debug_toolbar.middleware.DebugToolbarMiddleware"

DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}

# drf-spectacular settings for API schema generation
# SPECTACULAR_SETTINGS moved to defaults.py so it's available in all environments

# Configure a default UUID for development only.
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
@@ -67,11 +68,5 @@ AWX_DISABLE_TASK_MANAGERS = False
# Needed for launching runserver in debug mode
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================


# This modifies FLAGS set by defaults, must be deferred to run later
@post_hook
def set_dev_flags(settings):
    defaults_flags = settings.get("FLAGS", {})
    defaults_flags['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}]
    defaults_flags['FEATURE_DISPATCHERD_ENABLED'] = [{'condition': 'boolean', 'value': True}]
    return {'FLAGS': defaults_flags}
FEATURE_INDIRECT_NODE_COUNTING_ENABLED = True
FEATURE_DISPATCHERD_ENABLED = True

1305
docs/docsite/rst/contributor/API_REQUIREMENTS.rst
Normal file
File diff suppressed because it is too large
725
docs/docsite/rst/contributor/DJANGO_REQUIREMENTS.rst
Normal file
@@ -0,0 +1,725 @@
=====================================================
Django Development Requirements
=====================================================

**AWX Codebase Best Practices**

:Version: 1.0
:Date: September 2025
:Based on: AWX Enterprise Django Application Analysis
:Generated by: Claude Code AI

----

.. contents:: Table of Contents
   :depth: 3
   :local:

----

1. Project Structure
====================

1.1 Modular Application Architecture
------------------------------------

**REQUIRED**: Organize Django project with clear separation of concerns::

    awx/
    ├── __init__.py          # Version management and environment detection
    ├── main/                # Core business logic and models
    ├── api/                 # REST API layer (Django REST Framework)
    ├── ui/                  # Frontend integration
    ├── conf/                # Configuration management
    ├── settings/            # Environment-specific settings
    ├── templates/           # Django templates
    └── static/              # Static assets

**Requirements**:

- Each functional area must have its own Django app
- Use descriptive app names that reflect business domains
- Separate API logic from core business logic

1.2 Pre-Management Command Code
--------------------------------

This section describes the code that runs before every management command.

AWX persistent services (e.g. wsrelay, heartbeat, the dispatcher) all use management commands as their entry points, so if you want to write a new persistent service, make a management command for it (a sketch follows at the end of this section).

System jobs are implemented as management commands too.


**REQUIRED**: Implement custom Django management integration:

.. code-block:: python

    # awx/__init__.py
    def manage():
        """Custom management function with environment preparation"""
        prepare_env()
        from django.core.management import execute_from_command_line

        # Version validation for production
        if not MODE == 'development':
            validate_production_requirements()

        execute_from_command_line(sys.argv)

**Requirements**:

- Environment detection (development/production modes)
- Production deployment validation
- Custom version checking mechanisms
- Database version compatibility checks
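
A minimal sketch of such an entry point follows. The command and module names here are illustrative (the ``run_forever()`` service loop is hypothetical, not actual AWX code):

.. code-block:: python

    # awx/main/management/commands/run_example_service.py
    from django.core.management.base import BaseCommand


    class Command(BaseCommand):
        """Entry point for a hypothetical persistent service."""

        help = 'Run the example persistent service'

        def handle(self, *args, **options):
            # Hypothetical import; blocks for the lifetime of the service.
            # A supervisor (systemd, the container runtime) handles restarts.
            from awx.main.example_service import run_forever

            run_forever()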
----

2. Settings Management
======================

2.1 Environment-Based Settings Architecture
-------------------------------------------

**REQUIRED**: Use ``django-split-settings`` for modular configuration::

    # settings/defaults.py    - Base configuration
    # settings/development.py - Development overrides
    # settings/production.py  - Production security settings
    # settings/testing.py     - Test-specific configuration

**Settings Pattern**:

.. code-block:: python

    # development.py
    from .defaults import *
    from split_settings.tools import optional, include

    DEBUG = True
    ALLOWED_HOSTS = ['*']

    # Include optional local settings
    include(optional('local_settings.py'))

2.2 Sourcing config from files
-------------------------------

**REQUIRED**: Support sourcing configuration from multiple files (in a directory) on disk:

.. code-block:: python

    # External settings loading
    EXTERNAL_SETTINGS = os.environ.get('AWX_SETTINGS_FILE')
    if EXTERNAL_SETTINGS:
        include(EXTERNAL_SETTINGS, scope=locals())

----

3. URL Patterns and Routing
============================

3.1 Modular URL Architecture
-----------------------------

**REQUIRED**: Implement hierarchical URL organization with namespacing:

.. code-block:: python

    # urls.py
    def get_urlpatterns(prefix=None):
        """Dynamic URL pattern generation with prefix support"""
        if not prefix:
            prefix = '/'
        else:
            prefix = f'/{prefix}/'

        return [
            path(f'api{prefix}', include('awx.api.urls', namespace='api')),
            path(f'ui{prefix}', include('awx.ui.urls', namespace='ui')),
        ]

    urlpatterns = get_urlpatterns()

3.2 Environment-Specific URL Inclusion
--------------------------------------

**REQUIRED**: Conditional URL patterns based on environment:

This example allows the Django debug toolbar to work.

.. code-block:: python

    # Development-only URLs
    if settings.DEBUG:
        try:
            import debug_toolbar

            urlpatterns += [path('__debug__/', include(debug_toolbar.urls))]
        except ImportError:
            pass

**OPTIONAL**: If you want to include your own debug logic and endpoints:

.. code-block:: python

    if MODE == 'development':
        # Only include these if we are in the development environment
        from awx.api.swagger import schema_view

        from awx.api.urls.debug import urls as debug_urls

        urlpatterns += [re_path(r'^debug/', include(debug_urls))]
        urlpatterns += [
            re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
            re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
            re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
        ]

**Requirements**:

- Use Django's ``include()`` for modular organization
- Implement URL namespacing for API versioning (see the sketch after this list)
- Support dynamic URL prefix configuration
- Separate URL patterns by functional area
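
One payoff of namespacing is that URLs can be reversed by a stable, prefixed name. A minimal sketch, assuming the ``api`` namespace registered above (the view name ``job_template_list`` is hypothetical):

.. code-block:: python

    from django.urls import reverse

    # 'api' comes from include(..., namespace='api') above;
    # 'job_template_list' is an illustrative view name.
    url = reverse('api:job_template_list')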
----

4. Model Design
===============

4.1 Abstract Base Models
------------------------

**REQUIRED**: Use abstract base models for common functionality:

.. code-block:: python

    # models/base.py
    class BaseModel(models.Model):
        """Common fields and methods for all models"""
        created = models.DateTimeField(auto_now_add=True)
        modified = models.DateTimeField(auto_now=True)

        class Meta:
            abstract = True

    class AuditableModel(BaseModel):
        """Models requiring audit trail"""
        created_by = models.ForeignKey(User, on_delete=models.CASCADE)

        class Meta:
            abstract = True

4.2 Mixin-Based Architecture
----------------------------

**REQUIRED**: Implement reusable model behaviors through mixins:

.. code-block:: python

    # models/mixins.py
    class ResourceMixin(models.Model):
        """Common resource management functionality"""
        class Meta:
            abstract = True

    class ExecutionEnvironmentMixin(models.Model):
        """Execution environment configuration"""
        class Meta:
            abstract = True

4.3 Model Organization
----------------------

**REQUIRED**: Organize models by domain functionality::

    models/
    ├── __init__.py
    ├── base.py          # Abstract base models
    ├── mixins.py        # Reusable model behaviors
    ├── inventory.py     # Inventory-related models
    ├── jobs.py          # Job execution models
    ├── credential.py    # Credential management
    └── organization.py  # Organization models

**Requirements**:

- Keep one file per logical domain; once a domain grows too big, promote it to a folder instead. In the past, credentials were broken out into logical domains, and once they were moved out of AWX they were collapsed back down to a single file.
- Use consistent naming conventions
- Implement comprehensive model validation
- Custom managers for complex queries (see the sketch after this list)
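
A minimal sketch of a custom manager; the model and field names are hypothetical, not actual AWX code:

.. code-block:: python

    # models/managers.py -- illustrative only
    from django.db import models


    class JobManager(models.Manager):
        """Hide a complex query behind a readable name."""

        def running(self):
            # select_related avoids an extra query per job for the project FK
            return self.get_queryset().filter(status='running').select_related('project')


    class Job(models.Model):
        status = models.CharField(max_length=32)
        project = models.ForeignKey('Project', on_delete=models.CASCADE)

        objects = JobManager()

Call sites then read as ``Job.objects.running()`` instead of repeating the filter everywhere.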
----

5. REST API Development
=======================

5.1 Custom Authentication Classes
----------------------------------

Best practice is to log every terminal (return) path of authentication, not just the successful ones.

**REQUIRED**: Implement domain-specific authentication with logging:

.. code-block:: python

    # api/authentication.py
    class LoggedBasicAuthentication(authentication.BasicAuthentication):
        """Basic authentication with request logging"""

        def authenticate(self, request):
            if not settings.AUTH_BASIC_ENABLED:
                return

            ret = super().authenticate(request)
            if ret:
                username = ret[0].username if ret[0] else '<none>'
                logger.info(
                    f"User {username} performed {request.method} "
                    f"to {request.path} through the API"
                )
            return ret
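
Note that the block above only logs the successful path. To follow the log-every-terminal-path guidance, the failing and anonymous paths can be logged as well. A sketch under the same assumptions (``exceptions`` here is assumed to be ``rest_framework.exceptions``, and the message wording is a suggestion, not AWX's actual code):

.. code-block:: python

    def authenticate(self, request):
        if not settings.AUTH_BASIC_ENABLED:
            return None

        try:
            ret = super().authenticate(request)
        except exceptions.AuthenticationFailed:
            # Terminal path: credentials supplied but rejected
            logger.warning(f"Basic auth failed for {request.method} to {request.path}")
            raise

        if ret is None:
            # Terminal path: no basic auth credentials supplied
            logger.debug(f"Basic auth not attempted for {request.method} to {request.path}")
        return ret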
5.2 Custom Permission Classes
-----------------------------

**REQUIRED**: Implement comprehensive permission checking:

.. code-block:: python

    # api/permissions.py
    class ModelAccessPermission(permissions.BasePermission):
        """Model-based access control with hierarchy support"""

        def has_permission(self, request, view):
            if hasattr(view, 'parent_model'):
                parent_obj = view.get_parent_object()
                return check_user_access(
                    request.user,
                    view.parent_model,
                    'read',
                    parent_obj
                )
            return True

**Requirements**:

- Multiple authentication methods (JWT, Session, Basic)
- Custom pagination, renderers, and metadata classes
- Comprehensive API exception handling
- Resource-based URL organization
- Logging for authentication events

----

6. Security Requirements
========================

6.1 Production Security Settings
--------------------------------

**REQUIRED**: Enforce secure defaults for production:

.. code-block:: python

    # settings/production.py
    DEBUG = False
    SECRET_KEY = None  # Force explicit configuration
    ALLOWED_HOSTS = []  # Must be explicitly set

    # Session security
    SESSION_COOKIE_SECURE = True
    SESSION_COOKIE_HTTPONLY = True
    SESSION_COOKIE_SAMESITE = 'Lax'
    SESSION_COOKIE_AGE = 1800

    # CSRF protection
    CSRF_COOKIE_SECURE = True
    CSRF_COOKIE_HTTPONLY = True
    CSRF_TRUSTED_ORIGINS = []

6.2 Django SECRET_KEY loading
------------------------------

**REQUIRED**: Implement Django SECRET_KEY loading:

.. code-block:: python

    # Secret key from external file
    SECRET_KEY_FILE = os.environ.get('SECRET_KEY_FILE', '/etc/awx/SECRET_KEY')
    if os.path.exists(SECRET_KEY_FILE):
        with open(SECRET_KEY_FILE, 'rb') as f:
            SECRET_KEY = f.read().strip().decode()
    else:
        if not DEBUG:
            raise ImproperlyConfigured("SECRET_KEY must be configured in production")

For more detail, refer to the `Django documentation <https://docs.djangoproject.com/en/5.2/ref/settings/#secret-key>`_.

6.3 Proxy and Network Security
------------------------------

**REQUIRED**: Configure reverse proxy security:

.. code-block:: python

    # Proxy configuration
    REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
    PROXY_IP_ALLOWED_LIST = []
    USE_X_FORWARDED_HOST = True
    USE_X_FORWARDED_PORT = True
    SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

**Requirements**:

- External secret file management
- Secure cookie configuration
- CSRF protection with trusted origins
- Proxy header validation
- Force HTTPS in production

----

7. Database Management
======================

7.1 Advanced Database Configuration
-----------------------------------

**REQUIRED**: Robust database connections for production:

.. code-block:: python

    # Database configuration with connection tuning
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql',
            'NAME': os.environ.get('DATABASE_NAME', 'awx'),
            'ATOMIC_REQUESTS': True,
            'CONN_MAX_AGE': 0,
            'OPTIONS': {
                'keepalives': 1,
                'keepalives_idle': 5,
                'keepalives_interval': 5,
                'keepalives_count': 5,
            },
        }
    }

7.2 Database Version Validation
-------------------------------

**REQUIRED**: Implement database compatibility checking:

.. code-block:: python

    # PostgreSQL version enforcement
    def validate_database_version():
        from django.db import connection
        if (connection.pg_version // 10000) < 12:
            raise ImproperlyConfigured(
                "PostgreSQL version 12 or higher is required"
            )

7.3 Migration Management
------------------------

**REQUIRED**: Structured migration organization

::

    migrations/
    ├── 0001_initial.py
    ├── 0002_squashed_v300_release.py
    ├── 0003_squashed_v300_v303_updates.py
    └── _migration_utils.py
**Requirements**:

It is best practice not to rewrite migrations. If possible, include a reverse migration, especially for data migrations, to make testing easier.
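
A minimal sketch of a reversible data migration (the app, model, and field names are hypothetical):

.. code-block:: python

    from django.db import migrations


    def forwards(apps, schema_editor):
        Job = apps.get_model('main', 'Job')
        Job.objects.filter(status='new').update(status='pending')


    def backwards(apps, schema_editor):
        # Reverse migration: undoes forwards() so the migration can be unapplied
        Job = apps.get_model('main', 'Job')
        Job.objects.filter(status='pending').update(status='new')


    class Migration(migrations.Migration):
        dependencies = [('main', '0001_initial')]

        operations = [
            migrations.RunPython(forwards, reverse_code=backwards),
        ]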

----

8. Testing Standards
====================

8.1 Pytest Configuration
-------------------------

**REQUIRED**: Comprehensive test setup with optimization:

.. code-block:: ini

    # pytest.ini
    [pytest]
    DJANGO_SETTINGS_MODULE = awx.main.tests.settings_for_test
    python_files = *.py
    addopts = --reuse-db --nomigrations --tb=native
    markers =
        ac: access control test
        survey: tests related to survey feature
        inventory_import: tests of code used by inventory import command
        integration: integration tests requiring external services
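
Markers declared this way can then gate individual tests; a small sketch (the test body is illustrative):

.. code-block:: python

    import pytest


    @pytest.mark.ac
    def test_org_member_cannot_delete_inventory():
        ...

    # Select only the marked tests with: pytest -m ac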
8.2 Test Settings Module
-------------------------

**REQUIRED**: Dedicated test configuration:

.. code-block:: python

    # settings/testing.py
    from .defaults import *

    # Fast test database
    DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3'
    DATABASES['default']['NAME'] = ':memory:'

    # Disable migrations for speed
    class DisableMigrations:
        def __contains__(self, item):
            return True

        def __getitem__(self, item):
            return None

    MIGRATION_MODULES = DisableMigrations()

8.3 Coverage Requirements
-------------------------

**REQUIRED**: Enforce comprehensive test coverage:

.. code-block:: python

    # Coverage targets
    COVERAGE_TARGETS = {
        'project_overall': 75,
        'library_code': 75,
        'test_code': 95,
        'new_patches': 100,
        'type_checking': 100,
    }

**Requirements**:

- Database reuse for faster execution
- Skip migrations in tests
- Custom test markers for categorization
- Dedicated test settings module
- Comprehensive warning filters

----

9. Application Configuration
=============================

9.1 Advanced AppConfig Implementation
--------------------------------------

**REQUIRED**: Custom application configuration with initialization:

.. code-block:: python

    # apps.py
    class MainConfig(AppConfig):
        name = 'awx.main'
        verbose_name = _('Main')
        default_auto_field = 'django.db.models.AutoField'

        def ready(self):
            super().ready()

            # Feature loading with environment checks
            if not os.environ.get('AWX_SKIP_FEATURES', None):
                self.load_credential_types()
                self.load_inventory_plugins()
                self.load_named_urls()

            # Signal registration
            self.register_signals()

        def load_credential_types(self):
            """Load credential type definitions"""
            pass

        def register_signals(self):
            """Register Django signals"""
            pass

**Requirements**:

- Custom AppConfig for complex initialization
- Feature loading in ``ready()`` method
- Environment-based feature toggling
- Plugin system integration
- Signal registration

----

10. Middleware Implementation
=============================

10.1 Custom Middleware for Enterprise Features
----------------------------------------------

**REQUIRED**: Implement domain-specific middleware:

.. code-block:: python

    # middleware.py
    class SettingsCacheMiddleware(MiddlewareMixin):
        """Clear settings cache on each request"""

        def process_request(self, request):
            from django.conf import settings
            if hasattr(settings, '_awx_conf_memoizedcache'):
                settings._awx_conf_memoizedcache.clear()

    class TimingMiddleware(threading.local, MiddlewareMixin):
        """Request timing and performance monitoring"""

        def process_request(self, request):
            self.start_time = time.time()

        def process_response(self, request, response):
            if hasattr(self, 'start_time'):
                duration = time.time() - self.start_time
                response['X-Response-Time'] = f"{duration:.3f}s"
            return response

**Requirements**:

- Settings cache management middleware
- Performance monitoring middleware
- Thread-local storage for request data
- Conditional middleware activation (see the sketch after this list)
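
A sketch of conditional activation from settings, assuming a hypothetical ``AWX_REQUEST_PROFILING`` environment variable:

.. code-block:: python

    # settings sketch: enable TimingMiddleware only when profiling is requested
    import os

    MIDDLEWARE = [
        'awx.main.middleware.SettingsCacheMiddleware',
        # ... the rest of the stack ...
    ]

    if os.environ.get('AWX_REQUEST_PROFILING', '').lower() in ('1', 'true'):
        MIDDLEWARE.append('awx.main.middleware.TimingMiddleware')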
----

11. Deployment Patterns
========================

11.1 Production-Ready ASGI/WSGI Configuration
---------------------------------------------

**REQUIRED**: Proper application server setup:

.. code-block:: python

    # asgi.py
    import os
    import django
    from channels.routing import get_default_application
    from awx import prepare_env

    prepare_env()
    django.setup()

    application = get_default_application()

    # wsgi.py
    import os
    from django.core.wsgi import get_wsgi_application
    from awx import prepare_env

    prepare_env()
    application = get_wsgi_application()

----

Compliance Checklist
=====================

Development Standards
---------------------

.. list-table::
   :header-rows: 1
   :widths: 50 10

   * - Requirement
     - Status
   * - Modular app architecture implemented
     - ☐
   * - Environment-based settings configured
     - ☐
   * - Custom authentication and permissions
     - ☐
   * - Comprehensive test coverage (>75%)
     - ☐
   * - Security settings enforced
     - ☐
   * - Database optimization configured
     - ☐
   * - Static files properly organized
     - ☐
   * - Custom middleware implemented
     - ☐

Production Readiness
--------------------

.. list-table::
   :header-rows: 1
   :widths: 50 10

   * - Requirement
     - Status
   * - External secret management
     - ☐
   * - Database version validation
     - ☐
   * - Version deployment verification
     - ☐
   * - Performance monitoring
     - ☐
   * - Security headers configured
     - ☐
   * - HTTPS enforcement
     - ☐
   * - Proper logging setup
     - ☐
   * - Error handling and monitoring
     - ☐

Code Quality
------------

.. list-table::
   :header-rows: 1
   :widths: 50 10

   * - Requirement
     - Status
   * - Abstract base models used
     - ☐
   * - Mixin-based architecture
     - ☐
   * - Custom management commands
     - ☐
   * - Plugin system support
     - ☐
   * - Signal registration
     - ☐
   * - Migration organization
     - ☐
   * - API documentation
     - ☐
   * - Type hints and validation
     - ☐

----

References
==========

- **Django Documentation**: https://docs.djangoproject.com/
- **Django REST Framework**: https://www.django-rest-framework.org/
- **Django Split Settings**: https://github.com/sobolevn/django-split-settings
- **AWX Source Code**: https://github.com/ansible/awx

----

| **Document Maintainer**: Development Team
| **Last Updated**: September 2025
| **Review Schedule**: Quarterly
@@ -15,6 +15,8 @@ Ansible AWX helps teams manage complex multi-tier deployments by adding control,
   :caption: Community

   contributor/index
   contributor/DJANGO_REQUIREMENTS
   contributor/API_REQUIREMENTS

.. toctree::
   :maxdepth: 2

@@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2017 Laurent LAPORTE

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
30
licenses/drf-spectacular.txt
Normal file
@@ -0,0 +1,30 @@
Copyright © 2011-present, Encode OSS Ltd.
Copyright © 2019-2021, T. Franzel <tfranzel@gmail.com>, Cashlink Technologies GmbH.
Copyright © 2021-present, T. Franzel <tfranzel@gmail.com>.

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Binary file not shown.
@@ -1,165 +0,0 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.


This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.

0. Additional Definitions.

As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.

"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.

An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.

A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".

The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.

The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.

1. Exception to Section 3 of the GNU GPL.

You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.

2. Conveying Modified Versions.

If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:

a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or

b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.

3. Object Code Incorporating Material from Library Header Files.

The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:

a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.

b) Accompany the object code with a copy of the GNU GPL and this license
document.

4. Combined Works.

You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:

a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.

b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.

c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.

d) Do one of the following:

0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.

1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.

e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)

5. Combined Libraries.

You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:

a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.

b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.

6. Revised Versions of the GNU Lesser General Public License.

The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.

If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
@@ -1,11 +0,0 @@
Copyright 2022 Rick van Hattem

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
BIN  licenses/psycopg-3.2.10.tar.gz  Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN  licenses/pygithub-2.8.1.tar.gz  Normal file
Binary file not shown.
23  licenses/uritemplate.txt  Normal file
@@ -0,0 +1,23 @@
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
   derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Binary file not shown.
BIN  licenses/uwsgi-2.0.30.tar.gz  Normal file
Binary file not shown.
@@ -1,26 +1,27 @@
-Copyright (c) 2013, Massimiliano Pippi, Federico Frenguelli and contributors
+Copyright (c) 2016, Gregory Szorc
 All rights reserved.
 
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
 
 1. Redistributions of source code must retain the above copyright notice, this
-list of conditions and the following disclaimer.
+list of conditions and the following disclaimer.
 
 2. Redistributions in binary form must reproduce the above copyright notice,
-this list of conditions and the following disclaimer in the documentation
-and/or other materials provided with the distribution.
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
 
 3. Neither the name of the copyright holder nor the names of its contributors
 may be used to endorse or promote products derived from this software without
 specific prior written permission.
 
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 The views and conclusions contained in the software and documentation are those
 of the authors and should not be interpreted as representing official policies,
 either expressed or implied, of the FreeBSD Project.
10  pytest.ini
@@ -23,7 +23,8 @@ filterwarnings =
 
     # NOTE: the following are present using python 3.11
     # FIXME: Set `USE_TZ` to `True`.
-    once:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.:django.utils.deprecation.RemovedInDjango50Warning:django.conf
+    # Note: RemovedInDjango50Warning may not exist in newer Django versions
+    ignore:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.
 
     # FIXME: Delete this entry once `pyparsing` is updated.
     once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite
@@ -46,11 +47,12 @@ filterwarnings =
     once:DateTimeField User.date_joined received a naive datetime .2020-01-01 00.00.00. while time zone support is active.:RuntimeWarning:django.db.models.fields
 
     # FIXME: Delete this entry once the deprecation is acted upon.
-    once:'index_together' is deprecated. Use 'Meta.indexes' in 'main.\w+' instead.:django.utils.deprecation.RemovedInDjango51Warning:django.db.models.options
+    # Note: RemovedInDjango51Warning may not exist in newer Django versions
+    ignore:'index_together' is deprecated. Use 'Meta.indexes' in 'main.\w+' instead.
 
     # FIXME: Update `awx.main.migrations._dab_rbac` and delete this entry.
     # once:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.:django.utils.deprecation.RemovedInDjango50Warning:django.db.models.query
-    once:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.:django.utils.deprecation.RemovedInDjango50Warning:awx.main.migrations._dab_rbac
+    # Note: RemovedInDjango50Warning may not exist in newer Django versions
+    ignore:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.
 
     # FIXME: Delete this entry once the **broken** always-true assertions in the
     # FIXME: following tests are fixed:

@@ -49,29 +49,19 @@ Make sure to delete the old tarball if it is an upgrade.
 Anything pinned in `*.in` files involves additional manual work in
 order to upgrade. Some information related to that work is outlined here.
 
-### django-oauth-toolkit
+### pip, setuptools and setuptools_scm, wheel, cython
 
-Versions later than 1.4.1 throw an error about id_token_id, due to the
-OpenID Connect work that was done in
-https://github.com/jazzband/django-oauth-toolkit/pull/915. This may
-be fixable by creating a migration on our end?
-
-### pip, setuptools and setuptools_scm
-
-If modifying these libraries make sure testing with the offline build is performed to confirm they are functionally working.
-Versions need to match the versions used in the pip bootstrapping step
-in the top-level Makefile.
+If modifying these libraries, make sure testing with the offline build is performed to confirm
+they are functionally working. Versions need to match the versions used in the pip bootstrapping
+step in the top-level Makefile.
 
 Verify ansible-runner's build dependency doesn't conflict with the changes made.
 
 ### cryptography
 
 If modifying this library, make sure testing with the offline build is performed to confirm it is functionally working.
 
 ## Library Notes
 
-### pexpect
-
-Version 4.8 makes us a little bit nervous with changes to `searchwindowsize` https://github.com/pexpect/pexpect/pull/579/files
-Pin to `pexpect==4.7.x` until we have more time to move to `4.8` and test.
+### urllib3 and OPA-python-client
+There are incompatible version dependencies for urllib3 between OPA-python-client and kubernetes:
+OPA-python-client v2.0.3+ requires urllib3 v2.5.0+, and kubernetes v34.1.0 caps it at v2.4.0.
+
+## djangorestframework
+Upgrading to 3.16.1 introduced errors in the tests around CredentialInputSource. We have several
+fields on that model set to default=null, but in the serializer they're set to required: true, which causes
+a conflict.
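A note on the pytest.ini hunks above: each `filterwarnings` entry follows the stdlib `action:message:category:module:lineno` form, and the new `ignore:` lines deliberately drop the category and module fields so the filter matches on the message regex alone and keeps working after `RemovedInDjango50Warning`/`RemovedInDjango51Warning` disappear from Django. A minimal stdlib sketch of the same idea (the message text is taken from the entry above; the rest is illustrative, not part of this PR):

```python
import warnings

# Match on the message regex only, with no category or module qualifier,
# so the filter still applies even if the original warning class no
# longer exists in the installed Django version.
warnings.filterwarnings(
    "ignore",
    message="The default value of USE_TZ will change from False to True in Django 5.0",
)
```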
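And for the djangorestframework note: the failure described is DRF's `NOT_REQUIRED_DEFAULT` assertion from `rest_framework.fields`, also referenced in the requirements pin below. A minimal sketch of the kind of field declaration that trips it, using a simplified stand-in rather than AWX's actual CredentialInputSource serializer:

```python
import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # minimal config so DRF can be imported outside a project
    django.setup()

from rest_framework import serializers


class ExampleSerializer(serializers.Serializer):
    # A nullable model field surfaced in the serializer as required=True
    # while also carrying a default is the combination DRF rejects:
    # evaluating this class body raises
    # AssertionError: May not set both `required` and `default`
    # (the NOT_REQUIRED_DEFAULT message in rest_framework.fields).
    metadata = serializers.JSONField(required=True, default=dict)
```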
@@ -9,21 +9,21 @@ boto3
 botocore
 channels
 channels-redis
-cryptography<42.0.0 # investigation is needed for 42+ to work with OpenSSL v3.0.x (RHEL 9.4) and v3.2.x (RHEL 9.5)
+cryptography
 Cython
 daphne
 distro
-django==4.2.21 # CVE-2025-32873
+django==4.2.26 # CVE-2025-32873
 django-cors-headers
 django-crum
 django-extensions
 django-guid
-django-oauth-toolkit<2.0.0 # Version 2.0.0 has breaking changes that will need to be worked out before upgrading
 django-polymorphic
 django-solo
-djangorestframework>=3.15.0
+djangorestframework==3.15.2 # upgrading to 3.16+ throws NOT_REQUIRED_DEFAULT error on required fields in serializer that have no default
 djangorestframework-yaml
-dynaconf<4
+drf-spectacular>=0.27.0
+dynaconf
 filelock
 GitPython>=3.1.37 # CVE-2023-41040
 grpcio
@@ -35,20 +35,20 @@ Markdown # used for formatting API help
 maturin # pydantic-core build dep
 msgpack
 msrestazure
-OPA-python-client==2.0.2 # Code contain monkey patch targeted to 2.0.2 to fix https://github.com/Turall/OPA-python-client/issues/29
+OPA-python-client==2.0.2 # upgrading requires urllib3 2.5.0+ which is blocked by other deps
 openshift
-opentelemetry-api~=1.24 # new y streams can be drastically different, in a good way
-opentelemetry-sdk~=1.24
+opentelemetry-api~=1.37 # new y streams can be drastically different, in a good way
+opentelemetry-sdk~=1.37
 opentelemetry-instrumentation-logging
 opentelemetry-exporter-otlp
-pexpect==4.7.0 # see library notes
+pexpect
 prometheus_client
 psycopg
 psutil
 pygerduty
-PyGithub <= 2.6.0
-pyopenssl>=23.2.0 # resolve dep conflict from cryptography pin above
-pyparsing==2.4.6 # Upgrading to v3 of pyparsing introduces errors on smart host filtering: Expected 'or' term, found 'or' (at char 15), (line:1, col:16)
+PyGithub
+pyopenssl
+pyparsing==2.4.7 # Upgrading to v3 of pyparsing introduces errors on smart host filtering: Expected 'or' term, found 'or' (at char 15), (line:1, col:16)
 python-daemon
 python-dsv-sdk>=1.0.4
 python-tss-sdk>=1.2.1
@@ -61,13 +61,13 @@ requests
 slack-sdk
 twilio
 twisted[tls]>=24.7.0 # CVE-2024-41810
-urllib3>=1.26.19 # CVE-2024-37891
+urllib3<2.4.0, >=1.26.19 # CVE-2024-37891. capped by kubernetes 34.1.0 reqs
 uWSGI>=2.0.28
 uwsgitop
 wheel>=0.38.1 # CVE-2022-40898
 pip==21.2.4 # see UPGRADE BLOCKERs
 setuptools==80.9.0 # see UPGRADE BLOCKERs
-setuptools_scm[toml] # see UPGRADE BLOCKERs, xmlsec build dep
+setuptools_scm[toml]
 setuptools-rust>=0.11.4 # cryptography build dep
 pkgconfig>=1.5.1 # xmlsec build dep - needed for offline build
 django-flags>=5.0.13
 
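To make the new urllib3 pin above concrete: it has to sit in the intersection of the CVE floor and the kubernetes cap, which is exactly what rules out the urllib3 2.5.0+ that OPA-python-client 2.0.3+ wants. A small sketch using the `packaging` library (an assumption that it is available; pip vendors it) to check the ranges described in the comments:

```python
from packaging.specifiers import SpecifierSet

combined = SpecifierSet(">=1.26.19,<2.4.0")  # the urllib3 pin above
opa_client = SpecifierSet(">=2.5.0")         # OPA-python-client 2.0.3+ requirement
kubernetes = SpecifierSet("<2.4.0")          # kubernetes 34.1.0 cap, per the notes above

print("2.3.0" in combined)    # True  -> the locked urllib3 2.3.0 satisfies the pin
print("2.5.0" in kubernetes)  # False -> why OPA-python-client stays at 2.0.2
```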
@@ -1,38 +1,35 @@
 adal==1.2.7
     # via msrestazure
-aiodns==3.2.0
+aiodns==3.5.0
     # via aiohttp
 aiofiles==24.1.0
     # via opa-python-client
-aiohappyeyeballs==2.4.4
+aiohappyeyeballs==2.6.1
     # via aiohttp
-aiohttp[speedups]==3.11.11
+aiohttp[speedups]==3.13.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   aiohttp-retry
     #   opa-python-client
     #   twilio
-aiohttp-retry==2.8.3
+aiohttp-retry==2.9.1
     # via twilio
-aiosignal==1.3.2
+aiosignal==1.4.0
     # via aiohttp
 ansi2html==1.9.2
-    # via -r /awx_devel/requirements/requirements.in
-# git+https://github.com/ansible/ansible-runner.git@devel # git requirements installed separately
-    # via -r /awx_devel/requirements/requirements_git.txt
+    # via -r requirements.in
 asciichartpy==1.5.25
-    # via -r /awx_devel/requirements/requirements.in
-asgiref==3.8.1
+    # via -r requirements.in
+asgiref==3.11.0
     # via
     #   channels
     #   channels-redis
     #   daphne
     #   django
     #   django-ansible-base
     #   django-cors-headers
-asn1==2.7.1
-    # via -r /awx_devel/requirements/requirements.in
-attrs==24.3.0
+asn1==3.1.0
+    # via -r requirements.in
+attrs==25.4.0
     # via
     #   aiohttp
     #   jsonschema
@@ -43,166 +40,139 @@ autobahn==24.4.2
     # via daphne
 autocommand==2.2.2
     # via jaraco-text
-automat==24.8.1
+automat==25.4.16
     # via twisted
-# awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel # git requirements installed separately
-    # via -r /awx_devel/requirements/requirements_git.txt
-awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git
-    # via
-    #   -r /awx_devel/requirements/requirements_git.txt
-    #   awx-plugins-core
-azure-core==1.32.0
+azure-core==1.35.1
     # via
     #   azure-identity
     #   azure-keyvault-certificates
     #   azure-keyvault-keys
     #   azure-keyvault-secrets
     #   msrest
-azure-identity==1.19.0
-    # via -r /awx_devel/requirements/requirements.in
+azure-identity==1.25.1
+    # via -r requirements.in
 azure-keyvault==4.2.0
-    # via -r /awx_devel/requirements/requirements.in
-azure-keyvault-certificates==4.9.0
+    # via -r requirements.in
+azure-keyvault-certificates==4.10.0
     # via azure-keyvault
-azure-keyvault-keys==4.10.0
+azure-keyvault-keys==4.11.0
     # via azure-keyvault
-azure-keyvault-secrets==4.9.0
+azure-keyvault-secrets==4.10.0
     # via azure-keyvault
 backports-tarfile==1.2.0
     # via jaraco-context
-boto3==1.35.96
-    # via -r /awx_devel/requirements/requirements.in
-botocore==1.35.96
+boto3==1.41.3
+    # via -r requirements.in
+botocore==1.41.3
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   boto3
     #   s3transfer
 brotli==1.1.0
     # via aiohttp
-cachetools==5.5.0
+cachetools==6.2.0
     # via google-auth
-# git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
+certifi==2025.11.12
     # via
-    #   -r /awx_devel/requirements/requirements_git.txt
+    #   kubernetes
     #   msrest
     #   requests
-cffi==1.17.1
+cffi==2.0.0
     # via
     #   cryptography
     #   pycares
     #   pynacl
-channels==4.2.0
+channels==4.3.1
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   channels-redis
-channels-redis==4.2.1
-    # via -r /awx_devel/requirements/requirements.in
-charset-normalizer==3.4.1
+channels-redis==4.3.0
+    # via -r requirements.in
+charset-normalizer==3.4.3
     # via requests
 click==8.1.8
     # via receptorctl
 constantly==23.10.4
     # via twisted
-cryptography==41.0.7
+cryptography==46.0.3
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   adal
     #   autobahn
     #   azure-identity
     #   azure-keyvault-keys
     #   django-ansible-base
-    #   jwcrypto
     #   msal
     #   pyjwt
     #   pyopenssl
     #   service-identity
 cython==3.1.3
-    # via -r /awx_devel/requirements/requirements.in
-daphne==4.1.2
-    # via -r /awx_devel/requirements/requirements.in
-deprecated==1.2.15
-    # via
-    #   opentelemetry-api
-    #   opentelemetry-exporter-otlp-proto-grpc
-    #   opentelemetry-exporter-otlp-proto-http
-    #   opentelemetry-semantic-conventions
-    #   pygithub
+    # via -r requirements.in
+daphne==4.2.1
+    # via -r requirements.in
 dispatcherd==2025.5.21
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 distro==1.9.0
-    # via -r /awx_devel/requirements/requirements.in
-django==4.2.21
+    # via -r requirements.in
+django==4.2.26
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   channels
     #   django-ansible-base
     #   django-cors-headers
     #   django-crum
     #   django-extensions
     #   django-flags
     #   django-guid
-    #   django-oauth-toolkit
     #   django-polymorphic
     #   django-solo
     #   djangorestframework
-# django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
-    # via -r /awx_devel/requirements/requirements_git.txt
-django-cors-headers==4.6.0
-    # via -r /awx_devel/requirements/requirements.in
+    #   drf-spectacular
+django-cors-headers==4.9.0
+    # via -r requirements.in
 django-crum==0.7.9
-    # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   django-ansible-base
-django-extensions==3.2.3
-    # via -r /awx_devel/requirements/requirements.in
-django-flags==5.0.13
-    # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   django-ansible-base
-django-guid==3.5.0
-    # via -r /awx_devel/requirements/requirements.in
-django-oauth-toolkit==1.7.1
-    # via -r /awx_devel/requirements/requirements.in
-django-polymorphic==3.1.0
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
+django-extensions==4.1
+    # via -r requirements.in
+django-flags==5.1.0
+    # via -r requirements.in
+django-guid==3.5.2
+    # via -r requirements.in
+django-polymorphic==4.1.0
+    # via -r requirements.in
 django-solo==2.4.0
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 djangorestframework==3.15.2
     # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   django-ansible-base
+    #   -r requirements.in
+    #   drf-spectacular
 djangorestframework-yaml==2.0.0
-    # via -r /awx_devel/requirements/requirements.in
-durationpy==0.9
+    # via -r requirements.in
+drf-spectacular==0.29.0
+    # via -r requirements.in
+durationpy==0.10
     # via kubernetes
-dynaconf==3.2.10
-    # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   django-ansible-base
+dynaconf==3.2.12
+    # via -r requirements.in
 enum-compat==0.0.3
     # via asn1
-filelock==3.16.1
-    # via -r /awx_devel/requirements/requirements.in
-frozenlist==1.5.0
+filelock==3.19.1
+    # via -r requirements.in
+frozenlist==1.8.0
     # via
     #   aiohttp
     #   aiosignal
 gitdb==4.0.12
     # via gitpython
-gitpython==3.1.44
-    # via -r /awx_devel/requirements/requirements.in
-google-auth==2.37.0
+gitpython==3.1.45
+    # via -r requirements.in
+google-auth==2.41.1
     # via kubernetes
-googleapis-common-protos==1.66.0
+googleapis-common-protos==1.70.0
     # via
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-grpcio==1.69.0
+grpcio==1.75.1
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   opentelemetry-exporter-otlp-proto-grpc
-hiredis==3.1.0
+hiredis==3.2.1
     # via redis
 hyperlink==21.0.0
     # via
@@ -210,37 +180,35 @@ hyperlink==21.0.0
     #   twisted
 idna==3.10
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   hyperlink
     #   requests
     #   twisted
     #   yarl
-importlib-metadata==8.5.0
+importlib-metadata==8.7.0
     # via opentelemetry-api
 importlib-resources==6.5.2
     # via irc
 incremental==24.7.2
     # via twisted
 inflection==0.5.1
-    # via django-ansible-base
+    # via drf-spectacular
 irc==20.5.0
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 isodate==0.7.2
     # via
     #   azure-keyvault-certificates
     #   azure-keyvault-keys
     #   azure-keyvault-secrets
     #   msrest
-jaraco-collections==5.1.0
+jaraco-collections==5.2.1
     # via irc
 jaraco-context==6.0.1
     # via jaraco-text
-jaraco-functools==4.1.0
+jaraco-functools==4.3.0
     # via
     #   irc
     #   jaraco-text
     #   tempora
-jaraco-logging==3.3.0
+jaraco-logging==3.4.0
     # via irc
 jaraco-stream==3.0.4
     # via irc
@@ -248,129 +216,125 @@ jaraco-text==4.0.0
     # via
     #   irc
     #   jaraco-collections
-jinja2==3.1.5
-    # via -r /awx_devel/requirements/requirements.in
+jinja2==3.1.6
+    # via -r requirements.in
 jmespath==1.0.1
     # via
     #   boto3
     #   botocore
-jq==1.8.0
-    # via -r /awx_devel/requirements/requirements.in
-json-log-formatter==1.1
-    # via -r /awx_devel/requirements/requirements.in
-jsonschema==4.23.0
-    # via -r /awx_devel/requirements/requirements.in
-jsonschema-specifications==2024.10.1
+jq==1.10.0
+    # via -r requirements.in
+json-log-formatter==1.1.1
+    # via -r requirements.in
+jsonschema==4.25.1
+    # via
+    #   -r requirements.in
+    #   drf-spectacular
+jsonschema-specifications==2025.9.1
     # via jsonschema
-jwcrypto==1.5.6
-    # via django-oauth-toolkit
-kubernetes==31.0.0
+kubernetes==34.1.0
     # via openshift
 lockfile==0.12.2
     # via python-daemon
-markdown==3.7
-    # via -r /awx_devel/requirements/requirements.in
-markupsafe==3.0.2
+markdown==3.9
+    # via -r requirements.in
+markupsafe==3.0.3
     # via jinja2
-maturin==1.8.1
-    # via -r /awx_devel/requirements/requirements.in
-more-itertools==10.5.0
+maturin==1.9.6
+    # via -r requirements.in
+more-itertools==10.8.0
     # via
     #   irc
     #   jaraco-functools
     #   jaraco-stream
     #   jaraco-text
-msal==1.31.1
+msal==1.34.0
     # via
     #   azure-identity
     #   msal-extensions
-msal-extensions==1.2.0
+msal-extensions==1.3.1
     # via azure-identity
-msgpack==1.1.0
+msgpack==1.1.1
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   channels-redis
 msrest==0.7.1
     # via msrestazure
 msrestazure==0.6.4.post1
-    # via -r /awx_devel/requirements/requirements.in
-multidict==6.1.0
+    # via -r requirements.in
+multidict==6.7.0
     # via
     #   aiohttp
     #   yarl
-oauthlib==3.2.2
-    # via
-    #   django-oauth-toolkit
-    #   kubernetes
-    #   requests-oauthlib
+oauthlib==3.3.1
+    # via requests-oauthlib
 opa-python-client==2.0.2
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 openshift==0.13.2
-    # via -r /awx_devel/requirements/requirements.in
-opentelemetry-api==1.29.0
+    # via -r requirements.in
+opentelemetry-api==1.37.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
     #   opentelemetry-instrumentation
     #   opentelemetry-instrumentation-logging
     #   opentelemetry-sdk
     #   opentelemetry-semantic-conventions
-opentelemetry-exporter-otlp==1.29.0
-    # via -r /awx_devel/requirements/requirements.in
-opentelemetry-exporter-otlp-proto-common==1.29.0
+opentelemetry-exporter-otlp==1.37.0
+    # via -r requirements.in
+opentelemetry-exporter-otlp-proto-common==1.37.0
     # via
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-grpc==1.29.0
+opentelemetry-exporter-otlp-proto-grpc==1.37.0
     # via opentelemetry-exporter-otlp
-opentelemetry-exporter-otlp-proto-http==1.29.0
+opentelemetry-exporter-otlp-proto-http==1.37.0
     # via opentelemetry-exporter-otlp
-opentelemetry-instrumentation==0.50b0
+opentelemetry-instrumentation==0.58b0
     # via opentelemetry-instrumentation-logging
-opentelemetry-instrumentation-logging==0.50b0
-    # via -r /awx_devel/requirements/requirements.in
-opentelemetry-proto==1.29.0
+opentelemetry-instrumentation-logging==0.58b0
+    # via -r requirements.in
+opentelemetry-proto==1.37.0
     # via
     #   opentelemetry-exporter-otlp-proto-common
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.29.0
+opentelemetry-sdk==1.37.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   opentelemetry-exporter-otlp-proto-grpc
     #   opentelemetry-exporter-otlp-proto-http
-opentelemetry-semantic-conventions==0.50b0
+opentelemetry-semantic-conventions==0.58b0
     # via
     #   opentelemetry-instrumentation
     #   opentelemetry-sdk
-packaging==24.2
+packaging==25.0
     # via
-    #   ansible-runner
     #   django-guid
     #   opentelemetry-instrumentation
     #   setuptools-scm
-pexpect==4.7.0
-    # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   ansible-runner
+pbr==7.0.1
+    # via -r requirements.in
+pexpect==4.9.0
+    # via -r requirements.in
 pkgconfig==1.5.5
-    # via -r /awx_devel/requirements/requirements.in
-portalocker==2.10.1
-    # via msal-extensions
-prometheus-client==0.21.1
-    # via -r /awx_devel/requirements/requirements.in
-propcache==0.2.1
+    # via -r requirements.in
+prometheus-client==0.23.1
+    # via -r requirements.in
+propcache==0.4.0
     # via
     #   aiohttp
     #   yarl
-protobuf==5.29.3
+protobuf==6.32.1
     # via
+    #   -r requirements.in
     #   googleapis-common-protos
     #   opentelemetry-proto
-psutil==6.1.1
-    # via -r /awx_devel/requirements/requirements.in
-psycopg==3.2.6
-    # via -r /awx_devel/requirements/requirements.in
+psutil==7.1.0
+    # via -r requirements.in
+psycopg==3.2.10
+    # via -r requirements.in
 ptyprocess==0.7.0
     # via pexpect
 pyasn1==0.6.1
@@ -378,37 +342,34 @@ pyasn1==0.6.1
     #   pyasn1-modules
     #   rsa
     #   service-identity
-pyasn1-modules==0.4.1
+pyasn1-modules==0.4.2
     # via
     #   google-auth
     #   service-identity
-pycares==4.5.0
+pycares==4.11.0
     # via aiodns
-pycparser==2.22
+pycparser==2.23
     # via cffi
 pygerduty==0.38.3
-    # via -r /awx_devel/requirements/requirements.in
-pygithub==2.6.1
-    # via awx-plugins-core
+    # via -r requirements.in
+pygithub==2.8.1
+    # via -r requirements.in
 pyjwt[crypto]==2.10.1
     # via
     #   adal
-    #   django-ansible-base
     #   msal
     #   pygithub
     #   twilio
-pynacl==1.5.0
+pynacl==1.6.0
     # via pygithub
-pyopenssl==24.3.0
+pyopenssl==25.3.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   twisted
-pyparsing==2.4.6
-    # via -r /awx_devel/requirements/requirements.in
+pyparsing==2.4.7
+    # via -r requirements.in
 python-daemon==3.1.2
-    # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   ansible-runner
+    # via -r requirements.in
 python-dateutil==2.9.0.post0
     # via
     #   adal
@@ -417,40 +378,38 @@ python-dateutil==2.9.0.post0
     #   receptorctl
     #   tempora
 python-dsv-sdk==1.0.4
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 python-string-utils==1.0.0
     # via openshift
-python-tss-sdk==1.2.3
-    # via -r /awx_devel/requirements/requirements.in
-pytz==2024.2
+python-tss-sdk==2.0.0
+    # via -r requirements.in
+pytz==2025.2
     # via irc
-pyyaml==6.0.2
+pyyaml==6.0.3
     # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   ansible-runner
+    #   -r requirements.in
     #   dispatcherd
     #   djangorestframework-yaml
+    #   drf-spectacular
     #   kubernetes
     #   receptorctl
-pyzstd==0.16.2
-    # via -r /awx_devel/requirements/requirements.in
-receptorctl==1.5.2
-    # via -r /awx_devel/requirements/requirements.in
-redis[hiredis]==5.2.1
+pyzstd==0.18.0
+    # via -r requirements.in
+receptorctl==1.6.0
+    # via -r requirements.in
+redis[hiredis]==6.4.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   channels-redis
-referencing==0.35.1
+referencing==0.36.2
     # via
     #   jsonschema
     #   jsonschema-specifications
-requests==2.32.3
+requests==2.32.5
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   adal
     #   azure-core
     #   django-ansible-base
-    #   django-oauth-toolkit
     #   kubernetes
     #   msal
     #   msrest
@@ -465,22 +424,22 @@ requests-oauthlib==2.0.0
     # via
     #   kubernetes
     #   msrest
-rpds-py==0.22.3
+rpds-py==0.27.1
     # via
     #   jsonschema
     #   referencing
-rsa==4.9
+rsa==4.9.1
     # via google-auth
-s3transfer==0.10.4
+s3transfer==0.15.0
     # via boto3
 semantic-version==2.10.0
     # via setuptools-rust
 service-identity==24.2.0
     # via twisted
 setuptools-rust==1.10.2
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 setuptools-scm[toml]==8.1.0
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 six==1.17.0
     # via
     #   azure-core
@@ -489,74 +448,76 @@ six==1.17.0
     #   openshift
     #   pygerduty
     #   python-dateutil
-slack-sdk==3.34.0
-    # via -r /awx_devel/requirements/requirements.in
+slack-sdk==3.37.0
+    # via -r requirements.in
 smmap==5.0.2
     # via gitdb
 sqlparse==0.5.3
     # via
+    #   -r requirements.in
     #   django
-    #   django-ansible-base
-tempora==5.8.0
+tempora==5.8.1
     # via
     #   irc
     #   jaraco-logging
-twilio==9.4.2
-    # via -r /awx_devel/requirements/requirements.in
-twisted[tls]==24.11.0
+twilio==9.8.3
+    # via -r requirements.in
+twisted[tls]==25.5.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   daphne
-txaio==23.1.1
+txaio==25.9.2
     # via autobahn
-typing-extensions==4.12.2
+typing-extensions==4.15.0
     # via
     #   azure-core
     #   azure-identity
     #   azure-keyvault-certificates
     #   azure-keyvault-keys
     #   azure-keyvault-secrets
-    #   jwcrypto
+    #   grpcio
     #   opentelemetry-api
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
     #   opentelemetry-sdk
-    #   psycopg
+    #   opentelemetry-semantic-conventions
+    #   pygithub
     #   twisted
+uritemplate==4.2.0
+    # via drf-spectacular
 urllib3==2.3.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   botocore
     #   django-ansible-base
     #   kubernetes
     #   pygithub
     #   requests
-uwsgi==2.0.28
-    # via -r /awx_devel/requirements/requirements.in
+uwsgi==2.0.30
+    # via -r requirements.in
 uwsgitop==0.12
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 websocket-client==1.8.0
     # via kubernetes
 wheel==0.42.0
-    # via -r /awx_devel/requirements/requirements.in
-wrapt==1.17.0
-    # via
-    #   deprecated
-    #   opentelemetry-instrumentation
-yarl==1.18.3
+    # via -r requirements.in
+wrapt==1.17.3
+    # via opentelemetry-instrumentation
+yarl==1.22.0
     # via aiohttp
-zipp==3.21.0
+zipp==3.23.0
     # via importlib-metadata
-zope-interface==7.2
+zope-interface==8.0.1
     # via twisted
 
 # The following packages are considered to be unsafe in a requirements file:
 pip==21.2.4
-    # via -r /awx_devel/requirements/requirements.in
+    # via -r requirements.in
 setuptools==80.9.0
     # via
-    #   -r /awx_devel/requirements/requirements.in
+    #   -r requirements.in
     #   asciichartpy
     #   autobahn
     #   incremental
+    #   pbr
     #   setuptools-rust
     #   setuptools-scm
     #   zope-interface
@@ -1,7 +1,7 @@
 build
 django-debug-toolbar==3.2.4
 django-test-migrations
-drf-yasg<1.21.10 # introduces new DeprecationWarning that is turned into error
+drf-spectacular>=0.27.0 # Modern OpenAPI 3.0 schema generator
 # pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed
 ipython>=7.31.1 # https://github.com/ansible/awx/security/dependabot/30
 unittest2
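For context on the drf-yasg to drf-spectacular swap above: drf-spectacular hooks into DRF through its schema class, per its documentation. A typical wiring, shown here only as an illustration and not necessarily AWX's exact settings:

```python
# settings.py (typical drf-spectacular setup per its docs; illustrative only)
INSTALLED_APPS = [
    # ...
    "drf_spectacular",
]

REST_FRAMEWORK = {
    # Route DRF schema generation through drf-spectacular's OpenAPI 3 AutoSchema.
    "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
}
```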