mirror of https://github.com/ansible/awx.git
synced 2026-02-12 07:04:45 -03:30

Compare commits: 12 commits (thedoubl3j … dependabot)
| Author | SHA1 | Date |
|---|---|---|
| | 5264e4a12b | |
| | 2b2f2b73ac | |
| | e03beb4d54 | |
| | 4db52e074b | |
| | 4e1911f7c4 | |
| | b02117979d | |
| | 2fa2cd8beb | |
| | f81859510c | |
| | 335a4bbbc6 | |
| | 5ea2fe65b0 | |
| | f3f10ae9ce | |
| | 5be4462395 | |
2 changes: .github/actions/awx_devel_image/action.yml (vendored)

@@ -11,8 +11,6 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: ./.github/actions/setup-python
-
     - name: Set lower case owner name
       shell: bash
      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
2 changes: .github/actions/run_awx_devel/action.yml (vendored)

@@ -36,7 +36,7 @@ runs:

     - name: Upgrade ansible-core
       shell: bash
-      run: python3 -m pip install --upgrade ansible-core
+      run: python -m pip install --upgrade ansible-core

     - name: Install system deps
       shell: bash
12 changes: .github/workflows/api_schema_check.yml (vendored)

@@ -47,7 +47,7 @@ jobs:

      - name: Add schema diff to job summary
        if: always()
        # show text and if for some reason, it can't be generated, state that it can't be.
        run: |
          echo "## API Schema Change Detection Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
@@ -55,12 +55,18 @@ jobs:
          if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
            echo "### Schema changes detected" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
+            # Truncate to first 1000 lines to stay under GitHub's 1MB summary limit
+            TOTAL_LINES=$(wc -l < schema-diff.txt)
+            if [ $TOTAL_LINES -gt 1000 ]; then
+              echo "_Showing first 1000 of ${TOTAL_LINES} lines. See job logs or download artifact for full diff._" >> $GITHUB_STEP_SUMMARY
+              echo "" >> $GITHUB_STEP_SUMMARY
+            fi
            echo '```diff' >> $GITHUB_STEP_SUMMARY
-            cat schema-diff.txt >> $GITHUB_STEP_SUMMARY
+            head -n 1000 schema-diff.txt >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
          else
            echo "### No schema changes detected" >> $GITHUB_STEP_SUMMARY
          fi
        else
          echo "### Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
        fi
59 changes: .github/workflows/ci.yml (vendored)

@@ -32,9 +32,6 @@ jobs:
          - name: api-lint
            command: /var/lib/awx/venv/awx/bin/tox -e linters
            coverage-upload-name: ""
-          - name: api-swagger
-            command: /start_tests.sh swagger
-            coverage-upload-name: ""
          - name: awx-collection
            command: /start_tests.sh test_collection_all
            coverage-upload-name: "awx-collection"
@@ -57,6 +54,17 @@ jobs:
          AWX_DOCKER_CMD='${{ matrix.tests.command }}'
          make docker-runner

+      - name: Inject PR number into coverage.xml
+        if: >-
+          !cancelled()
+          && github.event_name == 'pull_request'
+          && steps.make-run.outputs.cov-report-files != ''
+        run: |
+          if [ -f "reports/coverage.xml" ]; then
+            sed -i '2i<!-- PR ${{ github.event.pull_request.number }} -->' reports/coverage.xml
+            echo "Injected PR number ${{ github.event.pull_request.number }} into coverage.xml"
+          fi
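The `sed -i '2i...'` trick above stamps the PR number into the coverage report as an XML comment, so the separately triggered SonarCloud workflow can recover it without access to the original pull_request event payload. A minimal sketch of that injection in Python, under the same file layout the workflow assumes (the helper name is illustrative, not part of the repo):

```python
from pathlib import Path

def inject_pr_number(report: Path, pr_number: int) -> None:
    """Insert '<!-- PR N -->' as line 2 of coverage.xml, like `sed -i '2i...'`.

    Line 1 is left alone so the XML declaration stays first.
    """
    lines = report.read_text().splitlines(keepends=True)
    lines.insert(1, f"<!-- PR {pr_number} -->\n")
    report.write_text("".join(lines))

# Usage mirroring the workflow step:
# inject_pr_number(Path("reports/coverage.xml"), 12345)
```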
      - name: Upload test coverage to Codecov
        if: >-
          !cancelled()
@@ -96,6 +104,14 @@ jobs:
          }}
          token: ${{ secrets.CODECOV_TOKEN }}

+      - name: Upload test artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ matrix.tests.name }}-artifacts
+          path: reports/coverage.xml
+          retention-days: 5
+
      - name: Upload awx jUnit test reports
        if: >-
          !cancelled()
@@ -126,7 +142,7 @@ jobs:

      - uses: ./.github/actions/setup-python
        with:
-          python-version: '3.x'
+          python-version: '3.13'

      - uses: ./.github/actions/run_awx_devel
        id: awx
@@ -169,11 +185,11 @@ jobs:
      - name: Setup python, referencing action at awx relative path
        uses: ./awx/.github/actions/setup-python
        with:
-          python-version: '3.x'
+          python-version: '3.13'

      - name: Install playbook dependencies
        run: |
-          python3 -m pip install docker
+          python -m pip install docker

      - name: Build AWX image
        working-directory: awx
@@ -187,8 +203,8 @@ jobs:
      - name: Run test deployment with awx-operator
        working-directory: awx-operator
        run: |
-          python3 -m pip install -r molecule/requirements.txt
-          python3 -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
+          python -m pip install -r molecule/requirements.txt
+          python -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
          $(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
          ansible-galaxy collection install -r molecule/requirements.yml
          sudo rm -f $(which kustomize)
@@ -275,7 +291,11 @@ jobs:

      - uses: ./.github/actions/setup-python
        with:
-          python-version: '3.x'
+          python-version: '3.13'
+
+      - name: Remove system ansible to avoid conflicts
+        run: |
+          python -m pip uninstall -y ansible ansible-core || true

      - uses: ./.github/actions/run_awx_devel
        id: awx
@@ -286,8 +306,9 @@ jobs:

      - name: Install dependencies for running tests
        run: |
-          python3 -m pip install -e ./awxkit/
-          python3 -m pip install -r awx_collection/requirements.txt
+          python -m pip install -e ./awxkit/
+          python -m pip install -r awx_collection/requirements.txt
+          hash -r # Rehash to pick up newly installed scripts

      - name: Run integration tests
        id: make-run
@@ -299,6 +320,7 @@ jobs:
          echo 'password = password' >> ~/.tower_cli.cfg
          echo 'verify_ssl = false' >> ~/.tower_cli.cfg
          TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
+          export PYTHONPATH="$(python -c 'import site; print(":".join(site.getsitepackages()))')${PYTHONPATH:+:$PYTHONPATH}"
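The exported PYTHONPATH above is built from the interpreter's own site-packages directories, so that tools spawned under `make` resolve the freshly installed awxkit and collection requirements. What the inline `python -c` evaluates, unrolled as plain standard-library Python:

```python
import os
import site

# Colon-joined list of the interpreter's site-packages directories,
# e.g. "/usr/lib/python3.13/site-packages:...".
packages_path = ":".join(site.getsitepackages())

# Prepend to any pre-existing PYTHONPATH, as the shell expansion
# ${PYTHONPATH:+:$PYTHONPATH} does.
existing = os.environ.get("PYTHONPATH")
os.environ["PYTHONPATH"] = packages_path + (f":{existing}" if existing else "")
```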
          make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
        env:
          ANSIBLE_TEST_PREFER_PODMAN: 1
@@ -353,10 +375,14 @@ jobs:

      - uses: ./.github/actions/setup-python
        with:
-          python-version: '3.x'
+          python-version: '3.13'
+
+      - name: Remove system ansible to avoid conflicts
+        run: |
+          python -m pip uninstall -y ansible ansible-core || true

      - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
+        run: python -m pip install --upgrade ansible-core

      - name: Download coverage artifacts
        uses: actions/download-artifact@v4
@@ -371,11 +397,12 @@ jobs:
          mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
          cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
          cd ~/.ansible/collections/ansible_collections/awx/awx
-          ansible-test coverage combine --requirements
-          ansible-test coverage html
+          hash -r # Rehash to pick up newly installed scripts
+          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage combine --requirements
+          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage html
          echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
-          ansible-test coverage report >> $GITHUB_STEP_SUMMARY
+          PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage report >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          echo >> $GITHUB_STEP_SUMMARY
          echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
271 changes: .github/workflows/sonarcloud_pr.yml (vendored)

@@ -1,85 +1,248 @@
 ---
-name: SonarQube
+# SonarCloud Analysis Workflow for awx
+#
+# This workflow runs SonarCloud analysis triggered by CI workflow completion.
+# It is split into two separate jobs for clarity and maintainability:
+#
+# FLOW: CI completes → workflow_run triggers this workflow → appropriate job runs
+#
+# JOB 1: sonar-pr-analysis (for PRs)
+#   - Triggered by: workflow_run (CI on pull_request)
+#   - Steps: Download coverage → Get PR info → Get changed files → Run SonarCloud PR analysis
+#   - Scans: All changed files in the PR (Python, YAML, JSON, etc.)
+#   - Quality gate: Focuses on new/changed code in PR only
+#
+# JOB 2: sonar-branch-analysis (for long-lived branches)
+#   - Triggered by: workflow_run (CI on push to devel)
+#   - Steps: Download coverage → Run SonarCloud branch analysis
+#   - Scans: Full codebase
+#   - Quality gate: Focuses on overall project health
+#
+# This ensures coverage data is always available from CI before analysis runs.
+#
+# What files are scanned:
+#   - All files in the repository that SonarCloud can analyze
+#   - Excludes: tests, scripts, dev environments, external collections (see sonar-project.properties)

 # With much help from:
 # https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/30
 # https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/32
+name: SonarCloud
 on:
-  workflow_run:
+  workflow_run: # This is triggered by CI being completed.
    workflows:
      - CI
    types:
      - completed

permissions: read-all
jobs:
-  sonarqube:
+  sonar-pr-analysis:
+    name: SonarCloud PR Analysis
    runs-on: ubuntu-latest
-    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
+    if: |
+      github.event.workflow_run.conclusion == 'success' &&
+      github.event.workflow_run.event == 'pull_request' &&
+      github.repository == 'ansible/awx'
    steps:
-      - name: Checkout Code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          show-progress: false
+      - uses: actions/checkout@v4

-      - name: Download coverage report artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: coverage-report
-          path: reports/
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          run-id: ${{ github.event.workflow_run.id }}
+      # Download all individual coverage artifacts from CI workflow
+      - name: Download coverage artifacts
+        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          workflow: CI
+          run_id: ${{ github.event.workflow_run.id }}
+          pattern: api-test-artifacts

-      - name: Download PR number artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: pr-number
-          path: .
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          run-id: ${{ github.event.workflow_run.id }}
-
-      - name: Extract PR number
-        run: |
-          cat pr-number.txt
-          echo "PR_NUMBER=$(cat pr-number.txt)" >> $GITHUB_ENV
-
-      - name: Get PR info
-        uses: octokit/request-action@v2.x
-        id: pr_info
-        with:
-          route: GET /repos/{repo}/pulls/{number}
-          repo: ${{ github.event.repository.full_name }}
-          number: ${{ env.PR_NUMBER }}
-
-      - name: Set PR info into env
-        run: |
-          echo "PR_BASE=${{ fromJson(steps.pr_info.outputs.data).base.ref }}" >> $GITHUB_ENV
-          echo "PR_HEAD=${{ fromJson(steps.pr_info.outputs.data).head.ref }}" >> $GITHUB_ENV
+      # Extract PR metadata from workflow_run event
+      - name: Set PR metadata and prepare files for analysis
+        env:
+          COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
+          REPO_NAME: ${{ github.event.repository.full_name }}
+          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          # Find all downloaded coverage XML files
+          coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
+          echo "Found coverage files: $coverage_files"
+          echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
+
+          # Extract PR number from first coverage.xml file found
+          first_coverage=$(find . -name "coverage.xml" -type f | head -1)
+          if [ -f "$first_coverage" ]; then
+            PR_NUMBER=$(grep -m 1 '<!-- PR' "$first_coverage" | awk '{print $3}' || echo "")
+          else
+            PR_NUMBER=""
+          fi
+
+          echo "🔍 SonarCloud Analysis Decision Summary"
+          echo "========================================"
+          echo "├── CI Event: ✅ Pull Request"
+          echo "├── PR Number from coverage.xml: #${PR_NUMBER:-<not found>}"
+
+          if [ -z "$PR_NUMBER" ]; then
+            echo "##[error]❌ FATAL: PR number not found in coverage.xml"
+            echo "##[error]This job requires a PR number to run PR analysis."
+            echo "##[error]The ci workflow should have injected the PR number into coverage.xml."
+            exit 1
+          fi
+
+          # Get PR metadata from GitHub API
+          PR_DATA=$(gh api "repos/$REPO_NAME/pulls/$PR_NUMBER")
+          PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')
+          PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')
+
+          # Print summary
+          echo "🔍 SonarCloud Analysis Decision Summary"
+          echo "========================================"
+          echo "├── CI Event: ✅ Pull Request"
+          echo "├── PR Number: #$PR_NUMBER"
+          echo "├── Base Branch: $PR_BASE"
+          echo "├── Head Branch: $PR_HEAD"
+          echo "├── Repo: $REPO_NAME"
+
+          # Export to GitHub env for later steps
+          echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
+          echo "PR_BASE=$PR_BASE" >> $GITHUB_ENV
+          echo "PR_HEAD=$PR_HEAD" >> $GITHUB_ENV
+          echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_ENV
+          echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
+
+          # Get all changed files from PR (with error handling)
+          files=""
+          if [ -n "$PR_NUMBER" ]; then
+            if gh api repos/$REPO_NAME/pulls/$PR_NUMBER/files --jq '.[].filename' > /tmp/pr_files.txt 2>/tmp/pr_error.txt; then
+              files=$(cat /tmp/pr_files.txt)
+            else
+              echo "├── Changed Files: ⚠️ Could not fetch (likely test repo or PR not found)"
+              if [ -f coverage.xml ] && [ -s coverage.xml ]; then
+                echo "├── Coverage Data: ✅ Available"
+              else
+                echo "├── Coverage Data: ⚠️ Not available"
+              fi
+              echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
+              # No files = no inclusions filter = full scan
+              exit 0
+            fi
+          else
+            echo "├── PR Number: ⚠️ Not available"
+            if [ -f coverage.xml ] && [ -s coverage.xml ]; then
+              echo "├── Coverage Data: ✅ Available"
+            else
+              echo "├── Coverage Data: ⚠️ Not available"
+            fi
+            echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
+            exit 0
+          fi
+
+          # Get file extensions and count for summary
+          extensions=$(echo "$files" | sed 's/.*\.//' | sort | uniq | tr '\n' ',' | sed 's/,$//')
+          file_count=$(echo "$files" | wc -l)
+          echo "├── Changed Files: $file_count file(s) (.${extensions})"
+
+          # Check if coverage.xml exists and has content
+          if [ -f coverage.xml ] && [ -s coverage.xml ]; then
+            echo "├── Coverage Data: ✅ Available"
+          else
+            echo "├── Coverage Data: ⚠️ Not available (analysis will proceed without coverage)"
+          fi
+
+          # Prepare file list for Sonar
+          echo "All changed files in PR:"
+          echo "$files"
+
+          # Filter out files that are excluded by .coveragerc to avoid coverage conflicts
+          # This prevents SonarCloud from analyzing files that have no coverage data
+          if [ -n "$files" ]; then
+            # Filter out files matching .coveragerc omit patterns
+            filtered_files=$(echo "$files" | grep -v "settings/.*_defaults\.py$" | grep -v "settings/defaults\.py$" | grep -v "main/migrations/")
+
+            # Show which files were filtered out for transparency
+            excluded_files=$(echo "$files" | grep -E "(settings/.*_defaults\.py$|settings/defaults\.py$|main/migrations/)" || true)
+            if [ -n "$excluded_files" ]; then
+              echo "├── Filtered out (coverage-excluded): $(echo "$excluded_files" | wc -l) file(s)"
+              echo "$excluded_files" | sed 's/^/│ - /'
+            fi
+
+            if [ -n "$filtered_files" ]; then
+              inclusions=$(echo "$filtered_files" | tr '\n' ',' | sed 's/,$//')
+              echo "SONAR_INCLUSIONS=$inclusions" >> $GITHUB_ENV
+              echo "└── Result: ✅ Will scan these files (excluding coverage-omitted files): $inclusions"
+            else
+              echo "└── Result: ✅ All changed files are excluded by coverage config, running full SonarCloud analysis"
+              # Don't set SONAR_INCLUSIONS, let it scan everything per sonar-project.properties
+            fi
+          else
+            echo "└── Result: ✅ Running SonarCloud analysis"
+          fi

      - name: Add base branch
        if: env.PR_NUMBER != ''
        run: |
          gh pr checkout ${{ env.PR_NUMBER }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract and export repo owner/name
        run: |
          REPO_SLUG="${GITHUB_REPOSITORY}"
          IFS="/" read -r REPO_OWNER REPO_NAME <<< "$REPO_SLUG"
          echo "REPO_OWNER=$REPO_OWNER" >> $GITHUB_ENV
          echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV

-      - name: SonarQube scan
-        uses: SonarSource/sonarqube-scan-action@v5
+      - name: SonarCloud Scan
+        uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          SONAR_TOKEN: ${{ secrets[format('{0}', vars.SONAR_TOKEN_SECRET_NAME)] }}
+          SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
        with:
          args: >
            -Dsonar.organization=${{ env.REPO_OWNER }}
            -Dsonar.projectKey=${{ env.REPO_OWNER }}_${{ env.REPO_NAME }}
            -Dsonar.scm.revision=${{ env.COMMIT_SHA }}
            -Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
            -Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
            -Dsonar.pullrequest.base=${{ env.PR_BASE }}
            -Dsonar.python.coverage.reportPaths=${{ env.COVERAGE_PATHS }}
            ${{ env.SONAR_INCLUSIONS && format('-Dsonar.inclusions={0}', env.SONAR_INCLUSIONS) || '' }}
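The decision script recovers the marker with `grep -m 1 '<!-- PR' | awk '{print $3}'`, which is coupled to the exact `<!-- PR N -->` token layout written by ci.yml. The same lookup sketched in Python, slightly more tolerant of whitespace (paths and fallback behavior follow the workflow; the function itself is illustrative):

```python
import re
from pathlib import Path

def pr_number_from_coverage(root: Path = Path(".")) -> str:
    """Return the PR number injected into the first coverage.xml found, or ''."""
    for report in sorted(root.rglob("coverage.xml")):
        match = re.search(r"<!--\s*PR\s+(\d+)\s*-->", report.read_text(errors="ignore"))
        if match:
            return match.group(1)
    return ""  # empty, like the workflow's `|| echo ""` fallback
```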
  sonar-branch-analysis:
    name: SonarCloud Branch Analysis
    runs-on: ubuntu-latest
    if: |
      github.event_name == 'workflow_run' &&
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.event == 'push' &&
      github.repository == 'ansible/awx'
    steps:
      - uses: actions/checkout@v4

      # Download all individual coverage artifacts from CI workflow (optional for branch pushes)
      - name: Download coverage artifacts
        continue-on-error: true
        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          workflow: CI
          run_id: ${{ github.event.workflow_run.id }}
          pattern: api-test-artifacts

      - name: Print SonarCloud Analysis Summary
        env:
          BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
        run: |
          # Find all downloaded coverage XML files
          coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
          echo "Found coverage files: $coverage_files"
          echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV

          echo "🔍 SonarCloud Analysis Summary"
          echo "=============================="
          echo "├── CI Event: ✅ Push (via workflow_run)"
          echo "├── Branch: $BRANCH_NAME"
          echo "├── Coverage Files: ${coverage_files:-none}"
          echo "├── Python Changes: ➖ N/A (Full codebase scan)"
          echo "└── Result: ✅ Proceed - \"Running SonarCloud analysis\""

      - name: SonarCloud Scan
        uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
        with:
          args: >
            -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
            -Dsonar.branch.name=${{ github.event.workflow_run.head_branch }}
            ${{ env.COVERAGE_PATHS && format('-Dsonar.python.coverage.reportPaths={0}', env.COVERAGE_PATHS) || '' }}
1 change: .gitignore (vendored)

@@ -1,6 +1,7 @@
 # Ignore generated schema
 swagger.json
 schema.json
+schema.yaml
 reference-schema.json

 # Tags
32 changes: Makefile

@@ -27,6 +27,8 @@ TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
 PARALLEL_TESTS ?= -n auto
 # collection integration test directories (defaults to all)
 COLLECTION_TEST_TARGET ?=
+# Python version for ansible-test (must be 3.11, 3.12, or 3.13)
+ANSIBLE_TEST_PYTHON_VERSION ?= 3.13
 # args for collection install
 COLLECTION_PACKAGE ?= awx
 COLLECTION_NAMESPACE ?= awx
@@ -314,20 +316,17 @@ black: reports
	@echo "fi" >> .git/hooks/pre-commit
	@chmod +x .git/hooks/pre-commit

-genschema: reports
-	$(MAKE) swagger PYTEST_ADDOPTS="--genschema --create-db "
-	mv swagger.json schema.json
-
-swagger: reports
+genschema: awx-link reports
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
-	(set -o pipefail && py.test $(COVERAGE_ARGS) $(PARALLEL_TESTS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
-	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
-	then \
-		echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
-		echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
-	fi
+	$(MANAGEMENT_COMMAND) spectacular --format openapi-json --file schema.json
+
+genschema-yaml: awx-link reports
+	@if [ "$(VENV_BASE)" ]; then \
+		. $(VENV_BASE)/awx/bin/activate; \
+	fi; \
+	$(MANAGEMENT_COMMAND) spectacular --format openapi --file schema.yaml

 check: black
@@ -431,8 +430,8 @@ test_collection_sanity:

 test_collection_integration: install_collection
	cd $(COLLECTION_INSTALL) && \
-	ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
-	ansible-test coverage xml --requirements --group-by command --group-by version
+	PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test integration --python $(ANSIBLE_TEST_PYTHON_VERSION) --coverage -vvv $(COLLECTION_TEST_TARGET) && \
+	PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test coverage xml --requirements --group-by command --group-by version
	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
	then \
		echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
@@ -537,14 +536,15 @@ docker-compose-test: awx/projects docker-compose-sources
 docker-compose-runtest: awx/projects docker-compose-sources
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

-docker-compose-build-swagger: awx/projects docker-compose-sources
-	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
+docker-compose-build-schema: awx/projects docker-compose-sources
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 make genschema

 SCHEMA_DIFF_BASE_BRANCH ?= devel
 detect-schema-change: genschema
	curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
	# Ignore differences in whitespace with -b
-	diff -u -b reference-schema.json schema.json
+	# diff exits with 1 when files differ - capture but don't fail
+	-diff -u -b reference-schema.json schema.json

 docker-compose-clean: awx/projects
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
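`detect-schema-change` relies on `diff -u -b` (whitespace-insensitive), with the leading `-` so make records the result without failing the target. For poking at a schema drift locally, roughly the same comparison can be done in Python; a sketch under the Makefile's file names (`-b` semantics are only approximated here by stripping trailing whitespace):

```python
import difflib
from pathlib import Path

def schema_diff(reference: str = "reference-schema.json", current: str = "schema.json") -> str:
    """Unified diff of the two schema files, ignoring trailing-whitespace noise."""
    def lines(path: str) -> list[str]:
        return [line.rstrip() + "\n" for line in Path(path).read_text().splitlines()]

    return "".join(difflib.unified_diff(lines(reference), lines(current), reference, current))

if __name__ == "__main__":
    print(schema_diff() or "No schema changes detected")
```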
@@ -161,16 +161,14 @@ def get_view_description(view, html=False):


 def get_default_schema():
-    if settings.DYNACONF.is_development_mode:
-        from awx.api.swagger import schema_view
-
-        return schema_view
-    else:
-        return views.APIView.schema
+    # drf-spectacular is configured via REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS']
+    # Just use the DRF default, which will pick up our CustomAutoSchema
+    return views.APIView.schema


 class APIView(views.APIView):
-    schema = get_default_schema()
+    # Schema is inherited from DRF's APIView, which uses DEFAULT_SCHEMA_CLASS
+    # No need to override it here - drf-spectacular will handle it
     versioning_class = URLPathVersioning

     def initialize_request(self, request, *args, **kwargs):
75 additions: awx/api/schema.py (new file)

@@ -0,0 +1,75 @@
import warnings

from rest_framework.permissions import IsAuthenticated
from drf_spectacular.openapi import AutoSchema
from drf_spectacular.views import (
    SpectacularAPIView,
    SpectacularSwaggerView,
    SpectacularRedocView,
)


class CustomAutoSchema(AutoSchema):
    """Custom AutoSchema to add swagger_topic to tags and handle deprecated endpoints."""

    def get_tags(self):
        tags = []
        try:
            if hasattr(self.view, 'get_serializer'):
                serializer = self.view.get_serializer()
            else:
                serializer = None
        except Exception:
            serializer = None
            warnings.warn(
                '{}.get_serializer() raised an exception during '
                'schema generation. Serializer fields will not be '
                'generated for this view.'.format(self.view.__class__.__name__)
            )

        if hasattr(self.view, 'swagger_topic'):
            tags.append(str(self.view.swagger_topic).title())
        elif serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
            tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
        elif hasattr(self.view, 'model'):
            tags.append(str(self.view.model._meta.verbose_name_plural).title())
        else:
            tags = super().get_tags()  # Use default drf-spectacular behavior

        if not tags:
            warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
            tags = ['api']  # Fallback to default value

        return tags

    def is_deprecated(self):
        """Return `True` if this operation is to be marked as deprecated."""
        return getattr(self.view, 'deprecated', False)


class AuthenticatedSpectacularAPIView(SpectacularAPIView):
    """SpectacularAPIView that requires authentication."""

    permission_classes = [IsAuthenticated]


class AuthenticatedSpectacularSwaggerView(SpectacularSwaggerView):
    """SpectacularSwaggerView that requires authentication."""

    permission_classes = [IsAuthenticated]


class AuthenticatedSpectacularRedocView(SpectacularRedocView):
    """SpectacularRedocView that requires authentication."""

    permission_classes = [IsAuthenticated]


# Schema view (returns OpenAPI schema JSON/YAML)
schema_view = AuthenticatedSpectacularAPIView.as_view()

# Swagger UI view
swagger_ui_view = AuthenticatedSpectacularSwaggerView.as_view(url_name='api:schema-json')

# ReDoc UI view
redoc_view = AuthenticatedSpectacularRedocView.as_view(url_name='api:schema-json')
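For `CustomAutoSchema` to be picked up implicitly (which is what the generics.py change above counts on), drf-spectacular must be registered as DRF's default schema class. That wiring is not part of this diff; a minimal sketch of the standard registration, with the exact AWX settings layout assumed:

```python
# Django settings sketch - standard drf-spectacular registration.
# The comment in generics.py says this lives in REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS'];
# the surrounding values here are assumptions, not copied from the repo.
REST_FRAMEWORK = {
    'DEFAULT_SCHEMA_CLASS': 'awx.api.schema.CustomAutoSchema',
}

SPECTACULAR_SETTINGS = {
    'TITLE': 'AWX API',           # assumed
    'VERSION': 'v2',              # assumed
    'SERVE_INCLUDE_SCHEMA': False,
}
```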
@@ -1,55 +0,0 @@
import warnings

from rest_framework.permissions import AllowAny
from drf_yasg import openapi
from drf_yasg.inspectors import SwaggerAutoSchema
from drf_yasg.views import get_schema_view


class CustomSwaggerAutoSchema(SwaggerAutoSchema):
    """Custom SwaggerAutoSchema to add swagger_topic to tags."""

    def get_tags(self, operation_keys=None):
        tags = []
        try:
            if hasattr(self.view, 'get_serializer'):
                serializer = self.view.get_serializer()
            else:
                serializer = None
        except Exception:
            serializer = None
            warnings.warn(
                '{}.get_serializer() raised an exception during '
                'schema generation. Serializer fields will not be '
                'generated for {}.'.format(self.view.__class__.__name__, operation_keys)
            )
        if hasattr(self.view, 'swagger_topic'):
            tags.append(str(self.view.swagger_topic).title())
        elif serializer and hasattr(serializer, 'Meta'):
            tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
        elif hasattr(self.view, 'model'):
            tags.append(str(self.view.model._meta.verbose_name_plural).title())
        else:
            tags = ['api']  # Fallback to default value

        if not tags:
            warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
        return tags

    def is_deprecated(self):
        """Return `True` if this operation is to be marked as deprecated."""
        return getattr(self.view, 'deprecated', False)


schema_view = get_schema_view(
    openapi.Info(
        title='AWX API',
        default_version='v2',
        description='AWX API Documentation',
        terms_of_service='https://www.google.com/policies/terms/',
        contact=openapi.Contact(email='contact@snippets.local'),
        license=openapi.License(name='Apache License'),
    ),
    public=True,
    permission_classes=[AllowAny],
)
@@ -1,4 +1,4 @@
 ---
 collections:
   - name: ansible.receptor
-    version: 2.0.3
+    version: 2.0.6
@@ -4,7 +4,6 @@
 from __future__ import absolute_import, unicode_literals
 from django.urls import include, re_path

 from awx import MODE
 from awx.api.generics import LoggedLoginView, LoggedLogoutView
 from awx.api.views.root import (
     ApiRootView,
@@ -148,21 +147,21 @@ v2_urls = [


 app_name = 'api'

+# Import schema views (needed for both development and testing)
+from awx.api.schema import schema_view, swagger_ui_view, redoc_view
+
 urlpatterns = [
     re_path(r'^$', ApiRootView.as_view(), name='api_root_view'),
     re_path(r'^(?P<version>(v2))/', include(v2_urls)),
     re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
     re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
+    # Schema endpoints (available in all modes for API documentation and testing)
+    re_path(r'^schema/$', schema_view, name='schema-json'),
+    re_path(r'^docs/$', swagger_ui_view, name='schema-swagger-ui'),
+    re_path(r'^redoc/$', redoc_view, name='schema-redoc'),
 ]
 if MODE == 'development':
     # Only include these if we are in the development environment
-    from awx.api.swagger import schema_view
-
     from awx.api.urls.debug import urls as debug_urls

-    urlpatterns += [
-        re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
-        re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
-        re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
-    ]
     urlpatterns += [re_path(r'^debug/', include(debug_urls))]
@@ -59,7 +59,7 @@ class ApiRootView(APIView):
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
         data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
         if MODE == 'development':
-            data['swagger'] = drf_reverse('api:schema-swagger-ui')
+            data['docs'] = drf_reverse('api:schema-swagger-ui')
         return Response(data)
@@ -9,7 +9,7 @@ from awx.main.dispatch import get_task_queuename
 logger = logging.getLogger('awx.main.scheduler')


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=300, on_duplicate='discard')
 def send_subsystem_metrics():
     DispatcherMetrics().send_metrics()
     CallbackReceiverMetrics().send_metrics()
@@ -5,6 +5,7 @@ import time
 from uuid import uuid4

 from dispatcherd.publish import submit_task
+from dispatcherd.processors.blocker import Blocker
 from dispatcherd.utils import resolve_callable

 from django_guid import get_guid
@@ -60,13 +61,17 @@ class task:
         print(f"Time I was dispatched: {dispatch_time}")
     """

-    def __init__(self, queue=None, bind_kwargs=None):
+    def __init__(self, queue=None, bind_kwargs=None, timeout=None, on_duplicate=None):
         self.queue = queue
         self.bind_kwargs = bind_kwargs
+        self.timeout = timeout
+        self.on_duplicate = on_duplicate

     def __call__(self, fn=None):
         queue = self.queue
         bind_kwargs = self.bind_kwargs
+        timeout = self.timeout
+        on_duplicate = self.on_duplicate

         class PublisherMixin(object):
             queue = None
@@ -102,7 +107,19 @@ class task:
                 if flag_enabled('FEATURE_DISPATCHERD_ENABLED'):
                     # At this point we have the import string, and submit_task wants the method, so back to that
                     actual_task = resolve_callable(cls.name)
-                    return submit_task(actual_task, args=args, kwargs=kwargs, queue=queue, uuid=uuid, **kw)
+                    processor_options = ()
+                    if on_duplicate is not None:
+                        processor_options = (Blocker.Params(on_duplicate=on_duplicate),)
+                    return submit_task(
+                        actual_task,
+                        args=args,
+                        kwargs=kwargs,
+                        queue=queue,
+                        uuid=uuid,
+                        timeout=timeout,
+                        processor_options=processor_options,
+                        **kw,
+                    )
             except Exception:
                 logger.exception(f"[DISPATCHER] Failed to check for alternative dispatcherd implementation for {cls.name}")
                 # Continue with original implementation if anything fails
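With `timeout` and `on_duplicate` plumbed through `submit_task`, task authors can bound runtime and collapse duplicate submissions right at the decorator, as the system-task hunks below do. A sketch of the call pattern (the task body and import alias are illustrative; `on_duplicate` is handed to dispatcherd's Blocker processor):

```python
from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.publish import task as task_awx  # alias as used in this diff

# timeout: hard cap in seconds before dispatcherd gives up on the run.
# on_duplicate: Blocker policy when an identical task is already queued -
#   'discard' drops the new submission, 'queue_one' keeps a single pending copy.
@task_awx(queue=get_task_queuename, timeout=600, on_duplicate='discard')
def refresh_caches():
    ...  # illustrative body
```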
@@ -159,7 +159,7 @@ def cleanup_old_indirect_host_entries() -> None:
     IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete()


-@task(queue=get_task_queuename)
+@task(queue=get_task_queuename, timeout=3600 * 5)
 def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
     try:
         job = Job.objects.get(id=job_id)
@@ -201,7 +201,7 @@ def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
     logger.exception(f'Error processing indirect host data for job_id={job_id}')


-@task(queue=get_task_queuename)
+@task(queue=get_task_queuename, timeout=3600 * 5)
 def cleanup_and_save_indirect_host_entries_fallback() -> None:
     if not flag_enabled("FEATURE_INDIRECT_NODE_COUNTING_ENABLED"):
         return
@@ -1346,7 +1346,7 @@ class RunProjectUpdate(BaseTask):
             extra_vars['scm_refspec'] = project_update.scm_refspec
         elif project_update.project.allow_override:
             # If branch is override-able, do extra fetch for all branches
-            extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*'
+            extra_vars['scm_refspec'] = '+refs/heads/*:refs/remotes/origin/*'

         if project_update.scm_type == 'archive':
             # for raw archive, prevent error moving files between volumes
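The added `+` makes the refspec a forced fetch: remote-tracking refs are updated even when the remote branch was rewritten and the update is not fast-forward, which is exactly the situation the new `git commit --amend` live test below creates. An illustrative comparison in Python around plain git commands:

```python
import subprocess

def fetch_all_branches(repo_dir: str, force: bool = True) -> subprocess.CompletedProcess:
    """Fetch every branch into remote-tracking refs.

    With force=True the refspec carries the '+' prefix, so a rewritten
    (e.g. amended) upstream branch still updates; without it, git rejects
    the non-fast-forward update and exits nonzero.
    """
    prefix = '+' if force else ''
    refspec = f'{prefix}refs/heads/*:refs/remotes/origin/*'
    return subprocess.run(['git', 'fetch', 'origin', refspec], cwd=repo_dir)
```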
@@ -852,7 +852,7 @@ def reload_receptor():
     raise RuntimeError("Receptor reload failed")


-@task_awx()
+@task_awx(on_duplicate='queue_one')
 def write_receptor_config():
     """
     This task runs async on each control node, K8S only.
@@ -875,7 +875,7 @@ def write_receptor_config():
     reload_receptor()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, on_duplicate='discard')
 def remove_deprovisioned_node(hostname):
     InstanceLink.objects.filter(source__hostname=hostname).update(link_state=InstanceLink.States.REMOVING)
     InstanceLink.objects.filter(target__instance__hostname=hostname).update(link_state=InstanceLink.States.REMOVING)
@@ -184,7 +184,7 @@ def inform_cluster_of_shutdown():
     logger.warning("Normal shutdown processed for instance %s; instance removed from capacity pool.", inst.hostname)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5)
 def migrate_jsonfield(table, pkfield, columns):
     batchsize = 10000
     with advisory_lock(f'json_migration_{table}', wait=False) as acquired:
@@ -230,7 +230,7 @@ def migrate_jsonfield(table, pkfield, columns):
     logger.warning(f"Migration of {table} to jsonb is finished.")


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
 def apply_cluster_membership_policies():
     from awx.main.signals import disable_activity_stream
@@ -342,7 +342,7 @@ def apply_cluster_membership_policies():
     logger.debug('Cluster policy computation finished in {} seconds'.format(time.time() - started_compute))


-@task_awx(queue='tower_settings_change')
+@task_awx(queue='tower_settings_change', timeout=600)
 def clear_setting_cache(setting_keys):
     # log that cache is being cleared
     logger.info(f"clear_setting_cache of keys {setting_keys}")
@@ -355,7 +355,7 @@ def clear_setting_cache(setting_keys):
     cache.delete_many(cache_keys)


-@task_awx(queue='tower_broadcast_all')
+@task_awx(queue='tower_broadcast_all', timeout=600)
 def delete_project_files(project_path):
     # TODO: possibly implement some retry logic
     lock_file = project_path + '.lock'
@@ -383,7 +383,7 @@ def profile_sql(threshold=1, minutes=1):
     logger.error('SQL QUERIES >={}s ENABLED FOR {} MINUTE(S)'.format(threshold, minutes))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=1800)
 def send_notifications(notification_list, job_id=None):
     if not isinstance(notification_list, list):
         raise TypeError("notification_list should be of type list")
@@ -428,13 +428,13 @@ def events_processed_hook(unified_job):
     save_indirect_host_entries.delay(unified_job.id)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5, on_duplicate='discard')
 def gather_analytics():
     if is_run_threshold_reached(getattr(settings, 'AUTOMATION_ANALYTICS_LAST_GATHER', None), settings.AUTOMATION_ANALYTICS_GATHER_INTERVAL):
         analytics.gather()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=600, on_duplicate='queue_one')
 def purge_old_stdout_files():
     nowtime = time.time()
     for f in os.listdir(settings.JOBOUTPUT_ROOT):
@@ -496,37 +496,18 @@ class CleanupImagesAndFiles:
     cls.run_remote(this_inst, **kwargs)


-@task_awx(queue='tower_broadcast_all')
+@task_awx(queue='tower_broadcast_all', timeout=3600)
 def handle_removed_image(remove_images=None):
     """Special broadcast invocation of this method to handle case of deleted EE"""
     CleanupImagesAndFiles.run(remove_images=remove_images, file_pattern='')


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
 def cleanup_images_and_files():
     CleanupImagesAndFiles.run(image_prune=True)


-@task_awx(queue=get_task_queuename)
-def cluster_node_health_check(node):
-    """
-    Used for the health check endpoint, refreshes the status of the instance, but must be ran on target node
-    """
-    if node == '':
-        logger.warning('Local health check incorrectly called with blank string')
-        return
-    elif node != settings.CLUSTER_HOST_ID:
-        logger.warning(f'Local health check for {node} incorrectly sent to {settings.CLUSTER_HOST_ID}')
-        return
-    try:
-        this_inst = Instance.objects.me()
-    except Instance.DoesNotExist:
-        logger.warning(f'Instance record for {node} missing, could not check capacity.')
-        return
-    this_inst.local_health_check()
-
-
-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=600, on_duplicate='queue_one')
 def execution_node_health_check(node):
     if node == '':
         logger.warning('Remote health check incorrectly called with blank string')
@@ -850,7 +831,7 @@ def _heartbeat_handle_lost_instances(lost_instances, this_inst):
     logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=1800, on_duplicate='queue_one')
 def awx_receptor_workunit_reaper():
     """
     When an AWX job is launched via receptor, files such as status, stdin, and stdout are created
@@ -896,7 +877,7 @@ def awx_receptor_workunit_reaper():
     administrative_workunit_reaper(receptor_work_list)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=1800, on_duplicate='queue_one')
 def awx_k8s_reaper():
     if not settings.RECEPTOR_RELEASE_WORK:
         return
@@ -919,7 +900,7 @@ def awx_k8s_reaper():
     logger.exception("Failed to delete orphaned pod {} from {}".format(job.log_format, group))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5, on_duplicate='discard')
 def awx_periodic_scheduler():
     lock_session_timeout_milliseconds = settings.TASK_MANAGER_LOCK_TIMEOUT * 1000
     with advisory_lock('awx_periodic_scheduler_lock', lock_session_timeout_milliseconds=lock_session_timeout_milliseconds, wait=False) as acquired:
@@ -978,7 +959,7 @@ def awx_periodic_scheduler():
     emit_channel_notification('schedules-changed', dict(id=schedule.id, group_name="schedules"))


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600)
 def handle_failure_notifications(task_ids):
     """A task-ified version of the method that sends notifications."""
     found_task_ids = set()
@@ -993,7 +974,7 @@ def handle_failure_notifications(task_ids):
     logger.warning(f'Could not send notifications for {deleted_tasks} because they were not found in the database')


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5)
 def update_inventory_computed_fields(inventory_id):
     """
     Signal handler and wrapper around inventory.update_computed_fields to
@@ -1043,7 +1024,7 @@ def update_smart_memberships_for_inventory(smart_inventory):
     return False


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='queue_one')
 def update_host_smart_inventory_memberships():
     smart_inventories = Inventory.objects.filter(kind='smart', host_filter__isnull=False, pending_deletion=False)
     changed_inventories = set([])
@@ -1059,7 +1040,7 @@ def update_host_smart_inventory_memberships():
     smart_inventory.update_computed_fields()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600 * 5)
 def delete_inventory(inventory_id, user_id, retries=5):
     # Delete inventory as user
     if user_id is None:
@@ -1121,7 +1102,7 @@ def _reconstruct_relationships(copy_mapping):
     new_obj.save()


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=600)
 def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, permission_check_func=None):
     logger.debug('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))
@@ -1176,7 +1157,7 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
     update_inventory_computed_fields.delay(new_obj.id)


-@task_awx(queue=get_task_queuename)
+@task_awx(queue=get_task_queuename, timeout=3600, on_duplicate='discard')
 def periodic_resource_sync():
     if not getattr(settings, 'RESOURCE_SERVER', None):
         logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
@@ -7,7 +7,6 @@ from django.core.serializers.json import DjangoJSONEncoder
 from django.utils.functional import Promise
 from django.utils.encoding import force_str

-from drf_yasg.codecs import OpenAPICodecJson
 import pytest

 from awx.api.versioning import drf_reverse
@@ -43,10 +42,10 @@ class TestSwaggerGeneration:
     @pytest.fixture(autouse=True, scope='function')
     def _prepare(self, get, admin):
         if not self.__class__.JSON:
-            url = drf_reverse('api:schema-swagger-ui') + '?format=openapi'
+            # drf-spectacular returns OpenAPI schema directly from schema endpoint
+            url = drf_reverse('api:schema-json') + '?format=json'
             response = get(url, user=admin)
-            codec = OpenAPICodecJson([])
-            data = codec.generate_swagger_object(response.data)
+            data = response.data
             if response.has_header('X-Deprecated-Paths'):
                 data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])
@@ -1,36 +1,30 @@
 import pytest
 from django.test import override_settings

 from flags.state import get_flags, flag_state
+from ansible_base.feature_flags.models import AAPFlag
+from ansible_base.feature_flags.utils import create_initial_data as seed_feature_flags
+from django.conf import settings
 from awx.main.models import User


-@override_settings(FLAGS={})
 @pytest.mark.django_db
 def test_feature_flags_list_endpoint(get):
-    bob = User.objects.create(username='bob', password='test_user', is_superuser=False)
-
-    url = "/api/v2/feature_flags_state/"
+    bob = User.objects.create(username='bob', password='test_user', is_superuser=True)
+    url = "/api/v2/feature_flags/states/"
     response = get(url, user=bob, expect=200)
-    assert len(response.data) == 0
+    assert len(get_flags()) > 0
+    assert len(response.data["results"]) == len(get_flags())


-@override_settings(
-    FLAGS={
-        "FEATURE_SOME_PLATFORM_FLAG_ENABLED": [
-            {"condition": "boolean", "value": False},
-            {"condition": "before date", "value": "2022-06-01T12:00Z"},
-        ],
-        "FEATURE_SOME_PLATFORM_FLAG_FOO_ENABLED": [
-            {"condition": "boolean", "value": True},
-        ],
-    }
-)
 @pytest.mark.django_db
-def test_feature_flags_list_endpoint_override(get):
-    bob = User.objects.create(username='bob', password='test_user', is_superuser=False)
+@pytest.mark.parametrize('flag_val', (True, False))
+def test_feature_flags_list_endpoint_override(get, flag_val):
+    bob = User.objects.create(username='bob', password='test_user', is_superuser=True)

-    url = "/api/v2/feature_flags_state/"
+    AAPFlag.objects.all().delete()
+    flag_name = "FEATURE_DISPATCHERD_ENABLED"
+    setattr(settings, flag_name, flag_val)
+    seed_feature_flags()
+    url = "/api/v2/feature_flags/states/"
     response = get(url, user=bob, expect=200)
-    assert len(response.data) == 2
-    assert response.data["FEATURE_SOME_PLATFORM_FLAG_ENABLED"] is False
-    assert response.data["FEATURE_SOME_PLATFORM_FLAG_FOO_ENABLED"] is True
+    assert len(response.data["results"]) == 6
+    assert flag_state(flag_name) == flag_val
@@ -93,6 +93,7 @@ def test_default_cred_types():
     'gpg_public_key',
     'hashivault_kv',
     'hashivault_ssh',
+    'hcp_terraform',
     'insights',
     'kubernetes_bearer_token',
     'net',
@@ -5,11 +5,8 @@ import signal
 import time
 import yaml
 from unittest import mock
-from copy import deepcopy

+from flags.state import disable_flag, enable_flag
 from django.utils.timezone import now as tz_now
-from django.conf import settings
-from django.test.utils import override_settings
 import pytest

 from awx.main.models import Job, WorkflowJob, Instance
@@ -302,13 +299,14 @@ class TestTaskDispatcher:
     assert str(result) == "No module named 'awx.foo'"  # noqa


 @pytest.mark.django_db
 class TestTaskPublisher:
     @pytest.fixture(autouse=True)
     def _disable_dispatcherd(self):
-        ffs = deepcopy(settings.FLAGS)
-        ffs['FEATURE_DISPATCHERD_ENABLED'][0]['value'] = False
-        with override_settings(FLAGS=ffs):
-            yield
+        flag_name = "FEATURE_DISPATCHERD_ENABLED"
+        disable_flag(flag_name)
+        yield
+        enable_flag(flag_name)

     def test_function_callable(self):
         assert add(2, 2) == 4
@@ -139,7 +139,7 @@ def podman_image_generator():

 @pytest.fixture
 def project_factory(post, default_org, admin):
-    def _rf(scm_url=None, local_path=None):
+    def _rf(scm_url=None, local_path=None, **extra_kwargs):
         proj_kwargs = {}
         if local_path:
             # manual path
@@ -153,6 +153,9 @@ def project_factory(post, default_org, admin):
         else:
             raise RuntimeError('Need to provide scm_url or local_path')

+        if extra_kwargs:
+            proj_kwargs.update(extra_kwargs)
+
         proj_kwargs['name'] = project_name
         proj_kwargs['organization'] = default_org.id
@@ -1,2 +1,25 @@
+import os
+import subprocess
+
+import pytest
+
+from awx.main.tests.live.tests.conftest import wait_for_job
+
+
 def test_git_file_project(live_tmp_folder, run_job_from_playbook):
     run_job_from_playbook('test_git_file_project', 'debug.yml', scm_url=f'file://{live_tmp_folder}/debug')
+
+
+@pytest.mark.parametrize('allow_override', [True, False])
+def test_amend_commit(live_tmp_folder, project_factory, allow_override):
+    proj = project_factory(scm_url=f'file://{live_tmp_folder}/debug', allow_override=allow_override)
+    assert proj.current_job
+    wait_for_job(proj.current_job)
+    assert proj.allow_override is allow_override
+
+    source_dir = os.path.join(live_tmp_folder, 'debug')
+    subprocess.run('git commit --amend --no-edit', cwd=source_dir, shell=True)
+
+    update = proj.update()
+    update.signal_start()
+    wait_for_job(update)
273 additions: awx/main/tests/unit/api/test_schema.py (new file)

@@ -0,0 +1,273 @@
import warnings
from unittest.mock import Mock, patch

from rest_framework.permissions import IsAuthenticated

from awx.api.schema import (
    CustomAutoSchema,
    AuthenticatedSpectacularAPIView,
    AuthenticatedSpectacularSwaggerView,
    AuthenticatedSpectacularRedocView,
)


class TestCustomAutoSchema:
    """Unit tests for CustomAutoSchema class."""

    def test_get_tags_with_swagger_topic(self):
        """Test get_tags returns swagger_topic when available."""
        view = Mock()
        view.swagger_topic = 'custom_topic'
        view.get_serializer = Mock(return_value=Mock())

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Custom_Topic']

    def test_get_tags_with_serializer_meta_model(self):
        """Test get_tags returns model verbose_name_plural from serializer."""
        # Create a mock model with verbose_name_plural
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'test models'

        # Create a mock serializer with Meta.model
        mock_serializer = Mock()
        mock_serializer.Meta.model = mock_model

        view = Mock(spec=[])  # View without swagger_topic
        view.get_serializer = Mock(return_value=mock_serializer)

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Test Models']

    def test_get_tags_with_view_model(self):
        """Test get_tags returns model verbose_name_plural from view."""
        # Create a mock model with verbose_name_plural
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'view models'

        view = Mock(spec=['model'])  # View without swagger_topic or get_serializer
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['View Models']

    def test_get_tags_without_get_serializer(self):
        """Test get_tags when view doesn't have get_serializer method."""
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'test objects'

        view = Mock(spec=['model'])
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Test Objects']

    def test_get_tags_serializer_exception_with_warning(self):
        """Test get_tags handles exception in get_serializer with warning."""
        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'fallback models'

        view = Mock(spec=['get_serializer', 'model', '__class__'])
        view.__class__.__name__ = 'TestView'
        view.get_serializer = Mock(side_effect=Exception('Serializer error'))
        view.model = mock_model

        schema = CustomAutoSchema()
        schema.view = view

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            tags = schema.get_tags()

        # Check that a warning was raised
        assert len(w) == 1
        assert 'TestView.get_serializer() raised an exception' in str(w[0].message)

        # Should still get tags from view.model
        assert tags == ['Fallback Models']

    def test_get_tags_serializer_without_meta_model(self):
        """Test get_tags when serializer doesn't have Meta.model."""
        mock_serializer = Mock(spec=[])  # No Meta attribute

        view = Mock(spec=['get_serializer'])
        view.__class__.__name__ = 'NoMetaView'
        view.get_serializer = Mock(return_value=mock_serializer)

        schema = CustomAutoSchema()
        schema.view = view

        with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=['Default Tag']) as mock_super:
            tags = schema.get_tags()
            mock_super.assert_called_once()
            assert tags == ['Default Tag']

    def test_get_tags_fallback_to_super(self):
        """Test get_tags falls back to parent class method."""
        view = Mock(spec=['get_serializer'])
        view.get_serializer = Mock(return_value=Mock(spec=[]))

        schema = CustomAutoSchema()
        schema.view = view

        with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=['Super Tag']) as mock_super:
            tags = schema.get_tags()
            mock_super.assert_called_once()
            assert tags == ['Super Tag']

    def test_get_tags_empty_with_warning(self):
        """Test get_tags returns 'api' fallback when no tags can be determined."""
        view = Mock(spec=['get_serializer'])
        view.__class__.__name__ = 'EmptyView'
        view.get_serializer = Mock(return_value=Mock(spec=[]))

        schema = CustomAutoSchema()
        schema.view = view

        with patch.object(CustomAutoSchema.__bases__[0], 'get_tags', return_value=[]):
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                tags = schema.get_tags()

        # Check that a warning was raised
        assert len(w) == 1
        assert 'Could not determine tags for EmptyView' in str(w[0].message)

        # Should fallback to 'api'
        assert tags == ['api']

    def test_get_tags_swagger_topic_title_case(self):
        """Test that swagger_topic is properly title-cased."""
        view = Mock()
        view.swagger_topic = 'multi_word_topic'
        view.get_serializer = Mock(return_value=Mock())

        schema = CustomAutoSchema()
        schema.view = view

        tags = schema.get_tags()
        assert tags == ['Multi_Word_Topic']

    def test_is_deprecated_true(self):
        """Test is_deprecated returns True when view has deprecated=True."""
        view = Mock()
        view.deprecated = True

        schema = CustomAutoSchema()
        schema.view = view

        assert schema.is_deprecated() is True

    def test_is_deprecated_false(self):
        """Test is_deprecated returns False when view has deprecated=False."""
        view = Mock()
        view.deprecated = False

        schema = CustomAutoSchema()
        schema.view = view

        assert schema.is_deprecated() is False

    def test_is_deprecated_missing_attribute(self):
        """Test is_deprecated returns False when view doesn't have deprecated attribute."""
        view = Mock(spec=[])

        schema = CustomAutoSchema()
        schema.view = view

        assert schema.is_deprecated() is False

    def test_get_tags_serializer_meta_without_model(self):
        """Test get_tags when serializer has Meta but no model attribute."""
        mock_serializer = Mock()
        mock_serializer.Meta = Mock(spec=[])  # Meta exists but no model

        mock_model = Mock()
        mock_model._meta.verbose_name_plural = 'backup models'

        view = Mock(spec=['get_serializer', 'model'])
        view.get_serializer = Mock(return_value=mock_serializer)
        view.model = mock_model

        schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
# Should fall back to view.model
|
||||
assert tags == ['Backup Models']
|
||||
|
||||
def test_get_tags_complex_scenario_exception_recovery(self):
|
||||
"""Test complex scenario where serializer fails but view.model exists."""
|
||||
mock_model = Mock()
|
||||
mock_model._meta.verbose_name_plural = 'recovery models'
|
||||
|
||||
view = Mock(spec=['get_serializer', 'model', '__class__'])
|
||||
view.__class__.__name__ = 'ComplexView'
|
||||
view.get_serializer = Mock(side_effect=ValueError('Invalid serializer'))
|
||||
view.model = mock_model
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.simplefilter("always")
|
||||
tags = schema.get_tags()
|
||||
|
||||
# Should have warned about the exception
|
||||
assert len(w) == 1
|
||||
assert 'ComplexView.get_serializer() raised an exception' in str(w[0].message)
|
||||
|
||||
# But still recovered and got tags from view.model
|
||||
assert tags == ['Recovery Models']
|
||||
|
||||
def test_get_tags_priority_order(self):
|
||||
"""Test that get_tags respects priority: swagger_topic > serializer.Meta.model > view.model."""
|
||||
# Set up a view with all three options
|
||||
mock_model_view = Mock()
|
||||
mock_model_view._meta.verbose_name_plural = 'view models'
|
||||
|
||||
mock_model_serializer = Mock()
|
||||
mock_model_serializer._meta.verbose_name_plural = 'serializer models'
|
||||
|
||||
mock_serializer = Mock()
|
||||
mock_serializer.Meta.model = mock_model_serializer
|
||||
|
||||
view = Mock()
|
||||
view.swagger_topic = 'priority_topic'
|
||||
view.get_serializer = Mock(return_value=mock_serializer)
|
||||
view.model = mock_model_view
|
||||
|
||||
schema = CustomAutoSchema()
|
||||
schema.view = view
|
||||
|
||||
tags = schema.get_tags()
|
||||
# swagger_topic should take priority
|
||||
assert tags == ['Priority_Topic']
|
||||
|
||||
|
||||
class TestAuthenticatedSchemaViews:
|
||||
"""Unit tests for authenticated schema view classes."""
|
||||
|
||||
def test_authenticated_spectacular_api_view_requires_authentication(self):
|
||||
"""Test that AuthenticatedSpectacularAPIView requires authentication."""
|
||||
assert IsAuthenticated in AuthenticatedSpectacularAPIView.permission_classes
|
||||
|
||||
def test_authenticated_spectacular_swagger_view_requires_authentication(self):
|
||||
"""Test that AuthenticatedSpectacularSwaggerView requires authentication."""
|
||||
assert IsAuthenticated in AuthenticatedSpectacularSwaggerView.permission_classes
|
||||
|
||||
def test_authenticated_spectacular_redoc_view_requires_authentication(self):
|
||||
"""Test that AuthenticatedSpectacularRedocView requires authentication."""
|
||||
assert IsAuthenticated in AuthenticatedSpectacularRedocView.permission_classes
|
||||
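
The tests above fully pin down the tag-resolution contract. For orientation, a minimal sketch of a schema class that would satisfy them — an illustration only, assuming drf-spectacular's AutoSchema and SpectacularAPIView as base classes; it is not the actual awx.api.schema source:

    import warnings

    from drf_spectacular.openapi import AutoSchema
    from drf_spectacular.views import SpectacularAPIView
    from rest_framework.permissions import IsAuthenticated


    class CustomAutoSchema(AutoSchema):
        def get_tags(self):
            # Priority 1: an explicit swagger_topic attribute on the view.
            topic = getattr(self.view, 'swagger_topic', None)
            if topic:
                return [str(topic).title()]

            # Priority 2: the serializer's Meta.model; get_serializer() is
            # guarded because it may raise during schema-only introspection.
            serializer = None
            if hasattr(self.view, 'get_serializer'):
                try:
                    serializer = self.view.get_serializer()
                except Exception:
                    warnings.warn(f'{self.view.__class__.__name__}.get_serializer() raised an exception')
            model = getattr(getattr(serializer, 'Meta', None), 'model', None)

            # Priority 3: a model attribute directly on the view.
            if model is None:
                model = getattr(self.view, 'model', None)
            if model is not None:
                return [str(model._meta.verbose_name_plural).title()]

            # Otherwise defer to the parent class, then fall back to 'api'.
            tags = super().get_tags()
            if not tags:
                warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
                return ['api']
            return tags

        def is_deprecated(self):
            return getattr(self.view, 'deprecated', False)


    class AuthenticatedSpectacularAPIView(SpectacularAPIView):
        # The schema endpoint is not anonymous; the Swagger and Redoc view
        # variants would gate access the same way.
        permission_classes = [IsAuthenticated]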
@@ -9,6 +9,9 @@ LOCAL_SETTINGS = (
    'DEBUG',
    'NAMED_URL_GRAPH',
    'DISPATCHER_MOCK_PUBLISH',
    # Platform flags are managed by the platform flags system and have environment-specific defaults
    'FEATURE_DISPATCHERD_ENABLED',
    'FEATURE_INDIRECT_NODE_COUNTING_ENABLED',
)


@@ -28,7 +31,7 @@ def test_default_settings():
            continue
        default_val = getattr(settings.default_settings, k, None)
        snapshot_val = settings.DEFAULTS_SNAPSHOT[k]
        assert default_val == snapshot_val, f'Setting for {k} does not match shapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'
        assert default_val == snapshot_val, f'Setting for {k} does not match snapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'


def test_django_conf_settings_is_awx_settings():
@@ -69,3 +72,27 @@ def test_merge_application_name():
    result = merge_application_name(settings)["DATABASES__default__OPTIONS__application_name"]
    assert result.startswith("awx-")
    assert "test-cluster" in result


def test_development_defaults_feature_flags(monkeypatch):
    """Ensure that development_defaults.py sets the correct feature flags."""
    monkeypatch.setenv('AWX_MODE', 'development')

    # Import the development_defaults module directly to trigger coverage of the new lines
    import importlib.util
    import os

    spec = importlib.util.spec_from_file_location("development_defaults", os.path.join(os.path.dirname(__file__), "../../../settings/development_defaults.py"))
    development_defaults = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(development_defaults)

    # Also import through the development settings to ensure both paths are tested
    from awx.settings.development import FEATURE_INDIRECT_NODE_COUNTING_ENABLED, FEATURE_DISPATCHERD_ENABLED

    # Verify the feature flags are set correctly in both the module and settings
    assert hasattr(development_defaults, 'FEATURE_INDIRECT_NODE_COUNTING_ENABLED')
    assert development_defaults.FEATURE_INDIRECT_NODE_COUNTING_ENABLED is True
    assert hasattr(development_defaults, 'FEATURE_DISPATCHERD_ENABLED')
    assert development_defaults.FEATURE_DISPATCHERD_ENABLED is True
    assert FEATURE_INDIRECT_NODE_COUNTING_ENABLED is True
    assert FEATURE_DISPATCHERD_ENABLED is True
@@ -461,6 +461,7 @@ class TestExtraVarSanitation(TestJobExecution):


class TestGenericRun:
    @pytest.mark.django_db(reset_sequences=True)
    def test_generic_failure(self, patch_Job, execution_environment, mock_me, mock_create_partition):
        job = Job(status='running', inventory=Inventory(), project=Project(local_path='/projects/_23_foo'))
        job.websocket_emit_status = mock.Mock()
@@ -545,6 +546,7 @@ class TestGenericRun:
        private_data_dir, extra_vars, safe_dict = call_args
        assert extra_vars['super_secret'] == "CLASSIFIED"

    @pytest.mark.django_db
    def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment, mock_me):
        job = Job(project=Project(), inventory=Inventory())
        job.execution_environment = execution_environment
@@ -845,6 +847,7 @@ class TestJobCredentials(TestJobExecution):
            [None, '0'],
        ],
    )
    @pytest.mark.django_db
    def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir, mock_me):
        task = jobs.RunJob()
        task.instance = job
@@ -901,6 +904,7 @@ class TestJobCredentials(TestJobExecution):

        assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD

    @pytest.mark.django_db
    def test_awx_task_env(self, settings, private_data_dir, job, mock_me):
        settings.AWX_TASK_ENV = {'FOO': 'BAR'}
        task = jobs.RunJob()
@@ -1086,6 +1090,70 @@ class TestProjectUpdateCredentials(TestJobExecution):
        assert env['FOO'] == 'BAR'


@pytest.mark.django_db
class TestProjectUpdateRefspec(TestJobExecution):
    @pytest.fixture
    def project_update(self, execution_environment):
        org = Organization(pk=1)
        proj = Project(pk=1, organization=org, allow_override=True)
        project_update = ProjectUpdate(pk=1, project=proj, scm_type='git')
        project_update.websocket_emit_status = mock.Mock()
        project_update.execution_environment = execution_environment
        return project_update

    def test_refspec_with_allow_override_includes_plus_prefix(self, project_update, private_data_dir, mock_me):
        """Test that refspec includes + prefix to allow non-fast-forward updates when allow_override is True"""
        task = jobs.RunProjectUpdate()
        task.instance = project_update

        # Call build_extra_vars_file which sets the refspec
        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
            task.build_extra_vars_file(project_update, private_data_dir)

        # Read the extra vars file to check the refspec
        with open(os.path.join(private_data_dir, 'env', 'extravars')) as fd:
            extra_vars = yaml.load(fd, Loader=SafeLoader)

        # Verify the refspec includes the + prefix for force updates
        assert 'scm_refspec' in extra_vars
        assert extra_vars['scm_refspec'] == '+refs/heads/*:refs/remotes/origin/*'

    def test_custom_refspec_not_overridden(self, project_update, private_data_dir, mock_me):
        """Test that custom user-provided refspec is not overridden"""
        task = jobs.RunProjectUpdate()
        task.instance = project_update
        project_update.scm_refspec = 'refs/pull/*/head:refs/remotes/origin/pr/*'

        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
            task.build_extra_vars_file(project_update, private_data_dir)

        with open(os.path.join(private_data_dir, 'env', 'extravars')) as fd:
            extra_vars = yaml.load(fd, Loader=SafeLoader)

        # Custom refspec should be preserved
        assert extra_vars['scm_refspec'] == 'refs/pull/*/head:refs/remotes/origin/pr/*'

    def test_no_refspec_without_allow_override(self, execution_environment, private_data_dir, mock_me):
        """Test that no refspec is set when allow_override is False"""
        org = Organization(pk=1)
        proj = Project(pk=1, organization=org, allow_override=False)
        project_update = ProjectUpdate(pk=1, project=proj, scm_type='git')
        project_update.websocket_emit_status = mock.Mock()
        project_update.execution_environment = execution_environment

        task = jobs.RunProjectUpdate()
        task.instance = project_update

        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
            task.build_extra_vars_file(project_update, private_data_dir)

        with open(os.path.join(private_data_dir, 'env', 'extravars')) as fd:
            extra_vars = yaml.load(fd, Loader=SafeLoader)

        # No refspec should be set
        assert 'scm_refspec' not in extra_vars
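
The three refspec tests encode a small decision table. A hypothetical helper capturing the same branches (names are illustrative; in AWX the logic lives inside RunProjectUpdate.build_extra_vars_file):

    def select_refspec(project, project_update):
        """Sketch of the scm_refspec decision the tests above exercise."""
        if project_update.scm_refspec:
            # A user-provided refspec always wins.
            return project_update.scm_refspec
        if project.allow_override:
            # The leading '+' marks the fetch as forced, so rebased or
            # force-pushed remote branches still update local tracking refs.
            return '+refs/heads/*:refs/remotes/origin/*'
        # No override allowed: leave scm_refspec unset entirely.
        return None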


class TestInventoryUpdateCredentials(TestJobExecution):
    @pytest.fixture
    def inventory_update(self, execution_environment):
@@ -139,7 +139,7 @@ def construct_rsyslog_conf_template(settings=settings):
    return tmpl


@task_awx(queue='rsyslog_configurer')
@task_awx(queue='rsyslog_configurer', timeout=600, on_duplicate='queue_one')
def reconfigure_rsyslog():
    tmpl = construct_rsyslog_conf_template()
    # Write config to a temp file then move it to preserve atomicity
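
The atomicity comment refers to the classic write-to-temp-then-rename pattern; a generic sketch (the helper name and call site are assumptions, not the AWX implementation):

    import os
    import tempfile

    def write_file_atomically(content: str, dest: str) -> None:
        # Create the temp file in the destination directory so the final
        # os.replace() is a same-filesystem rename, which is atomic: readers
        # see either the old config or the new one, never a partial write.
        fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(dest))
        try:
            with os.fdopen(fd, 'w') as f:
                f.write(content)
            os.replace(tmp_path, dest)
        except BaseException:
            os.unlink(tmp_path)
            raise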
@@ -1,8 +1,14 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
from ansible_base.rbac.models import RoleDefinition
from ansible_base.resource_registry.shared_types import RoleDefinitionType

from ansible_base.resource_registry.shared_types import (
    FeatureFlagType,
    RoleDefinitionType,
    OrganizationType,
    TeamType,
    UserType,
)
from ansible_base.feature_flags.models import AAPFlag
from awx.main import models


@@ -15,7 +21,11 @@ RESOURCE_LIST = (
        models.Organization,
        shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
    ),
    ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
    ResourceConfig(
        models.User,
        shared_resource=SharedResource(serializer=UserType, is_provider=False),
        name_field="username",
    ),
    ResourceConfig(
        models.Team,
        shared_resource=SharedResource(serializer=TeamType, is_provider=False),
@@ -25,4 +35,8 @@ RESOURCE_LIST = (
        RoleDefinition,
        shared_resource=SharedResource(serializer=RoleDefinitionType, is_provider=False),
    ),
    ResourceConfig(
        AAPFlag,
        shared_resource=SharedResource(serializer=FeatureFlagType, is_provider=False),
    ),
)
@@ -8,7 +8,6 @@ from ansible_base.lib.dynamic_config import (
    load_envvars,
    load_python_file_with_injected_context,
    load_standard_settings_files,
    toggle_feature_flags,
)
from .functions import (
    assert_production_settings,
@@ -71,12 +70,5 @@ DYNACONF.update(
    merge=True,
)

# Toggle feature flags based on installer settings
DYNACONF.update(
    toggle_feature_flags(DYNACONF),
    loader_identifier="awx.settings:toggle_feature_flags",
    merge=True,
)

# Update django.conf.settings with DYNACONF values
export(__name__, DYNACONF)
@@ -83,7 +83,7 @@ USE_I18N = True
USE_TZ = True

STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'ui', 'build', 'static'),
    os.path.join(BASE_DIR, 'ui', 'build'),
    os.path.join(BASE_DIR, 'static'),
]

@@ -375,15 +375,13 @@ REST_FRAMEWORK = {
    'VIEW_DESCRIPTION_FUNCTION': 'awx.api.generics.get_view_description',
    'NON_FIELD_ERRORS_KEY': '__all__',
    'DEFAULT_VERSION': 'v2',
    # For swagger schema generation
    # For OpenAPI schema generation with drf-spectacular
    # see https://github.com/encode/django-rest-framework/pull/6532
    'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.AutoSchema',
    'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
    # 'URL_FORMAT_OVERRIDE': None,
}

SWAGGER_SETTINGS = {
    'DEFAULT_AUTO_SCHEMA_CLASS': 'awx.api.swagger.CustomSwaggerAutoSchema',
}
# SWAGGER_SETTINGS removed - migrated to drf-spectacular (see SPECTACULAR_SETTINGS below)

AUTHENTICATION_BACKENDS = ('awx.main.backends.AWXModelBackend',)

@@ -1036,7 +1034,44 @@ ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
ANSIBLE_BASE_PERMISSION_MODEL = 'main.Permission'

# Defaults to be overridden by DAB
SPECTACULAR_SETTINGS = {}
SPECTACULAR_SETTINGS = {
    'TITLE': 'AWX API',
    'DESCRIPTION': 'AWX API Documentation',
    'VERSION': 'v2',
    'OAS_VERSION': '3.0.3',  # Set OpenAPI Specification version to 3.0.3
    'SERVE_INCLUDE_SCHEMA': False,
    'SCHEMA_PATH_PREFIX': r'/api/v[0-9]',
    'DEFAULT_GENERATOR_CLASS': 'drf_spectacular.generators.SchemaGenerator',
    'SCHEMA_COERCE_PATH_PK_SUFFIX': True,
    'CONTACT': {'email': 'controller-eng@redhat.com'},
    'LICENSE': {'name': 'Apache License'},
    'TERMS_OF_SERVICE': 'https://www.google.com/policies/terms/',
    # Use our custom schema class that handles swagger_topic and deprecated views
    'DEFAULT_SCHEMA_CLASS': 'awx.api.schema.CustomAutoSchema',
    'COMPONENT_SPLIT_REQUEST': True,
    'SWAGGER_UI_SETTINGS': {
        'deepLinking': True,
        'persistAuthorization': True,
        'displayOperationId': True,
    },
    # Resolve enum naming collisions with meaningful names
    'ENUM_NAME_OVERRIDES': {
        # Status field collisions
        'Status4e1Enum': 'UnifiedJobStatusEnum',
        'Status876Enum': 'JobStatusEnum',
        # Job type field collisions
        'JobType8b8Enum': 'JobTemplateJobTypeEnum',
        'JobType95bEnum': 'AdHocCommandJobTypeEnum',
        'JobType963Enum': 'ProjectUpdateJobTypeEnum',
        # Verbosity field collisions
        'Verbosity481Enum': 'JobVerbosityEnum',
        'Verbosity8cfEnum': 'InventoryUpdateVerbosityEnum',
        # Event field collision
        'Event4d3Enum': 'JobEventEnum',
        # Kind field collision
        'Kind362Enum': 'InventoryKindEnum',
    },
}
OAUTH2_PROVIDER = {}

# Add a postfix to the API URL patterns
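
Two notes on the block above. With SPECTACULAR_SETTINGS defined, the schema can be rendered offline via drf-spectacular's stock management command (a usage sketch; the output filename is arbitrary):

    python manage.py spectacular --file schema.yaml --validate

ENUM_NAME_OVERRIDES is needed because drf-spectacular derives enum component names from field names; two unrelated 'status' (or 'job_type', 'verbosity', ...) choice sets would otherwise collide, and the generator disambiguates them with opaque hash-suffixed names such as Status4e1Enum. The overrides replace those with stable, meaningful names.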
@@ -1113,11 +1148,8 @@ OPA_REQUEST_TIMEOUT = 1.5  # The number of seconds after which the connection to
OPA_REQUEST_RETRIES = 2  # The number of retry attempts for connecting to the OPA server. Default is 2.

# feature flags
FLAG_SOURCES = ('flags.sources.SettingsFlagsSource',)
FLAGS = {
    'FEATURE_INDIRECT_NODE_COUNTING_ENABLED': [{'condition': 'boolean', 'value': False}],
    'FEATURE_DISPATCHERD_ENABLED': [{'condition': 'boolean', 'value': False}],
}
FEATURE_INDIRECT_NODE_COUNTING_ENABLED = False
FEATURE_DISPATCHERD_ENABLED = False

# Dispatcher worker lifetime. If set to None, workers will never be retired
# based on age. Note workers will finish their last task before retiring if
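
After this hunk a feature-flag check is a plain settings read rather than a django-flags condition lookup; a minimal sketch (the helper is hypothetical, not AWX code):

    from django.conf import settings

    def dispatcherd_enabled() -> bool:
        # The flag is now an ordinary boolean setting with
        # environment-specific defaults (False here, True in development).
        return bool(getattr(settings, 'FEATURE_DISPATCHERD_ENABLED', False))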
@@ -11,8 +11,6 @@ import socket
# /usr/lib64/python/mimetypes.py
import mimetypes

from dynaconf import post_hook

# awx-manage shell_plus --notebook
NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '9888', '--allow-root', '--no-browser']

@@ -41,11 +39,14 @@ PENDO_TRACKING_STATE = "off"
INSIGHTS_TRACKING_STATE = False

# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
INSTALLED_APPS = "@merge drf_yasg,debug_toolbar"
INSTALLED_APPS = "@merge drf_spectacular,debug_toolbar"
MIDDLEWARE = "@insert 0 debug_toolbar.middleware.DebugToolbarMiddleware"

DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}

# drf-spectacular settings for API schema generation
# SPECTACULAR_SETTINGS moved to defaults.py so it's available in all environments

# Configure a default UUID for development only.
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
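
The "@merge ..." and "@insert 0 ..." strings above are dynaconf list tokens: instead of replacing INSTALLED_APPS or MIDDLEWARE wholesale, the development file appends to (or prepends into) the lists already loaded from the defaults. A hand-rolled illustration of the @merge semantics (a sketch with a hypothetical base list, not dynaconf internals):

    # Stand-in for the INSTALLED_APPS assembled from the default settings.
    base_installed_apps = ['django.contrib.contenttypes', 'awx.main']
    merge_token = "@merge drf_spectacular,debug_toolbar"

    if merge_token.startswith("@merge "):
        extras = [app.strip() for app in merge_token[len("@merge "):].split(",")]
        installed_apps = base_installed_apps + extras

    assert installed_apps[-2:] == ['drf_spectacular', 'debug_toolbar']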
@@ -67,11 +68,5 @@ AWX_DISABLE_TASK_MANAGERS = False
# Needed for launching runserver in debug mode
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================


# This modifies FLAGS set by defaults, must be deferred to run later
@post_hook
def set_dev_flags(settings):
    defaults_flags = settings.get("FLAGS", {})
    defaults_flags['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}]
    defaults_flags['FEATURE_DISPATCHERD_ENABLED'] = [{'condition': 'boolean', 'value': True}]
    return {'FLAGS': defaults_flags}
FEATURE_INDIRECT_NODE_COUNTING_ENABLED = True
FEATURE_DISPATCHERD_ENABLED = True
30 licenses/drf-spectacular.txt Normal file
@@ -0,0 +1,30 @@
Copyright © 2011-present, Encode OSS Ltd.
Copyright © 2019-2021, T. Franzel <tfranzel@gmail.com>, Cashlink Technologies GmbH.
Copyright © 2021-present, T. Franzel <tfranzel@gmail.com>.

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 licenses/uritemplate.txt Normal file
@@ -0,0 +1,23 @@
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
   derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
10 pytest.ini
@@ -23,7 +23,8 @@ filterwarnings =

    # NOTE: the following are present using python 3.11
    # FIXME: Set `USE_TZ` to `True`.
    once:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.:django.utils.deprecation.RemovedInDjango50Warning:django.conf
    # Note: RemovedInDjango50Warning may not exist in newer Django versions
    ignore:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.

    # FIXME: Delete this entry once `pyparsing` is updated.
    once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite
@@ -46,11 +47,12 @@ filterwarnings =
    once:DateTimeField User.date_joined received a naive datetime .2020-01-01 00.00.00. while time zone support is active.:RuntimeWarning:django.db.models.fields

    # FIXME: Delete this entry once the deprecation is acted upon.
    once:'index_together' is deprecated. Use 'Meta.indexes' in 'main.\w+' instead.:django.utils.deprecation.RemovedInDjango51Warning:django.db.models.options
    # Note: RemovedInDjango51Warning may not exist in newer Django versions
    ignore:'index_together' is deprecated. Use 'Meta.indexes' in 'main.\w+' instead.

    # FIXME: Update `awx.main.migrations._dab_rbac` and delete this entry.
    # once:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.:django.utils.deprecation.RemovedInDjango50Warning:django.db.models.query
    once:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.:django.utils.deprecation.RemovedInDjango50Warning:awx.main.migrations._dab_rbac
    # Note: RemovedInDjango50Warning may not exist in newer Django versions
    ignore:Using QuerySet.iterator.. after prefetch_related.. without specifying chunk_size is deprecated.

    # FIXME: Delete this entry once the **broken** always-true assertions in the
    # FIXME: following tests are fixed:
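
The once-to-ignore rewrites above rely on the layout of a pytest filterwarnings entry, which follows Python's -W warning-filter syntax with trailing fields optional:

    # action:message-regex:category:module-regex:lineno
    # A filter that names RemovedInDjango50Warning requires that class to be
    # importable at collection time; an 'ignore:<message>' filter with the
    # category and module fields omitted matches the message text no matter
    # which warning class emits it, so it survives Django upgrades.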
@@ -13,7 +13,7 @@ cryptography
Cython
daphne
distro
django==4.2.21 # CVE-2025-32873
django==4.2.26 # CVE-2025-32873
django-cors-headers
django-crum
django-extensions
@@ -22,6 +22,7 @@ django-polymorphic
django-solo
djangorestframework==3.15.2 # upgrading to 3.16+ throws NOT_REQUIRED_DEFAULT error on required fields in serializer that have no default
djangorestframework-yaml
drf-spectacular>=0.27.0
dynaconf
filelock
GitPython>=3.1.37 # CVE-2023-41040
@@ -8,7 +8,7 @@ aiohappyeyeballs==2.6.1
    # via aiohttp
aiohttp[speedups]==3.13.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   aiohttp-retry
    #   opa-python-client
    #   twilio
@@ -17,21 +17,18 @@ aiohttp-retry==2.9.1
aiosignal==1.4.0
    # via aiohttp
ansi2html==1.9.2
    # via -r /awx_devel/requirements/requirements.in
# git+https://github.com/ansible/ansible-runner.git@devel # git requirements installed separately
    # via -r /awx_devel/requirements/requirements_git.txt
    # via -r requirements.in
asciichartpy==1.5.25
    # via -r /awx_devel/requirements/requirements.in
asgiref==3.10.0
    # via -r requirements.in
asgiref==3.11.0
    # via
    #   channels
    #   channels-redis
    #   daphne
    #   django
    #   django-ansible-base
    #   django-cors-headers
asn1==3.1.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
attrs==25.4.0
    # via
    #   aiohttp
@@ -45,12 +42,6 @@ autocommand==2.2.2
    # via jaraco-text
automat==25.4.16
    # via twisted
# awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel # git requirements installed separately
    # via -r /awx_devel/requirements/requirements_git.txt
awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git
    # via
    #   -r /awx_devel/requirements/requirements_git.txt
    #   awx-plugins-core
azure-core==1.35.1
    # via
    #   azure-identity
@@ -59,31 +50,28 @@ azure-core==1.35.1
    #   azure-keyvault-secrets
    #   msrest
azure-identity==1.25.1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
azure-keyvault==4.2.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
azure-keyvault-certificates==4.10.0
    # via azure-keyvault
azure-keyvault-keys==4.11.0
    # via azure-keyvault
azure-keyvault-secrets==4.10.0
    # via azure-keyvault
backports-tarfile==1.2.0
    # via jaraco-context
boto3==1.40.46
    # via -r /awx_devel/requirements/requirements.in
botocore==1.40.46
boto3==1.41.3
    # via -r requirements.in
botocore==1.41.3
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   boto3
    #   s3transfer
brotli==1.1.0
    # via aiohttp
cachetools==6.2.0
    # via google-auth
# git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
certifi==2025.11.12
    # via
    #   -r /awx_devel/requirements/requirements_git.txt
    #   kubernetes
    #   msrest
    #   requests
@@ -94,41 +82,39 @@ cffi==2.0.0
    #   pynacl
channels==4.3.1
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   channels-redis
channels-redis==4.3.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
charset-normalizer==3.4.3
    # via requests
click==8.1.8
    # via receptorctl
constantly==23.10.4
    # via twisted
cryptography==46.0.2
cryptography==46.0.3
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   adal
    #   autobahn
    #   azure-identity
    #   azure-keyvault-keys
    #   django-ansible-base
    #   msal
    #   pyjwt
    #   pyopenssl
    #   service-identity
cython==3.1.3
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
daphne==4.2.1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
dispatcherd==2025.5.21
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
distro==1.9.0
    # via -r /awx_devel/requirements/requirements.in
django==4.2.21
    # via -r requirements.in
django==4.2.26
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   channels
    #   django-ansible-base
    #   django-cors-headers
    #   django-crum
    #   django-extensions
@@ -137,42 +123,37 @@ django==4.2.21
    #   django-polymorphic
    #   django-solo
    #   djangorestframework
# django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
    # via -r /awx_devel/requirements/requirements_git.txt
    #   drf-spectacular
django-cors-headers==4.9.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
django-crum==0.7.9
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   django-ansible-base
    # via -r requirements.in
django-extensions==4.1
    # via -r /awx_devel/requirements/requirements.in
django-flags==5.0.14
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   django-ansible-base
    # via -r requirements.in
django-flags==5.1.0
    # via -r requirements.in
django-guid==3.5.2
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
django-polymorphic==4.1.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
django-solo==2.4.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
djangorestframework==3.15.2
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   django-ansible-base
    #   -r requirements.in
    #   drf-spectacular
djangorestframework-yaml==2.0.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
drf-spectacular==0.29.0
    # via -r requirements.in
durationpy==0.10
    # via kubernetes
dynaconf==3.2.11
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   django-ansible-base
dynaconf==3.2.12
    # via -r requirements.in
enum-compat==0.0.3
    # via asn1
filelock==3.19.1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
frozenlist==1.8.0
    # via
    #   aiohttp
@@ -180,7 +161,7 @@ frozenlist==1.8.0
gitdb==4.0.12
    # via gitpython
gitpython==3.1.45
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
google-auth==2.41.1
    # via kubernetes
googleapis-common-protos==1.70.0
@@ -189,7 +170,7 @@ googleapis-common-protos==1.70.0
    #   opentelemetry-exporter-otlp-proto-http
grpcio==1.75.1
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   opentelemetry-exporter-otlp-proto-grpc
hiredis==3.2.1
    # via redis
@@ -199,21 +180,19 @@ hyperlink==21.0.0
    #   twisted
idna==3.10
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   hyperlink
    #   requests
    #   twisted
    #   yarl
importlib-metadata==8.7.0
    # via opentelemetry-api
importlib-resources==6.5.2
    # via irc
incremental==24.7.2
    # via twisted
inflection==0.5.1
    # via django-ansible-base
    # via drf-spectacular
irc==20.5.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
isodate==0.7.2
    # via
    #   azure-keyvault-certificates
@@ -238,17 +217,19 @@ jaraco-text==4.0.0
    #   irc
    #   jaraco-collections
jinja2==3.1.6
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
jmespath==1.0.1
    # via
    #   boto3
    #   botocore
jq==1.10.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
json-log-formatter==1.1.1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
jsonschema==4.25.1
    # via -r /awx_devel/requirements/requirements.in
    # via
    #   -r requirements.in
    #   drf-spectacular
jsonschema-specifications==2025.9.1
    # via jsonschema
kubernetes==34.1.0
@@ -256,11 +237,11 @@ kubernetes==34.1.0
lockfile==0.12.2
    # via python-daemon
markdown==3.9
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
markupsafe==3.0.3
    # via jinja2
maturin==1.9.6
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
more-itertools==10.8.0
    # via
    #   irc
@@ -275,12 +256,12 @@ msal-extensions==1.3.1
    # via azure-identity
msgpack==1.1.1
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   channels-redis
msrest==0.7.1
    # via msrestazure
msrestazure==0.6.4.post1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
multidict==6.7.0
    # via
    #   aiohttp
@@ -288,12 +269,12 @@ multidict==6.7.0
oauthlib==3.3.1
    # via requests-oauthlib
opa-python-client==2.0.2
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
openshift==0.13.2
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
opentelemetry-api==1.37.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-instrumentation
@@ -301,7 +282,7 @@ opentelemetry-api==1.37.0
    #   opentelemetry-sdk
    #   opentelemetry-semantic-conventions
opentelemetry-exporter-otlp==1.37.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
opentelemetry-exporter-otlp-proto-common==1.37.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
@@ -313,7 +294,7 @@ opentelemetry-exporter-otlp-proto-http==1.37.0
opentelemetry-instrumentation==0.58b0
    # via opentelemetry-instrumentation-logging
opentelemetry-instrumentation-logging==0.58b0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
opentelemetry-proto==1.37.0
    # via
    #   opentelemetry-exporter-otlp-proto-common
@@ -321,7 +302,7 @@ opentelemetry-proto==1.37.0
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.37.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.58b0
@@ -330,33 +311,30 @@ opentelemetry-semantic-conventions==0.58b0
    #   opentelemetry-sdk
packaging==25.0
    # via
    #   ansible-runner
    #   django-guid
    #   opentelemetry-instrumentation
    #   setuptools-scm
pbr==7.0.1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
pexpect==4.9.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   ansible-runner
    # via -r requirements.in
pkgconfig==1.5.5
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
prometheus-client==0.23.1
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
propcache==0.4.0
    # via
    #   aiohttp
    #   yarl
protobuf==6.32.1
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   googleapis-common-protos
    #   opentelemetry-proto
psutil==7.1.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
psycopg==3.2.10
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
ptyprocess==0.7.0
    # via pexpect
pyasn1==0.6.1
@@ -373,15 +351,12 @@ pycares==4.11.0
pycparser==2.23
    # via cffi
pygerduty==0.38.3
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
pygithub==2.8.1
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   awx-plugins-core
    # via -r requirements.in
pyjwt[crypto]==2.10.1
    # via
    #   adal
    #   django-ansible-base
    #   msal
    #   pygithub
    #   twilio
@@ -389,14 +364,12 @@ pynacl==1.6.0
    # via pygithub
pyopenssl==25.3.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   twisted
pyparsing==2.4.7
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
python-daemon==3.1.2
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   ansible-runner
    # via -r requirements.in
python-dateutil==2.9.0.post0
    # via
    #   adal
@@ -405,28 +378,28 @@ python-dateutil==2.9.0.post0
    #   receptorctl
    #   tempora
python-dsv-sdk==1.0.4
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
python-string-utils==1.0.0
    # via openshift
python-tss-sdk==2.0.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
pytz==2025.2
    # via irc
pyyaml==6.0.3
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   ansible-runner
    #   -r requirements.in
    #   dispatcherd
    #   djangorestframework-yaml
    #   drf-spectacular
    #   kubernetes
    #   receptorctl
pyzstd==0.18.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
receptorctl==1.6.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
redis[hiredis]==6.4.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   channels-redis
referencing==0.36.2
    # via
@@ -434,10 +407,9 @@ referencing==0.36.2
    #   jsonschema-specifications
requests==2.32.5
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   adal
    #   azure-core
    #   django-ansible-base
    #   kubernetes
    #   msal
    #   msrest
@@ -458,16 +430,16 @@ rpds-py==0.27.1
    #   referencing
rsa==4.9.1
    # via google-auth
s3transfer==0.14.0
s3transfer==0.15.0
    # via boto3
semantic-version==2.10.0
    # via setuptools-rust
service-identity==24.2.0
    # via twisted
setuptools-rust==1.10.2
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
setuptools-scm[toml]==8.1.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
six==1.17.0
    # via
    #   azure-core
@@ -477,29 +449,27 @@ six==1.17.0
    #   pygerduty
    #   python-dateutil
slack-sdk==3.37.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
smmap==5.0.2
    # via gitdb
sqlparse==0.5.3
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   django
    #   django-ansible-base
tempora==5.8.1
    # via
    #   irc
    #   jaraco-logging
twilio==9.8.3
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
twisted[tls]==25.5.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   daphne
txaio==25.9.2
    # via autobahn
typing-extensions==4.15.0
    # via
    #   aiosignal
    #   azure-core
    #   azure-identity
    #   azure-keyvault-certificates
@@ -511,28 +481,25 @@ typing-extensions==4.15.0
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-sdk
    #   opentelemetry-semantic-conventions
    #   psycopg
    #   pygithub
    #   pyopenssl
    #   pyzstd
    #   referencing
    #   twisted
uritemplate==4.2.0
    # via drf-spectacular
urllib3==2.3.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   botocore
    #   django-ansible-base
    #   kubernetes
    #   pygithub
    #   requests
uwsgi==2.0.30
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
uwsgitop==0.12
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
websocket-client==1.8.0
    # via kubernetes
wheel==0.42.0
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
wrapt==1.17.3
    # via opentelemetry-instrumentation
yarl==1.22.0
@@ -541,15 +508,13 @@ zipp==3.23.0
    # via importlib-metadata
zope-interface==8.0.1
    # via twisted
zstandard==0.25.0
    # via aiohttp

# The following packages are considered to be unsafe in a requirements file:
pip==21.2.4
    # via -r /awx_devel/requirements/requirements.in
    # via -r requirements.in
setuptools==80.9.0
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   -r requirements.in
    #   asciichartpy
    #   autobahn
    #   incremental
@@ -1,7 +1,7 @@
build
django-debug-toolbar==3.2.4
django-test-migrations
drf-yasg<1.21.10 # introduces new DeprecationWarning that is turned into error
drf-spectacular>=0.27.0 # Modern OpenAPI 3.0 schema generator
# pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed
ipython>=7.31.1 # https://github.com/ansible/awx/security/dependabot/30
unittest2