Compare commits

..

3 Commits

Author        SHA1        Message                                             Date
Elijah DeLee  19e3cba35c  Merge branch 'devel' into x-request-id              2024-04-24 15:04:13 -05:00
Elijah DeLee  c11ff49a56  fixup syntax                                        2024-04-24 14:48:02 -05:00
Elijah DeLee  51bcf82cf4  include x-request-id header in perf log if exists   2024-04-24 13:51:42 -05:00
3497 changed files with 384001 additions and 28438 deletions


@@ -1,57 +0,0 @@
---
codecov:
notify:
after_n_builds: 9 # Number of test matrix+lint jobs uploading coverage
wait_for_ci: false
require_ci_to_pass: false
token: >- # repo-scoped, upload-only, needed for stability in PRs from forks
2b8c7a7a-7293-4a00-bf02-19bd55a1389b
comment:
require_changes: true
coverage:
range: 100..100
status:
patch:
default:
target: 100%
pytest:
target: 100%
flags:
- pytest
typing:
flags:
- MyPy
project:
default:
target: 75%
lib:
flags:
- pytest
paths:
- awx/
target: 75%
tests:
flags:
- pytest
paths:
- tests/
- >-
**/test/
- >-
**/tests/
- >-
**/test/**
- >-
**/tests/**
target: 95%
typing:
flags:
- MyPy
target: 100%
...
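The flags defined above only take effect when a CI upload tags its coverage report with a matching flag name. A minimal sketch of the kind of upload step this config assumes; the action version and report path here are illustrative, not taken from this diff:

- name: Upload coverage tagged for the pytest flag
  uses: codecov/codecov-action@v4
  with:
    files: reports/coverage.xml     # report written by the test run
    flags: pytest                   # must match a flag name in codecov.yml
    token: ${{ secrets.CODECOV_TOKEN }}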


@@ -1,6 +1,16 @@
[run]
source = awx
branch = True
omit =
awx/main/migrations/*
awx/lib/site-packages/*
[report]
# Regexes for lines to exclude from consideration
exclude_also =
exclude_lines =
# Have to re-enable the standard pragma
pragma: no cover
# Don't complain about missing debug-only code:
def __repr__
if self\.debug
@@ -13,35 +23,7 @@ exclude_also =
if 0:
if __name__ == .__main__.:
^\s*@pytest\.mark\.xfail
[run]
branch = True
# NOTE: `disable_warnings` is needed when `pytest-cov` runs in tandem
# NOTE: with `pytest-xdist`. These warnings are false negatives in this
# NOTE: context.
#
# NOTE: It's `coveragepy` that emits the warnings and previously they
# NOTE: wouldn't get on the radar of `pytest`'s `filterwarnings`
# NOTE: mechanism. This changed, however, with `pytest >= 8.4`. And
# NOTE: since we set `filterwarnings = error`, those warnings are being
# NOTE: raised as exceptions, cascading into `pytest`'s internals and
# NOTE: causing tracebacks and crashes of the test sessions.
#
# Ref:
# * https://github.com/pytest-dev/pytest-cov/issues/693
# * https://github.com/pytest-dev/pytest-cov/pull/695
# * https://github.com/pytest-dev/pytest-cov/pull/696
disable_warnings =
module-not-measured
omit =
awx/main/migrations/*
awx/settings/defaults.py
awx/settings/*_defaults.py
source =
.
source_pkgs =
awx
ignore_errors = True
[xml]
output = ./reports/coverage.xml


@@ -1,3 +1,3 @@
# Community Code of Conduct
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).


@@ -13,7 +13,7 @@ body:
attributes:
label: Please confirm the following
options:
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
required: true
- label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
required: true


@@ -5,7 +5,7 @@ contact_links:
url: https://github.com/ansible/awx#get-involved
about: For general debugging or technical support please see the Get Involved section of our readme.
- name: 📝 Ansible Code of Conduct
url: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser
about: AWX uses the Ansible Code of Conduct; ❤ Be nice to other members of the community. ☮ Behave.
- name: 💼 For Enterprise
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser


@@ -13,7 +13,7 @@ body:
attributes:
label: Please confirm the following
options:
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
- label: I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
required: true
- label: I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
required: true


@@ -4,8 +4,7 @@
<!---
If you are fixing an existing issue, please include "related #nnn" in your
commit message and your description; but you should still explain what
the change does. Also, if this PR has an attached JIRA, please put AAP-<number>
as the first entry in your PR title.
the change does.
-->
##### ISSUE TYPE
@@ -17,11 +16,17 @@ in as the first entry for your PR title.
##### COMPONENT NAME
<!--- Name of the module/plugin/task -->
- API
- UI
- Collection
- CLI
- Docs
- Other
##### AWX VERSION
<!--- Paste verbatim output from `make VERSION` between quotes below -->
```
```
##### ADDITIONAL INFORMATION


@@ -4,13 +4,13 @@ inputs:
github-token:
description: GitHub Token for registry access
required: true
private-github-key:
description: GitHub private key for private repositories
required: false
default: ''
runs:
using: composite
steps:
- name: Get python version from Makefile
shell: bash
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Set lower case owner name
shell: bash
run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
@@ -22,21 +22,13 @@ runs:
run: |
echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- uses: ./.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ inputs.private-github-key }}
- name: Pre-pull latest devel image to warm cache
shell: bash
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
docker pull -q `make print-DEVEL_IMAGE_NAME`
continue-on-error: true
run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
- name: Build image for current source checkout
shell: bash
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
COMPOSE_TAG=${{ github.base_ref }} \
make docker-compose-build
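A usage sketch for this composite action, mirroring how the CI jobs later in this diff invoke it (the secret names are the ones those jobs use):

- uses: actions/checkout@v4
- name: Build awx_devel image
  uses: ./.github/actions/awx_devel_image
  with:
    github-token: ${{ secrets.GITHUB_TOKEN }}
    private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}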


@@ -9,34 +9,24 @@ inputs:
required: false
default: false
type: boolean
private-github-key:
description: GitHub private key for private repositories
required: false
default: ''
outputs:
ip:
description: The IP of the tools_awx_1 container
value: ${{ steps.data.outputs.ip }}
admin-token:
description: OAuth token for admin user
value: ${{ steps.data.outputs.admin_token }}
runs:
using: composite
steps:
- name: Disable apparmor for rsyslogd, first step
shell: bash
run: sudo ln -s /etc/apparmor.d/usr.sbin.rsyslogd /etc/apparmor.d/disable/
- name: Disable apparmor for rsyslogd, second step
shell: bash
run: sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.rsyslogd
- name: Build awx_devel image for running checks
uses: ./.github/actions/awx_devel_image
with:
github-token: ${{ inputs.github-token }}
private-github-key: ${{ inputs.private-github-key }}
- name: Upgrade ansible-core
shell: bash
run: python -m pip install --upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core
- name: Install system deps
shell: bash
@@ -46,10 +36,8 @@ runs:
shell: bash
run: |
DEV_DOCKER_OWNER=${{ github.repository_owner }} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
DJANGO_COLORS=nocolor \
SUPERVISOR_ARGS="-n -t" \
COMPOSE_UP_OPTS="-d --no-color" \
COMPOSE_TAG=${{ github.base_ref }} \
COMPOSE_UP_OPTS="-d" \
make docker-compose
- name: Update default AWX password
@@ -69,9 +57,21 @@ runs:
awx-manage update_password --username=admin --password=password
EOSH
- name: Build UI
# This must be a string comparison in composite actions:
# https://github.com/actions/runner/issues/2238
if: ${{ inputs.build-ui == 'true' }}
shell: bash
run: |
docker exec -i tools_awx_1 sh <<-EOSH
make ui-devel
EOSH
- name: Get instance data
id: data
shell: bash
run: |
AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT


@@ -1,27 +0,0 @@
name: 'Setup Python from Makefile'
description: 'Extract and set up Python version from Makefile'
inputs:
python-version:
description: 'Override Python version (optional)'
required: false
default: ''
working-directory:
description: 'Directory containing the Makefile'
required: false
default: '.'
runs:
using: composite
steps:
- name: Get python version from Makefile
shell: bash
run: |
if [ -n "${{ inputs.python-version }}" ]; then
echo "py_version=${{ inputs.python-version }}" >> $GITHUB_ENV
else
cd ${{ inputs.working-directory }}
echo "py_version=`make PYTHON_VERSION`" >> $GITHUB_ENV
fi
- name: Install python
uses: actions/setup-python@v5
with:
python-version: ${{ env.py_version }}
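A usage sketch, assuming a caller that wants to pin the interpreter rather than read it from the Makefile (the version shown is illustrative):

- uses: actions/checkout@v4
- uses: ./.github/actions/setup-python
  with:
    python-version: '3.13'   # optional; omit to fall back to `make PYTHON_VERSION`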


@@ -1,29 +0,0 @@
name: 'Setup SSH for GitHub'
description: 'Configure SSH for private repository access'
inputs:
ssh-private-key:
description: 'SSH private key for repository access'
required: false
default: ''
runs:
using: composite
steps:
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ inputs.ssh-private-key }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ inputs.ssh-private-key }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
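A usage sketch: callers pass the secret unconditionally, and the placeholder key generated above keeps webfactory/ssh-agent from failing when the secret is empty (for example, on forks):

- uses: ./.github/actions/setup-ssh-agent
  with:
    ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}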


@@ -13,7 +13,7 @@ runs:
docker logs tools_awx_1 > ${{ inputs.log-filename }}
- name: Upload AWX logs as artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: docker-compose-logs-${{ inputs.log-filename }}
name: docker-compose-logs
path: ${{ inputs.log-filename }}


@@ -8,10 +8,3 @@ updates:
labels:
- "docs"
- "dependencies"
- package-ecosystem: "pip"
directory: "requirements/"
schedule:
interval: "daily" #run daily until we trust it, then back this off to weekly
open-pull-requests-limit: 2
labels:
- "dependencies"


@@ -6,6 +6,8 @@ needs_triage:
- "Feature Summary"
"component:ui":
- "\\[X\\] UI"
"component:ui_next":
- "\\[X\\] UI \\(tech preview\\)"
"component:api":
- "\\[X\\] API"
"component:docs":


@@ -1,5 +1,8 @@
"component:api":
- any: ["awx/**/*"]
- any: ["awx/**/*", "!awx/ui/**"]
"component:ui":
- any: ["awx/ui/**/*"]
"component:docs":
- any: ["docs/**/*"]
@@ -11,4 +14,5 @@
- any: ["awx_collection/**/*"]
"dependencies":
- any: ["awx/ui/package.json"]
- any: ["requirements/*"]


@@ -1,6 +1,7 @@
## General
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
@@ -70,10 +71,10 @@ Thank you for your submission and for supporting AWX!
- Hello, we'd love to help, but we need a little more information about the problem you're having. Screenshots, log outputs, or any reproducers would be very helpful.
### Code of Conduct
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html
- Hello. Please keep in mind that Ansible adheres to a Code of Conduct in its community spaces. The spirit of the code of conduct is to be kind, and this is your friendly reminder to be so. Please see the full code of conduct here if you have questions: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html
### EE Contents / Community General
- Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://docs.ansible.com/projects/builder/en/stable/ \
- Hello. The awx-ee contains the collections and dependencies needed for supported AWX features to function. Anything beyond that (like the community.general package) will require you to build your own EE. For information on how to do that, see https://ansible-builder.readthedocs.io/en/stable/ \
\
The Ansible Community is looking at building an EE that corresponds to all of the collections inside the ansible package. That may help you if and when it happens; see https://github.com/ansible-community/community-topics/issues/31 for details.
@@ -82,18 +83,18 @@ The Ansible Community is looking at building an EE that corresponds to all of th
## Mailing List Triage
### Create an issue
- Hello, thanks for reaching out on list. We think this merits an issue on our GitHub, https://github.com/ansible/awx/issues. If you could open an issue up on GitHub it will get tagged and integrated into our planning and workflow. All future work will be tracked there. Issues should include as much information as possible, including screenshots, log outputs, or any reproducers.
- Hello, thanks for reaching out on list. We think this merits an issue on our Github, https://github.com/ansible/awx/issues. If you could open an issue up on Github it will get tagged and integrated into our planning and workflow. All future work will be tracked there. Issues should include as much information as possible, including screenshots, log outputs, or any reproducers.
### Create a Pull Request
- Hello, we think your idea is good! Please consider contributing a PR for this following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
### Receptor
- You can find the receptor docs here: https://docs.ansible.com/projects/receptor/en/latest/
- You can find the receptor docs here: https://receptor.readthedocs.io/en/latest/
- Hello, your issue seems related to receptor. Could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!
### Ansible Engine not AWX
- Hello, your question seems to be about Ansible development, not about AWX. Try asking in the Forum https://forum.ansible.com/tag/development
- Hello, your question seems to be about using Ansible Core, not about AWX. https://forum.ansible.com/tag/ansible-core is the best place to visit for user questions about Ansible. Thanks!
- Hello, your question seems to be about Ansible development, not about AWX. Try asking on the Ansible-devel specific mailing list: https://groups.google.com/g/ansible-devel
- Hello, your question seems to be about using Ansible, not about AWX. https://groups.google.com/g/ansible-project is the best place to visit for user questions about Ansible. Thanks!
### Ansible Galaxy not AWX
- Hey there. That sounds like an FAQ question. Did this: https://www.ansible.com/products/awx-project/faq cover your question?
@@ -103,7 +104,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
- AWX-Operator: https://github.com/ansible/awx-operator/blob/devel/CONTRIBUTING.md
### Oracle AWX
We'd be happy to help if you can reproduce this with AWX, since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracle's Linux Automation Manager, you will need to contact Oracle for support.
### Community Resolved
Hi,


@@ -1,102 +0,0 @@
---
name: API Schema Change Detection
env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEV_DOCKER_OWNER: ${{ github.repository_owner }}
COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
UPSTREAM_REPOSITORY_ID: 91594105
on:
pull_request:
branches:
- devel
- release_**
- feature_**
- stable-**
jobs:
api-schema-detection:
name: Detect API Schema Changes
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
packages: write
contents: read
steps:
- uses: actions/checkout@v4
with:
show-progress: false
fetch-depth: 0
- name: Build awx_devel image for schema check
uses: ./.github/actions/awx_devel_image
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Detect API schema changes
id: schema-check
continue-on-error: true
run: |
AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
AWX_DOCKER_CMD='make detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}' \
make docker-runner 2>&1 | tee schema-diff.txt
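# Without PIPESTATUS[0], the step would exit with tee's status and mask a make failure.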
exit ${PIPESTATUS[0]}
- name: Validate OpenAPI schema
id: schema-validation
continue-on-error: true
run: |
AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
AWX_DOCKER_CMD='make validate-openapi-schema' \
make docker-runner 2>&1 | tee schema-validation.txt
exit ${PIPESTATUS[0]}
- name: Add schema validation and diff to job summary
if: always()
# Show the captured text; if for some reason it can't be generated, say so.
run: |
echo "## API Schema Check Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Show validation status
echo "### OpenAPI Validation" >> $GITHUB_STEP_SUMMARY
if [ -f schema-validation.txt ] && grep -q "✓ Schema is valid" schema-validation.txt; then
echo "✅ **Status:** PASSED - Schema is valid OpenAPI 3.0.3" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Status:** FAILED - Schema validation failed" >> $GITHUB_STEP_SUMMARY
if [ -f schema-validation.txt ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "<details><summary>Validation errors</summary>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
cat schema-validation.txt >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
echo "</details>" >> $GITHUB_STEP_SUMMARY
fi
fi
echo "" >> $GITHUB_STEP_SUMMARY
# Show schema changes
echo "### Schema Changes" >> $GITHUB_STEP_SUMMARY
if [ -f schema-diff.txt ]; then
if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
echo "**Changes detected** between this PR and the base branch" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Truncate to first 1000 lines to stay under GitHub's 1MB summary limit
TOTAL_LINES=$(wc -l < schema-diff.txt)
if [ $TOTAL_LINES -gt 1000 ]; then
echo "_Showing first 1000 of ${TOTAL_LINES} lines. See job logs or download artifact for full diff._" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo '```diff' >> $GITHUB_STEP_SUMMARY
head -n 1000 schema-diff.txt >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
else
echo "No schema changes detected" >> $GITHUB_STEP_SUMMARY
fi
else
echo "Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
fi


@@ -5,12 +5,8 @@ env:
CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEV_DOCKER_OWNER: ${{ github.repository_owner }}
COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
UPSTREAM_REPOSITORY_ID: 91594105
on:
pull_request:
push:
branches:
- devel # needed to publish code coverage post-merge
jobs:
common-tests:
name: ${{ matrix.tests.name }}
@@ -24,140 +20,48 @@ jobs:
matrix:
tests:
- name: api-test
command: /start_tests.sh test_coverage
coverage-upload-name: ""
command: /start_tests.sh
- name: api-migrations
command: /start_tests.sh test_migrations
coverage-upload-name: ""
- name: api-lint
command: /var/lib/awx/venv/awx/bin/tox -e linters
coverage-upload-name: ""
- name: api-swagger
command: /start_tests.sh swagger
- name: awx-collection
command: /start_tests.sh test_collection_all
coverage-upload-name: "awx-collection"
- name: api-schema
command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
- name: ui-lint
command: make ui-lint
- name: ui-test-screens
command: make ui-test-screens
- name: ui-test-general
command: make ui-test-general
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: actions/checkout@v3
- name: Build awx_devel image for running checks
uses: ./.github/actions/awx_devel_image
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Run check ${{ matrix.tests.name }}
id: make-run
run: >-
AWX_DOCKER_ARGS='-e GITHUB_ACTIONS -e GITHUB_OUTPUT -v "${GITHUB_OUTPUT}:${GITHUB_OUTPUT}:rw,Z"'
AWX_DOCKER_CMD='${{ matrix.tests.command }}'
make docker-runner
- name: Inject PR number into coverage.xml
if: >-
!cancelled()
&& github.event_name == 'pull_request'
&& steps.make-run.outputs.cov-report-files != ''
run: |
if [ -f "reports/coverage.xml" ]; then
sed -i '2i<!-- PR ${{ github.event.pull_request.number }} -->' reports/coverage.xml
echo "Injected PR number ${{ github.event.pull_request.number }} into coverage.xml"
fi
- name: Upload test coverage to Codecov
if: >-
!cancelled()
&& steps.make-run.outputs.cov-report-files != ''
uses: codecov/codecov-action@v4
with:
fail_ci_if_error: >-
${{
toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
}}
files: >-
${{ steps.make-run.outputs.cov-report-files }}
flags: >-
CI-GHA,
pytest,
OS-${{
runner.os
}}
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test results to Codecov
if: >-
!cancelled()
&& steps.make-run.outputs.test-result-files != ''
uses: codecov/test-results-action@v1
with:
fail_ci_if_error: >-
${{
toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
}}
files: >-
${{ steps.make-run.outputs.test-result-files }}
flags: >-
CI-GHA,
pytest,
OS-${{
runner.os
}}
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tests.name }}-artifacts
path: reports/coverage.xml
retention-days: 5
- name: Upload awx jUnit test reports
if: >-
!cancelled()
&& steps.make-run.outputs.test-result-files != ''
&& github.event_name == 'push'
&& env.UPSTREAM_REPOSITORY_ID == github.repository_id
&& github.ref_name == github.event.repository.default_branch
run: |
for junit_file in $(echo '${{ steps.make-run.outputs.test-result-files }}' | sed 's/,/ /')
do
curl \
-v \
--user "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_USER }}:${{ secrets.PDE_ORG_RESULTS_UPLOAD_PASSWORD }}" \
--form "xunit_xml=@${junit_file}" \
--form "component_name=${{ matrix.tests.coverage-upload-name || 'awx' }}" \
--form "git_commit_sha=${{ github.sha }}" \
--form "git_repository_url=https://github.com/${{ github.repository }}" \
"${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_URL }}/api/results/upload/"
done
run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make docker-runner
dev-env:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/setup-python
with:
python-version: '3.13'
- uses: actions/checkout@v3
- uses: ./.github/actions/run_awx_devel
id: awx
with:
build-ui: false
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Run live dev env tests
run: docker exec tools_awx_1 /bin/bash -c "make live_test"
- uses: ./.github/actions/upload_awx_devel_logs
if: always()
with:
log-filename: live-tests.log
- name: Run smoke test
run: ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
awx-operator:
runs-on: ubuntu-latest
@@ -166,36 +70,29 @@ jobs:
DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
steps:
- name: Checkout awx
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
show-progress: false
path: awx
- uses: ./awx/.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Checkout awx-operator
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
show-progress: false
repository: ansible/awx-operator
path: awx-operator
- name: Setup python, referencing action at awx relative path
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: '3.12'
python-version: ${{ env.py_version }}
- name: Install playbook dependencies
run: |
python -m pip install docker
python3 -m pip install docker
- name: Check Python version
working-directory: awx
run: |
make print-PYTHON
- name: Build AWX image
working-directory: awx
run: |
@@ -207,105 +104,40 @@ jobs:
- name: Run test deployment with awx-operator
working-directory: awx-operator
id: awx_operator_test
timeout-minutes: 60
continue-on-error: true
run: |
set +e
timeout 15m bash -elc '
python -m pip install -r molecule/requirements.txt
python -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
$(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
ansible-galaxy collection install -r molecule/requirements.yml
sudo rm -f $(which kustomize)
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
'
rc=$?
if [ $rc -eq 124 ]; then
echo "timed_out=true" >> "$GITHUB_OUTPUT"
fi
exit $rc
python3 -m pip install -r molecule/requirements.txt
ansible-galaxy collection install -r molecule/requirements.yml
sudo rm -f $(which kustomize)
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
env:
AWX_TEST_IMAGE: local/awx
AWX_TEST_VERSION: ci
AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
STORE_DEBUG_OUTPUT: true
- name: Collect awx-operator logs on timeout
# Only run on timeout; normal failures should use molecule's built-in log collection.
if: steps.awx_operator_test.outputs.timed_out == 'true'
run: |
mkdir -p "$DEBUG_OUTPUT_DIR"
if command -v kind >/dev/null 2>&1; then
for cluster in $(kind get clusters 2>/dev/null); do
kind export logs "$DEBUG_OUTPUT_DIR/$cluster" --name "$cluster" || true
done
fi
if command -v kubectl >/dev/null 2>&1; then
kubectl get all -A -o wide > "$DEBUG_OUTPUT_DIR/kubectl-get-all.txt" || true
kubectl get pods -A -o wide > "$DEBUG_OUTPUT_DIR/kubectl-get-pods.txt" || true
kubectl describe pods -A > "$DEBUG_OUTPUT_DIR/kubectl-describe-pods.txt" || true
fi
docker ps -a > "$DEBUG_OUTPUT_DIR/docker-ps.txt" || true
- name: Upload debug output
if: always()
uses: actions/upload-artifact@v4
if: failure()
uses: actions/upload-artifact@v3
with:
name: awx-operator-debug-output
path: ${{ env.DEBUG_OUTPUT_DIR }}
- name: Fail awx-operator check if test deployment failed
if: steps.awx_operator_test.outcome != 'success'
run: exit 1
collection-sanity:
name: awx_collection sanity
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
ansible:
- stable-2.17
# - devel
steps:
- name: Perform sanity testing
uses: ansible-community/ansible-test-gh-action@release/v1
with:
ansible-core-version: ${{ matrix.ansible }}
codecov-token: ${{ secrets.CODECOV_TOKEN }}
collection-root: awx_collection
pre-test-cmd: >-
ansible-playbook
-i localhost,
tools/template_galaxy.yml
-e collection_package=awx
-e collection_namespace=awx
-e collection_version=1.0.0
-e '{"awx_template_version": false}'
testing-type: sanity
- uses: actions/checkout@v3
- name: Upload awx jUnit test reports to the unified dashboard
if: >-
!cancelled()
&& steps.make-run.outputs.test-result-files != ''
&& github.event_name == 'push'
&& env.UPSTREAM_REPOSITORY_ID == github.repository_id
&& github.ref_name == github.event.repository.default_branch
run: |
for junit_file in $(echo '${{ steps.make-run.outputs.test-result-files }}' | sed 's/,/ /')
do
curl \
-v \
--user "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_USER }}:${{ secrets.PDE_ORG_RESULTS_UPLOAD_PASSWORD }}" \
--form "xunit_xml=@${junit_file}" \
--form "component_name=awx" \
--form "git_commit_sha=${{ github.sha }}" \
--form "git_repository_url=https://github.com/${{ github.repository }}" \
"${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_URL }}/api/results/upload/"
done
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
- name: Upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core
- name: Run sanity tests
run: make test_collection_sanity
collection-integration:
name: awx_collection integration
@@ -322,74 +154,37 @@ jobs:
- name: r-z0-9
regex: ^[r-z0-9]
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/setup-python
with:
python-version: '3.13'
- name: Remove system ansible to avoid conflicts
run: |
python -m pip uninstall -y ansible ansible-core || true
- uses: actions/checkout@v3
- uses: ./.github/actions/run_awx_devel
id: awx
with:
build-ui: false
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Install dependencies for running tests
run: |
python -m pip install -e ./awxkit/
python -m pip install -r awx_collection/requirements.txt
hash -r # Rehash to pick up newly installed scripts
python3 -m pip install -e ./awxkit/
python3 -m pip install -r awx_collection/requirements.txt
- name: Run integration tests
id: make-run
run: |
echo "::remove-matcher owner=python::" # Disable annoying annotations from setup-python
echo '[general]' > ~/.tower_cli.cfg
echo 'host = https://${{ steps.awx.outputs.ip }}:8043' >> ~/.tower_cli.cfg
echo 'username = admin' >> ~/.tower_cli.cfg
echo 'password = password' >> ~/.tower_cli.cfg
echo 'oauth_token = ${{ steps.awx.outputs.admin-token }}' >> ~/.tower_cli.cfg
echo 'verify_ssl = false' >> ~/.tower_cli.cfg
TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
export PYTHONPATH="$(python -c 'import site; print(":".join(site.getsitepackages()))')${PYTHONPATH:+:$PYTHONPATH}"
make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--coverage --requirements $TARGETS" test_collection_integration
env:
ANSIBLE_TEST_PREFER_PODMAN: 1
- name: Upload test coverage to Codecov
if: >-
!cancelled()
&& steps.make-run.outputs.cov-report-files != ''
uses: codecov/codecov-action@v4
with:
fail_ci_if_error: >-
${{
toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
}}
files: >-
${{ steps.make-run.outputs.cov-report-files }}
flags: >-
CI-GHA,
ansible-test,
integration,
OS-${{
runner.os
}}
token: ${{ secrets.CODECOV_TOKEN }}
# Upload coverage report as artifact
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: coverage-${{ matrix.target-regex.name }}
path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
retention-days: 1
- uses: ./.github/actions/upload_awx_devel_logs
if: always()
@@ -405,48 +200,77 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
show-progress: false
- uses: ./.github/actions/setup-python
with:
python-version: '3.13'
- name: Remove system ansible to avoid conflicts
run: |
python -m pip uninstall -y ansible ansible-core || true
- uses: actions/checkout@v3
- name: Upgrade ansible-core
run: python -m pip install --upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core
- name: Download coverage artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
merge-multiple: true
path: coverage
pattern: coverage-*
- name: Combine coverage
run: |
make COLLECTION_VERSION=100.100.100-git install_collection
mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
cd coverage
for i in coverage-*; do
cp -rv $i/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
done
cd ~/.ansible/collections/ansible_collections/awx/awx
hash -r # Rehash to pick up newly installed scripts
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage combine --requirements
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage html
ansible-test coverage combine --requirements
ansible-test coverage html
echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
PATH="$(python -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$PATH" ansible-test coverage report >> $GITHUB_STEP_SUMMARY
ansible-test coverage report >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
echo >> $GITHUB_STEP_SUMMARY
echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
echo 'Download the HTML artifacts to view the coverage report.' >> $GITHUB_STEP_SUMMARY
# This is a huge hack, there's no official action for removing artifacts currently.
# Also ACTIONS_RUNTIME_URL and ACTIONS_RUNTIME_TOKEN aren't available in normal run
# steps, so we have to use github-script to get them.
#
# The advantage of doing this, though, is that we save on artifact storage space.
- name: Get secret artifact runtime URL
uses: actions/github-script@v6
id: get-runtime-url
with:
result-encoding: string
script: |
const { ACTIONS_RUNTIME_URL } = process.env;
return ACTIONS_RUNTIME_URL;
- name: Get secret artifact runtime token
uses: actions/github-script@v6
id: get-runtime-token
with:
result-encoding: string
script: |
const { ACTIONS_RUNTIME_TOKEN } = process.env;
return ACTIONS_RUNTIME_TOKEN;
- name: Remove intermediary artifacts
env:
ACTIONS_RUNTIME_URL: ${{ steps.get-runtime-url.outputs.result }}
ACTIONS_RUNTIME_TOKEN: ${{ steps.get-runtime-token.outputs.result }}
run: |
echo "::add-mask::${ACTIONS_RUNTIME_TOKEN}"
artifacts=$(
curl -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
${ACTIONS_RUNTIME_URL}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview \
| jq -r '.value | .[] | select(.name | startswith("coverage-")) | .url'
)
for artifact in $artifacts; do
curl -i -X DELETE -H "Accept: application/json;api-version=6.0-preview" -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" "$artifact"
done
- name: Upload coverage report as artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: awx-collection-integration-coverage-html
path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/reports/coverage


@@ -1,57 +0,0 @@
---
name: django-ansible-base requirements update
on:
workflow_dispatch:
schedule:
- cron: '0 6 * * *' # once a day @ 6 AM
permissions:
pull-requests: write
contents: write
jobs:
dab-pin-newest:
if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
runs-on: ubuntu-latest
steps:
- id: dab-release
name: Get current django-ansible-base release version
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
with:
owner: ansible
repo: django-ansible-base
excludes: prerelease, draft
- name: Check out repository code
uses: actions/checkout@v4
- id: dab-pinned
name: Get current django-ansible-base pinned version
run:
echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
- name: Update django-ansible-base pinned version to upstream release
run:
requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
- name: Create Pull Request
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
with:
base: devel
branch: bump-django-ansible-base
title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
body: |
##### SUMMARY
Automated by .github/workflows/dab-release.yml
django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
##### ISSUE TYPE
- Bug, Docs Fix or other nominal change
##### COMPONENT NAME
- API
commit-message: |
Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
add-paths:
requirements/requirements_git.txt


@@ -2,7 +2,6 @@
name: Build/Push Development Images
env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
on:
workflow_dispatch:
push:
@@ -10,7 +9,6 @@ on:
- devel
- release_*
- feature_*
- stable-*
jobs:
push-development-images:
runs-on: ubuntu-latest
@@ -36,9 +34,7 @@ jobs:
exit 0
if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
- uses: actions/checkout@v4
with:
show-progress: false
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -50,30 +46,32 @@ jobs:
run: |
echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
env:
OWNER: '${{ github.repository_owner }}'
- uses: ./.github/actions/setup-python
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}
- name: Log in to registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Setup node and npm for the new UI build
- name: Setup node and npm
uses: actions/setup-node@v2
with:
node-version: '18'
node-version: '16.13.1'
if: matrix.build-targets.image-name == 'awx'
- name: Prebuild new UI for awx image (to speed up build process)
- name: Prebuild UI for awx image (to speed up build process)
run: |
make ui
sudo apt-get install gettext
make ui-release
make ui-next
if: matrix.build-targets.image-name == 'awx'
- uses: ./.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Build and push AWX devel images
run: |
make ${{ matrix.build-targets.make-target }}


@@ -8,13 +8,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/setup-python
with:
python-version: '3.x'
- uses: actions/checkout@v3
- name: install tox
run: pip install tox

.github/workflows/e2e_test.yml (new file, +75 lines)

@@ -0,0 +1,75 @@
---
name: E2E Tests
env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
on:
pull_request_target:
types: [labeled]
jobs:
e2e-test:
if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
runs-on: ubuntu-latest
timeout-minutes: 40
permissions:
packages: write
contents: read
strategy:
matrix:
job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/run_awx_devel
id: awx
with:
build-ui: true
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Pull awx_cypress_base image
run: |
docker pull quay.io/awx/awx_cypress_base:latest
- name: Checkout test project
uses: actions/checkout@v3
with:
repository: ${{ github.repository_owner }}/tower-qa
ssh-key: ${{ secrets.QA_REPO_KEY }}
path: tower-qa
ref: devel
- name: Build cypress
run: |
cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
docker build -t awx-pf-tests .
- name: Run E2E tests
env:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
run: |
export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
export COMMIT_INFO_SHA=$GITHUB_SHA
export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
AWX_IP=${{ steps.awx.outputs.ip }}
printenv > .env
echo "Executing tests:"
docker run \
--network '_sources_default' \
--ipc=host \
--env-file=.env \
-e CYPRESS_baseUrl="https://$AWX_IP:8043" \
-e CYPRESS_AWX_E2E_USERNAME=admin \
-e CYPRESS_AWX_E2E_PASSWORD='password' \
-e COMMAND="npm run cypress-concurrently-gha" \
-v /dev/shm:/dev/shm \
-v $PWD:/e2e \
-w /e2e \
awx-pf-tests run --project .
- uses: ./.github/actions/upload_awx_devel_logs
if: always()
with:
log-filename: e2e-${{ matrix.job }}.log
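The labeled trigger plus the if guard above is the usual mitigation for pull_request_target exposing secrets to fork PRs: nothing runs until a maintainer applies the qe:e2e label. A stripped-down sketch of just that gate (the job and step names are illustrative):

on:
  pull_request_target:
    types: [labeled]
jobs:
  gated:
    # Runs only once a maintainer applies the qe:e2e label, so untrusted
    # fork code gets human review before any secrets are in reach.
    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
    runs-on: ubuntu-latest
    steps:
      - run: echo "label gate passed"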


@@ -20,4 +20,4 @@ jobs:
run: |
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
ansible localhost -c local -m aws_s3 \
-a "bucket=awx-public-ci-files object=${{ github.event.repository.name }}/${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
-a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"


@@ -30,15 +30,10 @@ jobs:
timeout-minutes: 20
name: Label Issue - Community
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/setup-python
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- name: Install python requests
run: pip install requests
- name: Check if user is a member of Ansible org
uses: jannekem/run-python-script-action@v1
id: check_user


@@ -29,14 +29,8 @@ jobs:
timeout-minutes: 20
name: Label PR - Community
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: ./.github/actions/setup-python
with:
python-version: '3.x'
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- name: Install python requests
run: pip install requests
- name: Check if user is a member of Ansible org


@@ -29,14 +29,18 @@ jobs:
- name: Set GitHub Env vars if release event
if: ${{ github.event_name == 'release' }}
run: |
echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
echo "TAG_NAME=${{ env.TAG_NAME }}" >> $GITHUB_ENV
- name: Checkout awx
uses: actions/checkout@v4
with:
show-progress: false
uses: actions/checkout@v3
- uses: ./.github/actions/setup-python
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}
- name: Install dependencies
run: |
@@ -56,18 +60,15 @@ jobs:
COLLECTION_VERSION: ${{ env.TAG_NAME }}
COLLECTION_TEMPLATE_VERSION: true
run: |
sudo apt-get install jq
make build_collection
count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
if [[ "$count" == "1" ]]; then
curl_with_redirects=$(curl --head -sLw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
curl_without_redirects=$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
if [[ "$curl_with_redirects" == "302" ]] || [[ "$curl_without_redirects" == "302" ]]; then
echo "Galaxy release already done";
elif [[ "$count" == "0" ]]; then
else
ansible-galaxy collection publish \
--token=${{ secrets.GALAXY_TOKEN }} \
awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
else
echo "Unexpected count from galaxy search: $count";
exit 1;
awx_collection_build/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz;
fi
- name: Set official pypi info


@@ -1,248 +0,0 @@
# SonarCloud Analysis Workflow for awx
#
# This workflow runs SonarCloud analysis triggered by CI workflow completion.
# It is split into two separate jobs for clarity and maintainability:
#
# FLOW: CI completes → workflow_run triggers this workflow → appropriate job runs
#
# JOB 1: sonar-pr-analysis (for PRs)
# - Triggered by: workflow_run (CI on pull_request)
# - Steps: Download coverage → Get PR info → Get changed files → Run SonarCloud PR analysis
# - Scans: All changed files in the PR (Python, YAML, JSON, etc.)
# - Quality gate: Focuses on new/changed code in PR only
#
# JOB 2: sonar-branch-analysis (for long-lived branches)
# - Triggered by: workflow_run (CI on push to devel)
# - Steps: Download coverage → Run SonarCloud branch analysis
# - Scans: Full codebase
# - Quality gate: Focuses on overall project health
#
# This ensures coverage data is always available from CI before analysis runs.
#
# What files are scanned:
# - All files in the repository that SonarCloud can analyze
# - Excludes: tests, scripts, dev environments, external collections (see sonar-project.properties)
# With much help from:
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/30
# https://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/32
name: SonarCloud
on:
workflow_run: # This is triggered by CI being completed.
workflows:
- CI
types:
- completed
permissions: read-all
jobs:
sonar-pr-analysis:
name: SonarCloud PR Analysis
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.event == 'pull_request' &&
github.repository == 'ansible/awx'
steps:
- uses: actions/checkout@v4
# Download all individual coverage artifacts from CI workflow
- name: Download coverage artifacts
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
workflow: CI
run_id: ${{ github.event.workflow_run.id }}
pattern: api-test-artifacts
# Extract PR metadata from workflow_run event
- name: Set PR metadata and prepare files for analysis
env:
COMMIT_SHA: ${{ github.event.workflow_run.head_sha }}
REPO_NAME: ${{ github.event.repository.full_name }}
HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Find all downloaded coverage XML files
coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
echo "Found coverage files: $coverage_files"
echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
# Extract PR number from first coverage.xml file found
first_coverage=$(find . -name "coverage.xml" -type f | head -1)
if [ -f "$first_coverage" ]; then
PR_NUMBER=$(grep -m 1 '<!-- PR' "$first_coverage" | awk '{print $3}' || echo "")
else
PR_NUMBER=""
fi
echo "🔍 SonarCloud Analysis Decision Summary"
echo "========================================"
echo "├── CI Event: ✅ Pull Request"
echo "├── PR Number from coverage.xml: #${PR_NUMBER:-<not found>}"
if [ -z "$PR_NUMBER" ]; then
echo "##[error]❌ FATAL: PR number not found in coverage.xml"
echo "##[error]This job requires a PR number to run PR analysis."
echo "##[error]The ci workflow should have injected the PR number into coverage.xml."
exit 1
fi
# Get PR metadata from GitHub API
PR_DATA=$(gh api "repos/$REPO_NAME/pulls/$PR_NUMBER")
PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')
PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')
# Print summary
echo "🔍 SonarCloud Analysis Decision Summary"
echo "========================================"
echo "├── CI Event: ✅ Pull Request"
echo "├── PR Number: #$PR_NUMBER"
echo "├── Base Branch: $PR_BASE"
echo "├── Head Branch: $PR_HEAD"
echo "├── Repo: $REPO_NAME"
# Export to GitHub env for later steps
echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
echo "PR_BASE=$PR_BASE" >> $GITHUB_ENV
echo "PR_HEAD=$PR_HEAD" >> $GITHUB_ENV
echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_ENV
echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
# Get all changed files from PR (with error handling)
files=""
if [ -n "$PR_NUMBER" ]; then
if gh api repos/$REPO_NAME/pulls/$PR_NUMBER/files --jq '.[].filename' > /tmp/pr_files.txt 2>/tmp/pr_error.txt; then
files=$(cat /tmp/pr_files.txt)
else
echo "├── Changed Files: ⚠️ Could not fetch (likely test repo or PR not found)"
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
echo "├── Coverage Data: ✅ Available"
else
echo "├── Coverage Data: ⚠️ Not available"
fi
echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
# No files = no inclusions filter = full scan
exit 0
fi
else
echo "├── PR Number: ⚠️ Not available"
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
echo "├── Coverage Data: ✅ Available"
else
echo "├── Coverage Data: ⚠️ Not available"
fi
echo "└── Result: ✅ Running SonarCloud analysis (full scan)"
exit 0
fi
# Get file extensions and count for summary
extensions=$(echo "$files" | sed 's/.*\.//' | sort | uniq | tr '\n' ',' | sed 's/,$//')
file_count=$(echo "$files" | wc -l)
echo "├── Changed Files: $file_count file(s) (.${extensions})"
# Check if coverage.xml exists and has content
if [ -f coverage.xml ] && [ -s coverage.xml ]; then
echo "├── Coverage Data: ✅ Available"
else
echo "├── Coverage Data: ⚠️ Not available (analysis will proceed without coverage)"
fi
# Prepare file list for Sonar
echo "All changed files in PR:"
echo "$files"
# Filter out files that are excluded by .coveragerc to avoid coverage conflicts
# This prevents SonarCloud from analyzing files that have no coverage data
if [ -n "$files" ]; then
# Filter out files matching .coveragerc omit patterns
filtered_files=$(echo "$files" | grep -v "settings/.*_defaults\.py$" | grep -v "settings/defaults\.py$" | grep -v "main/migrations/")
# Show which files were filtered out for transparency
excluded_files=$(echo "$files" | grep -E "(settings/.*_defaults\.py$|settings/defaults\.py$|main/migrations/)" || true)
if [ -n "$excluded_files" ]; then
echo "├── Filtered out (coverage-excluded): $(echo "$excluded_files" | wc -l) file(s)"
echo "$excluded_files" | sed 's/^/│ - /'
fi
if [ -n "$filtered_files" ]; then
inclusions=$(echo "$filtered_files" | tr '\n' ',' | sed 's/,$//')
echo "SONAR_INCLUSIONS=$inclusions" >> $GITHUB_ENV
echo "└── Result: ✅ Will scan these files (excluding coverage-omitted files): $inclusions"
else
echo "└── Result: ✅ All changed files are excluded by coverage config, running full SonarCloud analysis"
# Don't set SONAR_INCLUSIONS, let it scan everything per sonar-project.properties
fi
else
echo "└── Result: ✅ Running SonarCloud analysis"
fi
- name: Add base branch
if: env.PR_NUMBER != ''
run: |
gh pr checkout ${{ env.PR_NUMBER }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: SonarCloud Scan
uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
with:
args: >
-Dsonar.scm.revision=${{ env.COMMIT_SHA }}
-Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
-Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
-Dsonar.pullrequest.base=${{ env.PR_BASE }}
-Dsonar.python.coverage.reportPaths=${{ env.COVERAGE_PATHS }}
${{ env.SONAR_INCLUSIONS && format('-Dsonar.inclusions={0}', env.SONAR_INCLUSIONS) || '' }}
sonar-branch-analysis:
name: SonarCloud Branch Analysis
runs-on: ubuntu-latest
if: |
github.event_name == 'workflow_run' &&
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.event == 'push' &&
github.repository == 'ansible/awx'
steps:
- uses: actions/checkout@v4
# Download all individual coverage artifacts from CI workflow (optional for branch pushes)
- name: Download coverage artifacts
continue-on-error: true
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
workflow: CI
run_id: ${{ github.event.workflow_run.id }}
pattern: api-test-artifacts
- name: Print SonarCloud Analysis Summary
env:
BRANCH_NAME: ${{ github.event.workflow_run.head_branch }}
run: |
# Find all downloaded coverage XML files
coverage_files=$(find . -name "coverage.xml" -type f | tr '\n' ',' | sed 's/,$//')
echo "Found coverage files: $coverage_files"
echo "COVERAGE_PATHS=$coverage_files" >> $GITHUB_ENV
echo "🔍 SonarCloud Analysis Summary"
echo "=============================="
echo "├── CI Event: ✅ Push (via workflow_run)"
echo "├── Branch: $BRANCH_NAME"
echo "├── Coverage Files: ${coverage_files:-none}"
echo "├── Python Changes: N/A (Full codebase scan)"
echo "└── Result: ✅ Proceed - \"Running SonarCloud analysis\""
- name: SonarCloud Scan
uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
with:
args: >
-Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
-Dsonar.branch.name=${{ github.event.workflow_run.head_branch }}
${{ env.COVERAGE_PATHS && format('-Dsonar.python.coverage.reportPaths={0}', env.COVERAGE_PATHS) || '' }}


@@ -45,28 +45,30 @@ jobs:
exit 0
- name: Checkout awx
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
show-progress: false
path: awx
- name: Checkout awx-operator
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
show-progress: false
repository: ${{ github.repository_owner }}/awx-operator
path: awx-operator
- name: Checkout awx-logos
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
show-progress: false
repository: ansible/awx-logos
path: awx-logos
- uses: ./awx/.github/actions/setup-python
- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
working-directory: awx
python-version: ${{ env.py_version }}
- name: Install playbook dependencies
run: |
@@ -84,16 +86,17 @@ jobs:
run: |
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
- name: Setup node and npm for new UI build
uses: actions/setup-node@v4
- name: Setup node and npm
uses: actions/setup-node@v2
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: awx/awx/ui/**/package-lock.json
node-version: '16.13.1'
- name: Prebuild new UI for awx image (to speed up build process)
- name: Prebuild UI for awx image (to speed up build process)
working-directory: awx
run: make ui
run: |
sudo apt-get install gettext
make ui-release
make ui-next
- name: Set build env variables
run: |
@@ -133,9 +136,9 @@ jobs:
- name: Pulling images for test deployment with awx-operator
# The awx-operator molecule test expects images to be loaded into kind; buildx exports images to a registry, not the local store
run: |
docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
docker pull -q ${AWX_EE_TEST_IMAGE}
docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
docker pull ${AWX_OPERATOR_TEST_IMAGE}
docker pull ${AWX_EE_TEST_IMAGE}
docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
- name: Run test deployment with awx-operator
working-directory: awx-operator


@@ -13,9 +13,7 @@ jobs:
steps:
- name: Checkout branch
uses: actions/checkout@v4
with:
show-progress: false
uses: actions/checkout@v3
- name: Update PR Body
env:


@@ -5,13 +5,11 @@ env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
on:
workflow_dispatch:
push:
branches:
- devel
- release_**
- feature_**
- stable-**
jobs:
push:
runs-on: ubuntu-latest
@@ -20,30 +18,41 @@ jobs:
packages: write
contents: read
steps:
- uses: actions/checkout@v4
with:
show-progress: false
- uses: actions/checkout@v3
- name: Build awx_devel image to use for schema gen
uses: ./.github/actions/awx_devel_image
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
python-version: ${{ env.py_version }}
- name: Log in to registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Pre-pull image to warm build cache
run: |
docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
- name: Build image
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
- name: Generate API Schema
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
--workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema
--workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
- name: Upload API Schema
uses: keithweaver/aws-s3-github-action@4dd5a7b81d54abaa23bbac92b27e85d7f405ae53
with:
command: cp
source: ${{ github.workspace }}/schema.json
destination: s3://awx-public-ci-files/${{ github.event.repository.name }}/${{ github.ref_name }}/schema.json
aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY }}
aws_secret_access_key: ${{ secrets.AWS_SECRET_KEY }}
aws_region: us-east-1
flags: --acl public-read --only-show-errors
env:
AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_REGION: 'us-east-1'
run: |
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
ansible localhost -c local -m aws_s3 \
-a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"

.gitignore
View File

@@ -1,7 +1,6 @@
# Ignore generated schema
swagger.json
schema.json
schema.yaml
reference-schema.json
# Tags
@@ -21,10 +20,23 @@ awx/projects
awx/job_output
awx/public/media
awx/public/static
awx/ui/tests/test-results.xml
awx/ui/client/src/local_settings.json
awx/main/fixtures
awx/*.log
tower/tower_warnings.log
celerybeat-schedule
awx/ui/static
awx/ui/build_test
awx/ui/client/languages
awx/ui/templates/ui/index.html
awx/ui/templates/ui/installing.html
awx/ui/node_modules/
awx/ui/src/locales/*/messages.js
awx/ui/coverage/
awx/ui/build
awx/ui/.env.local
awx/ui/instrumented
rsyslog.pid
tools/docker-compose/ansible/awx_dump.sql
tools/docker-compose/Dockerfile
@@ -32,6 +44,7 @@ tools/docker-compose/_build
tools/docker-compose/_sources
tools/docker-compose/overrides/
tools/docker-compose-minikube/_sources
tools/docker-compose/keycloak.awx.realm.json
!tools/docker-compose/editable_dependencies
tools/docker-compose/editable_dependencies/*
@@ -66,6 +79,11 @@ __pycache__
/tmp
**/npm-debug.log*
# UI build flag files
awx/ui/.deps_built
awx/ui/.release_built
awx/ui/.release_deps_built
# Testing
.cache
.coverage
@@ -143,16 +161,15 @@ use_dev_supervisor.txt
.idea/*
*.unison.tmp
*.#
/awx/ui/.ui-built
/_build/
/_build_kube_dev/
/Dockerfile
/Dockerfile.dev
/Dockerfile.kube-dev
awx/ui/src
awx/ui/build
awx/ui/.ui-built
awx/ui_next
awx/ui_next/src
awx/ui_next/build
# Docs build stuff
docs/docsite/build/

View File

@@ -7,7 +7,7 @@ build:
os: ubuntu-22.04
tools:
python: >-
3.12
3.11
commands:
- pip install --user tox
- python3 -m tox -e docs --notest -v

View File

@@ -5,12 +5,12 @@ ignore: |
awx/main/tests/data/inventory/plugins/**
# vault files
awx/main/tests/data/ansible_utils/playbooks/valid/vault.yml
awx/ui/test/e2e/tests/smoke-vars.yml
awx/ui/node_modules
tools/docker-compose/_sources
# django template files
awx/api/templates/instance_install_bundle/**
.readthedocs.yaml
tools/loki
tools/otel
extends: default

View File

@@ -2,7 +2,7 @@
Hi there! We're excited to have you as a contributor.
Have questions about this document or anything not covered here? Create a topic using the [AWX tag on the Ansible Forum](https://forum.ansible.com/tag/awx).
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
## Table of contents
@@ -30,8 +30,8 @@ Have questions about this document or anything not covered here? Create a topic
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see [git push docs](https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt).
- If submitting a large code change, it's a good idea to create a [forum topic tagged with 'awx'](https://forum.ansible.com/tag/awx), and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.libera.chat, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
## Setting up your development environment
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
#### Frontend Development
See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
See [the ui development documentation](awx/ui/CONTRIBUTING.md).
#### Fork and clone the AWX repo
@@ -121,18 +121,18 @@ If it has someone assigned to it then that person is the person responsible for
**NOTES**
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the [Ansible Forum](https://forum.ansible.com/tag/awx).
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](./awx/ui/README.md).
### Translations
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX, as it's an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language, please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional languages.
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
## Submitting Pull Requests
@@ -143,13 +143,15 @@ Here are a few things you can do to help the visibility of your change, and incr
- No issues when running linters/code checkers
- Python: black: `(container)/awx_devel$ make black`
- Javascript: `(container)/awx_devel$ make ui-lint`
- No issues from unit tests
- Python: py.test: `(container)/awx_devel$ make test`
- JavaScript: `(container)/awx_devel$ make ui-test`
- Write tests for new functionality, update/add tests for bug fixes
- Make the smallest change possible
- Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
It's generally a good idea to discuss features with us first by engaging on the [Ansible Forum](https://forum.ansible.com/tag/awx).
It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
`git pull`, and `git rebase`, rather than `git merge`.
@@ -159,11 +161,11 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
## Reporting Issues
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
## Getting Help
If you require additional assistance, please submit your question to the [Ansible Forum](https://forum.ansible.com/tag/awx).
If you require additional assistance, please reach out to us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
For extra information on debugging tools, see [Debugging](./docs/debugging/).

View File

@@ -1,11 +1,11 @@
# Issues
## Reporting
Use the GitHub [issue tracker](https://github.com/ansible/awx/issues) for filing bugs. In order to save time, and help us respond to issues quickly, make sure to fill out as much of the issue template
as possible. Version information, and an accurate reproducing scenario are critical to helping us identify the problem.
Please don't use the issue tracker as a way to ask how to do something. Instead, use the [Ansible Forum](https://forum.ansible.com/tag/awx).
Please don't use the issue tracker as a way to ask how to do something. Instead, use the [mailing list](https://groups.google.com/forum/#!forum/awx-project) , and the `#ansible-awx` channel on irc.libera.chat to get help.
Before opening a new issue, please use the issue search feature to see if what you're experiencing has already been reported. If you have any extra detail to provide, please comment. Otherwise, rather than posting a "me too" comment, please consider giving it a ["thumbs up"](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comment) to give us an indication of the severity of the problem.
@@ -14,7 +14,7 @@ Before opening a new issue, please use the issue search feature to see if what y
When reporting issues for the UI, we also appreciate having screen shots and any error messages from the web browser's console. It's not unusual for browser extensions
and plugins to cause problems. Reporting those will also help speed up analyzing and resolving UI bugs.
### API and backend issues
For the API and backend services, please capture all of the logs that you can from the time the problem occurred.

View File

@@ -4,7 +4,9 @@ recursive-include awx *.mo
recursive-include awx/static *
recursive-include awx/templates *.html
recursive-include awx/api/templates *.md *.html *.yml
recursive-include awx/ui/build *.html
recursive-include awx/ui/build *
recursive-include awx/ui_next/build *
recursive-include awx/playbooks *.yml
recursive-include awx/lib/site-packages *
recursive-include awx/plugins *.ps1
@@ -15,6 +17,7 @@ recursive-include licenses *
recursive-exclude awx devonly.py*
recursive-exclude awx/api/tests *
recursive-exclude awx/main/tests *
recursive-exclude awx/ui/client *
recursive-exclude awx/settings local_settings.py*
include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1

Makefile
View File

@@ -1,6 +1,6 @@
-include awx/ui/Makefile
-include awx/ui_next/Makefile
PYTHON := $(notdir $(shell for i in python3.12 python3.11 python3; do command -v $$i; done|sed 1q))
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
SHELL := bash
DOCKER_COMPOSE ?= docker compose
OFFICIAL ?= no
@@ -8,7 +8,6 @@ NODE ?= node
NPM_BIN ?= npm
KIND_BIN ?= $(shell which kind)
CHROMIUM_BIN=/tmp/chrome-linux/chrome
GIT_REPO_NAME ?= $(shell basename `git rev-parse --show-toplevel`)
GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
MANAGEMENT_COMMAND ?= awx-manage
VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
@@ -19,20 +18,12 @@ COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d .
COLLECTION_SANITY_ARGS ?= --docker
# collection unit testing directories
COLLECTION_TEST_DIRS ?= awx_collection/test/awx
# pytest added args to collect coverage
COVERAGE_ARGS ?= --cov --cov-report=xml --junitxml=reports/junit.xml
# pytest test directories
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
# pytest args to run tests in parallel
PARALLEL_TESTS ?= -n auto
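# Example override (hypothetical): disable parallelism for a debugger-friendly run
#   PARALLEL_TESTS= make test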
# collection integration test directories (defaults to all)
COLLECTION_TEST_TARGET ?=
# Python version for ansible-test (must be 3.11, 3.12, or 3.13)
ANSIBLE_TEST_PYTHON_VERSION ?= 3.13
# args for collection install
COLLECTION_PACKAGE ?= awx
COLLECTION_NAMESPACE ?= awx
COLLECTION_INSTALL = $(HOME)/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
COLLECTION_TEMPLATE_VERSION ?= false
# NOTE: This defaults the container image version to the branch that's active
@@ -40,6 +31,10 @@ COMPOSE_TAG ?= $(GIT_BRANCH)
MAIN_NODE_TYPE ?= hybrid
# If set to true docker-compose will also start a pgbouncer instance and use it
PGBOUNCER ?= false
# If set to true docker-compose will also start a keycloak instance
KEYCLOAK ?= false
# If set to true docker-compose will also start an ldap instance
LDAP ?= false
# If set to true docker-compose will also start a splunk instance
SPLUNK ?= false
# If set to true docker-compose will also start a prometheus instance
@@ -50,14 +45,10 @@ GRAFANA ?= false
VAULT ?= false
# If set to true docker-compose will also start a hashicorp vault instance with TLS enabled
VAULT_TLS ?= false
# If set to true docker-compose will also start an OpenTelemetry Collector instance
OTEL ?= false
# If set to true docker-compose will also start a Loki instance
LOKI ?= false
# If set to true docker-compose will also start a tacacs+ instance
TACACS ?= false
# If set to true docker-compose will install editable dependencies
EDITABLE_DEPENDENCIES ?= false
# If set to true, use tls for postgres connection
PG_TLS ?= false
VENV_BASE ?= /var/lib/awx/venv
@@ -65,12 +56,7 @@ DEV_DOCKER_OWNER ?= ansible
# Docker will only accept lowercase, so github names like Paul need to be paul
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_devel:$(COMPOSE_TAG)
IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_kube_devel:$(COMPOSE_TAG)
IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME):$(COMPOSE_TAG)
# Common command to use for running ansible-playbook
ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
@@ -79,7 +65,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==25.3 setuptools==80.9.0 setuptools_scm[toml]==9.2.2 wheel==0.45.1 cython==3.1.3
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
NAME ?= awx
@@ -94,24 +80,11 @@ I18N_FLAG_FILE = .i18n_built
## PLATFORMS defines the target platforms the manager image is built for, to provide multi-architecture support
PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
# DOCKER_CACHE=--no-cache make docker-compose-build
ifeq ($(DOCKER_CACHE),)
DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
else
DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
endif
.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
update_requirements upgrade_requirements update_requirements_dev \
docker_update_requirements docker_upgrade_requirements docker_update_requirements_dev \
develop refresh adduser migrate dbchange \
receiver test test_unit test_coverage coverage_html \
sdist \
ui-release ui-devel \
VERSION PYTHON_VERSION docker-compose-sources \
.git/hooks/pre-commit
@@ -134,7 +107,7 @@ clean-languages:
find ./awx/locale/ -type f -regex '.*\.mo$$' -delete
## Remove temporary build files, compiled Python files.
clean: clean-api clean-awxkit clean-dist
clean: clean-ui clean-api clean-awxkit clean-dist
rm -rf awx/public
rm -rf awx/lib/site-packages
rm -rf awx/job_status
@@ -148,7 +121,7 @@ clean-api:
rm -rf build $(NAME)-$(VERSION) *.egg-info
rm -rf .tox
find . -type f -regex ".*\.py[co]$$" -delete
find . -type d -name "__pycache__" -exec rm -rf {} +
find . -type d -name "__pycache__" -delete
rm -f awx/awx_test.sqlite3*
rm -rf requirements/vendor
rm -rf awx/projects
@@ -198,36 +171,6 @@ requirements_dev: requirements_awx requirements_awx_dev
requirements_test: requirements
## Update requirements files using pip-compile (run inside container)
update_requirements:
cd requirements && ./updater.sh run
## Upgrade all requirements to latest versions (run inside container)
upgrade_requirements:
cd requirements && ./updater.sh upgrade
## Update development requirements (run inside container)
update_requirements_dev:
cd requirements && ./updater.sh dev
## Update requirements using docker-runner
docker_update_requirements:
@echo "Running requirements updater..."
AWX_DOCKER_CMD='make update_requirements' $(MAKE) docker-runner
@echo "Requirements update complete!"
## Upgrade requirements using docker-runner
docker_upgrade_requirements:
@echo "Running requirements upgrader..."
AWX_DOCKER_CMD='make upgrade_requirements' $(MAKE) docker-runner
@echo "Requirements upgrade complete!"
## Update dev requirements using docker-runner
docker_update_requirements_dev:
@echo "Running dev requirements updater..."
AWX_DOCKER_CMD='make update_requirements_dev' $(MAKE) docker-runner
@echo "Dev requirements update complete!"
## "Install" awx package in development mode.
develop:
@if [ "$(VIRTUAL_ENV)" ]; then \
@@ -263,6 +206,12 @@ migrate:
dbchange:
$(MANAGEMENT_COMMAND) makemigrations
supervisor:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
supervisord --pidfile=/tmp/supervisor_pid -n
collectstatic:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
@@ -289,7 +238,7 @@ dispatcher:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(PYTHON) manage.py dispatcherd
$(PYTHON) manage.py run_dispatcher
## Run to start the zeromq callback receiver
receiver:
@@ -348,17 +297,15 @@ black: reports
@echo "fi" >> .git/hooks/pre-commit
@chmod +x .git/hooks/pre-commit
genschema: awx-link reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) spectacular --format openapi-json --file schema.json
genschema: reports
$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
mv swagger.json schema.json
genschema-yaml: awx-link reports
swagger: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) spectacular --format openapi --file schema.yaml
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
check: black
@@ -371,38 +318,26 @@ api-lint:
awx-link:
[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
PYTEST_ARGS ?= -n auto
## Run all API unit tests.
test:
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PARALLEL_TESTS) $(TEST_DIRS)
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PYTEST_ARGS) $(TEST_DIRS)
cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
live_test:
cd awx/main/tests/live && py.test tests/
## Run all API unit tests with coverage enabled.
test_coverage:
$(MAKE) test PYTEST_ADDOPTS="--create-db $(COVERAGE_ARGS)"
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=awxkit/coverage.xml,reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=awxkit/report.xml,reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
test_migrations:
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db $(PARALLEL_TESTS) $(COVERAGE_ARGS) $(TEST_DIRS)
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test $(PYTEST_ARGS) $(TEST_DIRS)
## Runs AWX_DOCKER_CMD inside a new docker container.
docker-runner:
docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z $(AWX_DOCKER_ARGS) --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
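# Example usage, mirroring the docker_update_requirements target above
# (command string hypothetical):
#   AWX_DOCKER_CMD='make test' make docker-runner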
test_collection:
rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
@@ -411,12 +346,7 @@ test_collection:
fi && \
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
ansible --version
py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
fi
py.test $(COLLECTION_TEST_DIRS) -v
# The python path needs to be modified so that the tests can find Ansible within the container
# First we will use anything explicitly set as PYTHONPATH
# Second we will load any libraries out of the virtualenv (if it's unspecified, that should be fine because Python should not load out of an empty directory)
@@ -432,7 +362,7 @@ symlink_collection:
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
awx_collection_build: $(shell find awx_collection -type f)
$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
-e collection_package=$(COLLECTION_PACKAGE) \
-e collection_namespace=$(COLLECTION_NAMESPACE) \
-e collection_version=$(COLLECTION_VERSION) \
@@ -451,29 +381,23 @@ test_collection_sanity:
if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
ansible --version
COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
cd $(COLLECTION_INSTALL) && \
ansible-test sanity $(COLLECTION_SANITY_ARGS) --coverage --junit && \
ansible-test coverage xml --requirements --group-by command --group-by version
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=sanity*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
echo test-result-files="$$(find "$(COLLECTION_INSTALL)/tests/output/junit/" -type f -name '*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
fi
cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
test_collection_integration: install_collection
cd $(COLLECTION_INSTALL) && \
PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test integration --python $(ANSIBLE_TEST_PYTHON_VERSION) --coverage -vvv $(COLLECTION_TEST_TARGET) && \
PATH="$$($(PYTHON) -c 'import sys; import os; print(os.path.dirname(sys.executable))'):$$PATH" ansible-test coverage xml --requirements --group-by command --group-by version
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
then \
echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
fi
cd $(COLLECTION_INSTALL) && ansible-test integration -vvv $(COLLECTION_TEST_TARGET)
test_unit:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
py.test awx/main/tests/unit awx/conf/tests/unit
py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
## Run all API unit tests with coverage enabled.
test_coverage:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)
## Output test coverage as HTML (into htmlcov directory).
coverage_html:
@@ -492,7 +416,76 @@ bulk_data:
fi; \
$(PYTHON) tools/data_generators/rbac_dummy_data_generator.py --preset=$(DATA_GEN_PRESET)
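# Example invocation (preset name hypothetical):
#   DATA_GEN_PRESET=large make bulk_data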
# UI TASKS
# --------------------------------------
UI_BUILD_FLAG_FILE = awx/ui/.ui-built
clean-ui:
rm -rf node_modules
rm -rf awx/ui/node_modules
rm -rf awx/ui/build
rm -rf awx/ui/src/locales/_build
rm -rf $(UI_BUILD_FLAG_FILE)
# the collectstatic command doesn't like it if this dir doesn't exist.
mkdir -p awx/ui/build/static
awx/ui/node_modules:
NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
$(UI_BUILD_FLAG_FILE):
$(MAKE) awx/ui/node_modules
$(PYTHON) tools/scripts/compilemessages.py
$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
$(NPM_BIN) --prefix awx/ui --loglevel warn run build
touch $@
ui-release: $(UI_BUILD_FLAG_FILE)
ui-devel: awx/ui/node_modules
@$(MAKE) -B $(UI_BUILD_FLAG_FILE)
@if [ -d "/var/lib/awx" ] ; then \
mkdir -p /var/lib/awx/public/static/css; \
mkdir -p /var/lib/awx/public/static/js; \
mkdir -p /var/lib/awx/public/static/media; \
cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css; \
cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js; \
cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media; \
fi
ui-devel-instrumented: awx/ui/node_modules
$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
ui-devel-test: awx/ui/node_modules
$(NPM_BIN) --prefix awx/ui --loglevel warn run start
ui-lint:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui lint
$(NPM_BIN) run --prefix awx/ui prettier-check
ui-test:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui test
ui-test-screens:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui pretest
$(NPM_BIN) run --prefix awx/ui test-screens --runInBand
ui-test-general:
$(NPM_BIN) --prefix awx/ui install
$(NPM_BIN) run --prefix awx/ui pretest
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
HEADLESS ?= no
ifeq ($(HEADLESS), yes)
dist/$(SDIST_TAR_FILE):
else
dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
endif
$(PYTHON) -m build -s
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -523,33 +516,34 @@ endif
docker-compose-sources: .git/hooks/pre-commit
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
fi;
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
-e awx_image=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_devel \
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
-e awx_image_tag=$(COMPOSE_TAG) \
-e receptor_image=$(RECEPTOR_IMAGE) \
-e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
-e execution_node_count=$(EXECUTION_NODE_COUNT) \
-e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
-e enable_pgbouncer=$(PGBOUNCER) \
-e enable_keycloak=$(KEYCLOAK) \
-e enable_ldap=$(LDAP) \
-e enable_splunk=$(SPLUNK) \
-e enable_prometheus=$(PROMETHEUS) \
-e enable_grafana=$(GRAFANA) \
-e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS) \
-e enable_otel=$(OTEL) \
-e enable_loki=$(LOKI) \
-e enable_tacacs=$(TACACS) \
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
-e pg_tls=$(PG_TLS) \
$(EXTRA_SOURCES_ANSIBLE_OPTS)
docker-compose: awx/projects docker-compose-sources
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS); \
-e vault_tls=$(VAULT_TLS) \
-e enable_ldap=$(LDAP); \
$(MAKE) docker-compose-up
docker-compose-up:
@@ -568,20 +562,14 @@ docker-compose-test: awx/projects docker-compose-sources
docker-compose-runtest: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
docker-compose-build-schema: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 make genschema
docker-compose-build-swagger: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
SCHEMA_DIFF_BASE_FOLDER ?= awx
SCHEMA_DIFF_BASE_BRANCH ?= devel
detect-schema-change: genschema
curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_FOLDER)/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
curl https://s3.amazonaws.com/awx-public-ci-files/$(SCHEMA_DIFF_BASE_BRANCH)/schema.json -o reference-schema.json
# Ignore differences in whitespace with -b
# diff exits with 1 when files differ - capture but don't fail
-diff -u -b reference-schema.json schema.json
validate-openapi-schema: genschema
@echo "Validating OpenAPI schema from schema.json..."
@python3 -c "from openapi_spec_validator import validate; import json; spec = json.load(open('schema.json')); validate(spec); print('✓ OpenAPI Schema is valid!')"
diff -u -b reference-schema.json schema.json
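# Example: compare the generated schema against another branch's published
# reference (branch name hypothetical):
#   SCHEMA_DIFF_BASE_BRANCH=release_4.5 make detect-schema-change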
docker-compose-clean: awx/projects
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
@@ -595,7 +583,7 @@ docker-compose-container-group-clean:
.PHONY: Dockerfile.dev
## Generate Dockerfile.dev for awx_devel image
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
ansible-playbook tools/ansible/dockerfile.yml \
-e dockerfile_name=Dockerfile.dev \
-e build_dev=True \
-e receptor_image=$(RECEPTOR_IMAGE)
@@ -603,22 +591,21 @@ Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
## Build awx_devel image for docker compose development environment
docker-compose-build: Dockerfile.dev
DOCKER_BUILDKIT=1 docker build \
--ssh default=$(SSH_AUTH_SOCK) \
-f Dockerfile.dev \
-t $(DEVEL_IMAGE_NAME) \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_DEVEL_CACHE_FLAG) .
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
.PHONY: docker-compose-buildx
## Build awx_devel image for docker compose development environment for multiple architectures
docker-compose-buildx: Dockerfile.dev
- docker buildx create --name docker-compose-buildx
docker buildx use docker-compose-buildx
docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
- docker buildx build \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_DEVEL_CACHE_FLAG) \
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) \
--platform=$(PLATFORMS) \
--tag $(DEVEL_IMAGE_NAME) \
-f Dockerfile.dev .
@@ -629,13 +616,28 @@ docker-clean:
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
docker-refresh: docker-clean docker-compose
## Docker Development Environment with Elastic Stack Connected
docker-compose-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
docker-compose-cluster-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
docker-compose-container-group:
MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose
clean-elk:
docker stop tools_kibana_1
docker stop tools_logstash_1
docker stop tools_elasticsearch_1
docker rm tools_logstash_1
docker rm tools_elasticsearch_1
docker rm tools_kibana_1
VERSION:
@echo "awx: $(VERSION)"
@@ -656,33 +658,29 @@ version-for-buildyml:
.PHONY: Dockerfile
## Generate Dockerfile for awx image
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
ansible-playbook tools/ansible/dockerfile.yml \
-e receptor_image=$(RECEPTOR_IMAGE) \
-e headless=$(HEADLESS)
## Build awx image for deployment on Kubernetes environment.
awx-kube-build: Dockerfile
DOCKER_BUILDKIT=1 docker build -f Dockerfile \
--ssh default=$(SSH_AUTH_SOCK) \
--build-arg VERSION=$(VERSION) \
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
--build-arg HEADLESS=$(HEADLESS) \
$(DOCKER_KUBE_CACHE_FLAG) \
-t $(IMAGE_KUBE) .
-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
## Build multi-arch awx image for deployment on Kubernetes environment.
awx-kube-buildx: Dockerfile
- docker buildx create --name awx-kube-buildx
docker buildx use awx-kube-buildx
docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
- docker buildx build \
--push \
--build-arg VERSION=$(VERSION) \
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
--build-arg HEADLESS=$(HEADLESS) \
--platform=$(PLATFORMS) \
$(DOCKER_KUBE_CACHE_FLAG) \
--tag $(IMAGE_KUBE) \
--tag $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) \
-f Dockerfile .
- docker buildx rm awx-kube-buildx
@@ -690,7 +688,7 @@ awx-kube-buildx: Dockerfile
.PHONY: Dockerfile.kube-dev
## Generate Docker.kube-dev for awx_kube_devel image
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
ansible-playbook tools/ansible/dockerfile.yml \
-e dockerfile_name=Dockerfile.kube-dev \
-e kube_dev=True \
-e template_dest=_build_kube_dev \
@@ -699,31 +697,39 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
## Build awx_kube_devel image for development on local Kubernetes environment.
awx-kube-dev-build: Dockerfile.kube-dev
DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
--ssh default=$(SSH_AUTH_SOCK) \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
-t $(IMAGE_KUBE_DEV) .
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
awx-kube-dev-buildx: Dockerfile.kube-dev
- docker buildx create --name awx-kube-dev-buildx
docker buildx use awx-kube-dev-buildx
docker buildx build \
--ssh default=$(SSH_AUTH_SOCK) \
- docker buildx build \
--push \
--build-arg BUILDKIT_INLINE_CACHE=1 \
$(DOCKER_KUBE_DEV_CACHE_FLAG) \
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
--platform=$(PLATFORMS) \
--tag $(IMAGE_KUBE_DEV) \
--tag $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-f Dockerfile.kube-dev .
- docker buildx rm awx-kube-dev-buildx
kind-dev-load: awx-kube-dev-build
$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
$(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
# Translation TASKS
# --------------------------------------
## generate UI .pot file, an empty template of strings yet to be translated
pot: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template --clean
## generate UI .po files for each locale (will update translated strings for `en`)
po: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings -- --clean
## generate API django .pot .po
messages:
@if [ "$(VENV_BASE)" ]; then \
@@ -770,6 +776,6 @@ help/generate:
{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
@printf "\n"
## Display help for ui targets
help/ui:
@$(MAKE) -s help MAKEFILE_LIST="awx/ui/Makefile"
## Display help for ui-next targets
help/ui-next:
@$(MAKE) -s help MAKEFILE_LIST="awx/ui_next/Makefile"

View File

@@ -1,24 +1,13 @@
[![CI](https://github.com/ansible/awx/actions/workflows/ci.yml/badge.svg?branch=devel)](https://github.com/ansible/awx/actions/workflows/ci.yml) [![codecov](https://codecov.io/github/ansible/awx/graph/badge.svg?token=4L4GSP9IAR)](https://codecov.io/github/ansible/awx) [![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-yellow.svg)](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html) [![Apache v2 License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/ansible/awx/blob/devel/LICENSE.md) [![AWX on the Ansible Forum](https://img.shields.io/badge/mailing%20list-AWX-orange.svg)](https://forum.ansible.com/tag/awx)
[![CI](https://github.com/ansible/awx/actions/workflows/ci.yml/badge.svg?branch=devel)](https://github.com/ansible/awx/actions/workflows/ci.yml) [![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-Ansible-yellow.svg)](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [![Apache v2 License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/ansible/awx/blob/devel/LICENSE.md) [![AWX Mailing List](https://img.shields.io/badge/mailing%20list-AWX-orange.svg)](https://groups.google.com/g/awx-project)
[![Ansible Matrix](https://img.shields.io/badge/matrix-Ansible%20Community-blueviolet.svg?logo=matrix)](https://chat.ansible.im/#/welcome) [![Ansible Discourse](https://img.shields.io/badge/discourse-Ansible%20Community-yellowgreen.svg?logo=discourse)](https://forum.ansible.com)
<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
> [!CAUTION]
> The last release of this repository was released on Jul 2, 2024.
> **Releases of this project are now paused during a large scale refactoring.**
> For more information, follow [the Forum](https://forum.ansible.com/) and - more specifically - see the various communications on the matter:
>
> * [Blog: Upcoming Changes to the AWX Project](https://www.ansible.com/blog/upcoming-changes-to-the-awx-project/)
> * [Streamlining AWX Releases](https://forum.ansible.com/t/streamlining-awx-releases/6894) Primary update
> * [Refactoring AWX into a Pluggable, Service-Oriented Architecture](https://forum.ansible.com/t/refactoring-awx-into-a-pluggable-service-oriented-architecture/7404)
> * [Upcoming changes to AWX Operator installation methods](https://forum.ansible.com/t/upcoming-changes-to-awx-operator-installation-methods/7598)
> * [AWX UI and credential types transitioning to the new pluggable architecture](https://forum.ansible.com/t/awx-ui-and-credential-types-transitioning-to-the-new-pluggable-architecture/8027)
AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is one of the upstream projects for [Red Hat Ansible Automation Platform](https://www.ansible.com/products/automation-platform).
To install AWX, please view the [Install guide](./INSTALL.md).
To learn more about using AWX, view the [AWX docs site](https://docs.ansible.com/projects/awx/en/latest/).
To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).
The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
@@ -29,9 +18,9 @@ Contributing
- Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
- All code submissions are made through pull requests against the `devel` branch.
- All contributors must use `git commit --signoff` for any commit to be merged and agree that usage of `--signoff` constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
- All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
- If submitting a large code change, it's a good idea to start a discussion via the [Ansible Forum](https://forum.ansible.com/tag/awx). This helps everyone know what's going on, and it also helps save time and effort if the community decides some changes are needed.
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on web.libera.chat and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.
Reporting Issues
----------------
@@ -41,11 +30,12 @@ If you're experiencing a problem that you feel is a bug in AWX or have ideas for
Code of Conduct
---------------
We require all of our community members and contributors to adhere to the [Ansible code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
Get Involved
------------
We welcome your feedback and ideas via the [Ansible Forum](https://forum.ansible.com/tag/awx).
We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://docs.ansible.com/projects/awx/en/latest/contributor/communication.html).
- Join the [Ansible AWX channel on Matrix](https://matrix.to/#/#awx:ansible.com)
- Join the [Ansible Community Forum](https://forum.ansible.com)

View File

@@ -5,7 +5,6 @@ from __future__ import absolute_import, unicode_literals
import os
import sys
import warnings
from importlib.metadata import PackageNotFoundError, version as _get_version
def get_version():
@@ -35,8 +34,10 @@ def version_file():
try:
__version__ = _get_version('awx')
except PackageNotFoundError:
import pkg_resources
__version__ = pkg_resources.get_distribution('awx').version
except pkg_resources.DistributionNotFound:
__version__ = get_version()
__all__ = ['__version__']
@@ -60,16 +61,90 @@ else:
from django.db import connection
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
commands.append(f[:-4])
except OSError:
pass
return commands
def oauth2_getattribute(self, attr):
# Custom method to override
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
from django.conf import settings
from oauth2_provider.settings import DEFAULTS
val = None
if (isinstance(attr, str)) and (attr in DEFAULTS) and (not attr.startswith('_')):
# certain Django OAuth Toolkit migrations actually reference
# setting lookups for references to model classes (e.g.,
# oauth2_settings.REFRESH_TOKEN_MODEL)
# If we're doing an OAuth2 setting lookup *while running* a migration,
# don't do our usual database settings lookup
val = settings.OAUTH2_PROVIDER.get(attr)
if val is None:
val = object.__getattribute__(self, attr)
return val
def prepare_env():
# Update the default settings environment variable based on current mode.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings')
os.environ.setdefault('AWX_MODE', MODE)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
# Hide DeprecationWarnings when running in production. Need to first load
# settings to apply our filter after Django's own warnings filter.
from django.conf import settings
if not settings.DEBUG: # pragma: no cover
warnings.simplefilter('ignore', DeprecationWarning)
# Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management
django.core.management.find_commands = find_commands
# Monkeypatch Oauth2 toolkit settings class to check for settings
# in django.conf settings each time, not just once during import
import oauth2_provider.settings
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external
# program via unit tests.
for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
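# e.g. exporting AWX_TEST_DATABASE_NAME=awx_test before invoking a management
# command points the default connection at that database (name hypothetical).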
# Disable capturing all SQL queries in memory when in DEBUG mode.
if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
# Use the default devserver addr/port defined in settings for runserver.
default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
from django.core.management.commands import runserver as core_runserver
original_handle = core_runserver.Command.handle
def handle(self, *args, **options):
if not options.get('addrport'):
options['addrport'] = '%s:%d' % (default_addr, int(default_port))
elif options.get('addrport').isdigit():
options['addrport'] = '%s:%d' % (default_addr, int(options['addrport']))
return original_handle(self, *args, **options)
core_runserver.Command.handle = handle
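# Illustration: `awx-manage runserver 8013` is rewritten to bind
# DEVSERVER_DEFAULT_ADDR:8013 before the original handler runs
# (port chosen arbitrarily; any bare digit string is expanded the same way).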
def manage():

View File

@@ -11,6 +11,9 @@ from django.utils.encoding import smart_str
# Django REST Framework
from rest_framework import authentication
# Django-OAuth-Toolkit
from oauth2_provider.contrib.rest_framework import OAuth2Authentication
logger = logging.getLogger('awx.api.authentication')
@@ -33,3 +36,16 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
class SessionAuthentication(authentication.SessionAuthentication):
def authenticate_header(self, request):
return 'Session'
class LoggedOAuth2Authentication(OAuth2Authentication):
def authenticate(self, request):
ret = super(LoggedOAuth2Authentication, self).authenticate(request)
if ret:
user, token = ret
username = user.username if user else '<none>'
logger.info(
smart_str(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
)
setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
return ret

View File

@@ -6,6 +6,10 @@ from rest_framework import serializers
# AWX
from awx.conf import fields, register, register_validate
from awx.api.fields import OAuth2ProviderField
from oauth2_provider.settings import oauth2_settings
from awx.sso.common import is_remote_auth_enabled
register(
'SESSION_COOKIE_AGE',
@@ -31,7 +35,10 @@ register(
'DISABLE_LOCAL_AUTH',
field_class=fields.BooleanField,
label=_('Disable the built-in authentication system'),
help_text=_("Controls whether users are prevented from using the built-in authentication system. "),
help_text=_(
"Controls whether users are prevented from using the built-in authentication system. "
"You probably want to do this if you are using an LDAP or SAML integration."
),
category=_('Authentication'),
category_slug='authentication',
)
@@ -43,6 +50,41 @@ register(
category=_('Authentication'),
category_slug='authentication',
)
register(
'OAUTH2_PROVIDER',
field_class=OAuth2ProviderField,
default={
'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
},
label=_('OAuth 2 Timeout Settings'),
help_text=_(
'Dictionary for customizing OAuth 2 timeouts, available items are '
'`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
'the duration of refresh tokens, after expired access tokens, '
'in the number of seconds.'
),
category=_('Authentication'),
category_slug='authentication',
unit=_('seconds'),
)
register(
'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
field_class=fields.BooleanField,
default=False,
label=_('Allow External Users to Create OAuth2 Tokens'),
help_text=_(
'For security reasons, users from external auth providers (LDAP, SAML, '
'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
'To change this behavior, enable this setting. Existing tokens will '
'not be deleted when this setting is toggled off.'
),
category=_('Authentication'),
category_slug='authentication',
)
register(
'LOGIN_REDIRECT_OVERRIDE',
field_class=fields.CharField,
@@ -67,7 +109,7 @@ register(
def authentication_validate(serializer, attrs):
if attrs.get('DISABLE_LOCAL_AUTH', False):
if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
raise serializers.ValidationError(_("There are no remote authentication systems configured."))
return attrs
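# For illustration, a valid OAUTH2_PROVIDER value (durations hypothetical):
#   {'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000,
#    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,
#    'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000}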

View File

@@ -9,6 +9,7 @@ from django.core.exceptions import ObjectDoesNotExist
from rest_framework import serializers
# AWX
from awx.conf import fields
from awx.main.models import Credential
__all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimField']
@@ -21,7 +22,7 @@ class NullFieldMixin(object):
"""
def validate_empty_values(self, data):
is_empty_value, data = super(NullFieldMixin, self).validate_empty_values(data)
(is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
if is_empty_value and data is None:
return (False, data)
return (is_empty_value, data)
@@ -78,6 +79,19 @@ class VerbatimField(serializers.Field):
return value
class OAuth2ProviderField(fields.DictField):
default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
child = fields.IntegerField(min_value=1)
def to_internal_value(self, data):
data = super(OAuth2ProviderField, self).to_internal_value(data)
invalid_flags = set(data.keys()) - self.valid_key_names
if invalid_flags:
self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
return data
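# Sketch of the validation behavior (values hypothetical):
#   {'ACCESS_TOKEN_EXPIRE_SECONDS': 3600} passes, while {'FOO': 1} fails
#   with an invalid_key_names error naming the rejected key.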
class DeprecatedCredentialField(serializers.IntegerField):
def __init__(self, **kwargs):
kwargs['allow_null'] = True

View File

@@ -13,8 +13,8 @@ from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldDoesNotExist
from django.db import connection, transaction
from django.db.models.fields.related import OneToOneRel
from django.http import QueryDict, JsonResponse
from django.shortcuts import get_object_or_404, redirect
from django.http import QueryDict
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.encoding import smart_str
from django.utils.safestring import mark_safe
@@ -30,23 +30,18 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation
# Shared code for the AWX platform
from awx_plugins.interfaces._temporary_private_licensing_api import detect_server_product_name
# django-ansible-base
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
from ansible_base.lib.utils.models import get_all_field_names
from ansible_base.lib.utils.requests import get_remote_host, is_proxied_request
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
from ansible_base.rbac.permission_registry import permission_registry
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
# AWX
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.models.rbac import give_creator_permissions
from awx.main.access import optimize_queryset
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
from awx.main.utils.licensing import server_product_name
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning
@@ -81,14 +76,7 @@ analytics_logger = logging.getLogger('awx.analytics.performance')
class LoggedLoginView(auth_views.LoginView):
def get(self, request, *args, **kwargs):
if is_proxied_request():
next = request.GET.get('next', "")
if next:
next = f"?next={next}"
return redirect(f"/{next}")
# The django.auth.contrib login form doesn't perform the content
# negotiation we've come to expect from DRF; add in code to catch
# situations where Accept != text/html (or */*) and reply with
@@ -104,19 +92,9 @@ class LoggedLoginView(auth_views.LoginView):
return super(LoggedLoginView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
if is_proxied_request():
# Return a message telling the user to log in via AAP
return JsonResponse(
{
'detail': _('Please log in via Platform Authentication.'),
},
status=status.HTTP_401_UNAUTHORIZED,
)
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
if request.user.is_authenticated:
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
ret.set_cookie(
'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
)
@@ -125,21 +103,13 @@ class LoggedLoginView(auth_views.LoginView):
return ret
else:
if 'username' in self.request.POST:
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
ret.status_code = 401
return ret
class LoggedLogoutView(auth_views.LogoutView):
success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
def dispatch(self, request, *args, **kwargs):
if is_proxied_request():
# 1) We intentionally don't obey ?next= here, just always redirect to platform login
# 2) Hack to prevent rewrites of Location header
qs = "?__gateway_no_rewrite__=1&next=/"
return redirect(f"/api/gateway/v1/logout/{qs}")
original_user = getattr(request, 'user', None)
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
current_user = getattr(request, 'user', None)
@@ -161,14 +131,16 @@ def get_view_description(view, html=False):
def get_default_schema():
# drf-spectacular is configured via REST_FRAMEWORK['DEFAULT_SCHEMA_CLASS']
# Just use the DRF default, which will pick up our CustomAutoSchema
return views.APIView.schema
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import AutoSchema
return AutoSchema()
else:
return views.APIView.schema
class APIView(views.APIView):
# Schema is inherited from DRF's APIView, which uses DEFAULT_SCHEMA_CLASS
# No need to override it here - drf-spectacular will handle it
schema = get_default_schema()
versioning_class = URLPathVersioning
def initialize_request(self, request, *args, **kwargs):
@@ -176,23 +148,22 @@ class APIView(views.APIView):
Store the Django REST Framework Request object as an attribute on the
normal Django request, store time the request started.
"""
remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
self.time_started = time.time()
if getattr(settings, 'SQL_DEBUG', False):
self.queries_before = len(connection.queries)
if 'HTTP_X_TRUSTED_PROXY' in request.environ:
if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
remote_headers = settings.REMOTE_HOST_HEADERS
else:
logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
# they respect the allowed proxy list
if settings.PROXY_IP_ALLOWED_LIST:
if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
if all(
[
settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
]
):
for custom_header in settings.REMOTE_HOST_HEADERS:
if custom_header.startswith('HTTP_'):
request.environ.pop(custom_header, None)
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
request.drf_request = drf_request
@@ -237,21 +208,17 @@ class APIView(views.APIView):
return response
if response.status_code >= 400:
ip = get_remote_host(request) # request.META.get('REMOTE_ADDR', None)
msg_data = {
'status_code': response.status_code,
'user_name': request.user,
'url_path': request.path,
'remote_addr': ip,
'remote_addr': request.META.get('REMOTE_ADDR', None),
}
if type(response.data) is dict:
msg_data['error'] = response.data.get('error', response.status_text)
elif type(response.data) is list:
if len(response.data) > 0 and isinstance(response.data[0], str):
msg_data['error'] = str(response.data[0])
else:
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
else:
msg_data['error'] = response.status_text
@@ -265,8 +232,7 @@ class APIView(views.APIView):
if hasattr(self, '__init_request_error__'):
response = self.handle_exception(self.__init_request_error__)
if response.status_code == 401:
if response.data and 'detail' in response.data:
response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
logger.info(status_msg)
else:
logger.warning(status_msg)
@@ -275,7 +241,7 @@ class APIView(views.APIView):
time_started = getattr(self, 'time_started', None)
if request.user.is_authenticated:
response['X-API-Product-Version'] = get_awx_version()
response['X-API-Product-Name'] = detect_server_product_name()
response['X-API-Product-Name'] = server_product_name()
response['X-API-Node'] = settings.CLUSTER_HOST_ID
if time_started:
@@ -372,6 +338,12 @@ class APIView(views.APIView):
kwargs.pop('version')
return super(APIView, self).dispatch(request, *args, **kwargs)
def check_permissions(self, request):
if request.method not in ('GET', 'OPTIONS', 'HEAD'):
if 'write' not in getattr(request.user, 'oauth_scopes', ['write']):
raise PermissionDenied()
return super(APIView, self).check_permissions(request)
class GenericAPIView(generics.GenericAPIView, APIView):
# Base class for all model-based views.
@@ -764,7 +736,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
def unattach(self, request, *args, **kwargs):
sub_id, res = self.unattach_validate(request)
(sub_id, res) = self.unattach_validate(request)
if res:
return res
return self.unattach_by_id(request, sub_id)
@@ -842,7 +814,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first()
auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct()
@@ -1023,9 +995,6 @@ class GenericCancelView(RetrieveAPIView):
# In subclass set model, serializer_class
obj_permission_type = 'cancel'
def get(self, request, *args, **kwargs):
return super(GenericCancelView, self).get(request, *args, **kwargs)
@transaction.non_atomic_requests
def dispatch(self, *args, **kwargs):
return super(GenericCancelView, self).dispatch(*args, **kwargs)

View File

@@ -103,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
default = field.get_default()
if type(default) is UUID:
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
field_info['default'] = default
except serializers.SkipField:

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import MetricsView
urls = [re_path(r'^$', MetricsView.as_view(), name='metrics_view')]
__all__ = ['urls']

View File

@@ -111,7 +111,7 @@ class UnifiedJobEventPagination(Pagination):
def __init__(self, *args, **kwargs):
self.use_limit_paginator = False
self.limit_pagination = LimitPagination()
super().__init__(*args, **kwargs)
return super().__init__(*args, **kwargs)
def paginate_queryset(self, queryset, request, view=None):
if 'limit' in request.query_params:

View File

@@ -10,7 +10,7 @@ from rest_framework import permissions
# AWX
from awx.main.access import check_user_access
from awx.main.models import Inventory, UnifiedJob, Organization
from awx.main.models import Inventory, UnifiedJob
from awx.main.utils import get_object_or_400
logger = logging.getLogger('awx.api.permissions')
@@ -228,19 +228,12 @@ class InventoryInventorySourcesUpdatePermission(ModelAccessPermission):
class UserPermission(ModelAccessPermission):
def check_post_permissions(self, request, view, obj=None):
if not request.data:
return Organization.access_qs(request.user, 'change').exists()
return request.user.admin_of_organizations.exists()
elif request.user.is_superuser:
return True
raise PermissionDenied()
class IsSystemAdmin(permissions.BasePermission):
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated):
return False
return request.user.is_superuser
class IsSystemAdminOrAuditor(permissions.BasePermission):
"""
Allows write access only to system admin users.

View File

@@ -1,119 +0,0 @@
import warnings
from rest_framework.permissions import IsAuthenticated
from drf_spectacular.openapi import AutoSchema
from drf_spectacular.views import (
SpectacularAPIView,
SpectacularSwaggerView,
SpectacularRedocView,
)
def filter_credential_type_schema(
result,
generator, # NOSONAR
request, # NOSONAR
public, # NOSONAR
):
"""
Postprocessing hook to filter CredentialType kind enum values.
For CredentialTypeRequest and PatchedCredentialTypeRequest schemas (POST/PUT/PATCH),
filter the 'kind' enum to only show 'cloud' and 'net' values.
This ensures the OpenAPI schema accurately reflects that only 'cloud' and 'net'
credential types can be created or modified via the API, matching the validation
in CredentialTypeSerializer.validate().
Args:
result: The OpenAPI schema dict to be modified
generator, request, public: Required by drf-spectacular interface (unused)
Returns:
The modified OpenAPI schema dict
"""
schemas = result.get('components', {}).get('schemas', {})
# Filter CredentialTypeRequest (POST/PUT) - field is required
if 'CredentialTypeRequest' in schemas:
kind_prop = schemas['CredentialTypeRequest'].get('properties', {}).get('kind', {})
if 'enum' in kind_prop:
# Filter to only cloud and net (no None - field is required)
kind_prop['enum'] = ['cloud', 'net']
kind_prop['description'] = "* `cloud` - Cloud\\n* `net` - Network"
# Filter PatchedCredentialTypeRequest (PATCH) - field is optional
if 'PatchedCredentialTypeRequest' in schemas:
kind_prop = schemas['PatchedCredentialTypeRequest'].get('properties', {}).get('kind', {})
if 'enum' in kind_prop:
# Filter to only cloud and net (None allowed - field can be omitted in PATCH)
kind_prop['enum'] = ['cloud', 'net', None]
kind_prop['description'] = "* `cloud` - Cloud\\n* `net` - Network"
return result
class CustomAutoSchema(AutoSchema):
"""Custom AutoSchema to add swagger_topic to tags and handle deprecated endpoints."""
def get_tags(self):
tags = []
try:
if hasattr(self.view, 'get_serializer'):
serializer = self.view.get_serializer()
else:
serializer = None
except Exception:
serializer = None
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for this view.'.format(self.view.__class__.__name__)
)
if hasattr(self.view, 'swagger_topic'):
tags.append(str(self.view.swagger_topic).title())
elif serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
elif hasattr(self.view, 'model'):
tags.append(str(self.view.model._meta.verbose_name_plural).title())
else:
tags = super().get_tags() # Use default drf-spectacular behavior
if not tags:
warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
tags = ['api'] # Fallback to default value
return tags
def is_deprecated(self):
"""Return `True` if this operation is to be marked as deprecated."""
return getattr(self.view, 'deprecated', False)
class AuthenticatedSpectacularAPIView(SpectacularAPIView):
"""SpectacularAPIView that requires authentication."""
permission_classes = [IsAuthenticated]
class AuthenticatedSpectacularSwaggerView(SpectacularSwaggerView):
"""SpectacularSwaggerView that requires authentication."""
permission_classes = [IsAuthenticated]
class AuthenticatedSpectacularRedocView(SpectacularRedocView):
"""SpectacularRedocView that requires authentication."""
permission_classes = [IsAuthenticated]
# Schema view (returns OpenAPI schema JSON/YAML)
schema_view = AuthenticatedSpectacularAPIView.as_view()
# Swagger UI view
swagger_ui_view = AuthenticatedSpectacularSwaggerView.as_view(url_name='api:schema-json')
# ReDoc UI view
redoc_view = AuthenticatedSpectacularRedocView.as_view(url_name='api:schema-json')
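# For context, a drf-spectacular postprocessing hook such as
# filter_credential_type_schema above is registered through Django settings.
# A minimal sketch (the dotted module path is an assumption, not taken from this diff):
SPECTACULAR_SETTINGS = {
    'POSTPROCESSING_HOOKS': [
        'awx.api.swagger.filter_credential_type_schema',
    ],
}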

View File

@@ -6,12 +6,14 @@ import copy
import json
import logging
import re
import yaml
import urllib.parse
from collections import Counter, OrderedDict
from datetime import timedelta
from uuid import uuid4
# OAuth2
from oauthlib import oauth2
from oauthlib.common import generate_token
# Jinja
from jinja2 import sandbox, StrictUndefined
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
@@ -48,7 +50,7 @@ from ansible_base.rbac import permission_registry
# AWX
from awx.main.access import get_user_capabilities
from awx.main.constants import ACTIVE_STATES, org_role_to_permission
from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission
from awx.main.models import (
ActivityStream,
AdHocCommand,
@@ -77,11 +79,14 @@ from awx.main.models import (
Label,
Notification,
NotificationTemplate,
OAuth2AccessToken,
OAuth2Application,
Organization,
Project,
ProjectUpdate,
ProjectUpdateEvent,
ReceptorAddress,
RefreshToken,
Role,
Schedule,
SystemJob,
@@ -97,6 +102,7 @@ from awx.main.models import (
WorkflowJobTemplate,
WorkflowJobTemplateNode,
StdoutMaxBytesExceeded,
CLOUD_INVENTORY_SOURCES,
)
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role
@@ -113,11 +119,8 @@ from awx.main.utils import (
truncate_stdout,
get_licenser,
)
from awx.main.utils.filters import SmartFilter
from awx.main.utils.plugins import load_combined_inventory_source_options
from awx.main.utils.named_url_graph import reset_counters
from awx.main.utils.inventory_vars import update_group_variables
from awx.main.scheduler.task_manager_models import TaskManagerModels
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.signals import update_inventory_computed_fields
@@ -131,6 +134,8 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver
# AWX Utils
from awx.api.validators import HostnameRegexValidator
from awx.sso.common import get_external_account
logger = logging.getLogger('awx.api.serializers')
# Fields that should be summarized regardless of object type.
@@ -629,41 +634,15 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
return exclusions
def validate(self, attrs):
"""
Apply serializer validation. Called by DRF.
Can be extended by subclasses; alternatively, consider overriding
`validate_with_obj` in subclasses, which provides access to the model
object and exception handling for field validation.
:param dict attrs: The names and values of the model form fields.
:raise rest_framework.exceptions.ValidationError: If the validation
fails.
The exception must contain a dict with the names of the form fields
which failed validation as keys, and a list of error messages as
values. This ensures that the error messages are rendered near the
relevant fields.
:return: The names and values from the model form fields, possibly
modified by the validations.
:rtype: dict
"""
attrs = super(BaseSerializer, self).validate(attrs)
# Create/update a model instance and run its full_clean() method to
# do any validation implemented on the model class.
exclusions = self.get_validation_exclusions(self.instance)
# Create a new model instance or take the existing one if it exists,
# and update its attributes with the respective field values from
# attrs.
obj = self.instance or self.Meta.model()
for k, v in attrs.items():
if k not in exclusions and k != 'canonical_address_port':
setattr(obj, k, v)
try:
# Run serializer validators which need the model object for
# validation.
self.validate_with_obj(attrs, obj)
# Apply any validations implemented on the model class.
# Create/update a model instance and run its full_clean() method to
# do any validation implemented on the model class.
exclusions = self.get_validation_exclusions(self.instance)
obj = self.instance or self.Meta.model()
for k, v in attrs.items():
if k not in exclusions and k != 'canonical_address_port':
setattr(obj, k, v)
obj.full_clean(exclude=exclusions)
# full_clean may modify values on the instance; copy those changes
# back to attrs so they are saved.
@@ -692,32 +671,6 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
raise ValidationError(d)
return attrs
def validate_with_obj(self, attrs, obj):
"""
Override this if you need the model instance for your validation.
:param dict attrs: The names and values of the model form fields.
:param obj: An instance of the class's meta model.
If the serializer runs on a newly created object, obj contains only
the attrs from its serializer. If the serializer runs because an
object has been edited, obj is the existing model instance with all
attributes and values available.
:raise django.core.exceptions.ValidationError: Raise this if your
validation fails.
To make the error appear at the respective form field, instantiate
the Exception with a dict containing the field name as key and the
error message as value.
Example: ``ValidationError({"password": "Not good enough!"})``
If the exception contains just a string, the message cannot be
related to a field and is rendered at the top of the model form.
:return: None
"""
return
def reverse(self, *args, **kwargs):
kwargs['request'] = self.context.get('request')
return reverse(*args, **kwargs)
@@ -734,22 +687,7 @@ class EmptySerializer(serializers.Serializer):
pass
class OpaQueryPathMixin(serializers.Serializer):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def validate_opa_query_path(self, value):
# Decode the URL and re-encode it
decoded_value = urllib.parse.unquote(value)
re_encoded_value = urllib.parse.quote(decoded_value, safe='/')
if value != re_encoded_value:
raise serializers.ValidationError(_("The URL must be properly encoded."))
return value
class UnifiedJobTemplateSerializer(BaseSerializer, OpaQueryPathMixin):
class UnifiedJobTemplateSerializer(BaseSerializer):
# As a base serializer, the capabilities prefetch is not used directly,
# instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
capabilities_prefetch = []
@@ -963,13 +901,13 @@ class UnifiedJobSerializer(BaseSerializer):
class UnifiedJobListSerializer(UnifiedJobSerializer):
class Meta:
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished', '-artifacts')
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished')
def get_field_names(self, declared_fields, info):
field_names = super(UnifiedJobListSerializer, self).get_field_names(declared_fields, info)
# Meta multiple inheritance and -field_name options don't seem to be
# taking effect above, so remove the undesired fields here.
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'event_processing_finished', 'artifacts'))
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'event_processing_finished'))
def get_types(self):
if type(self) is UnifiedJobListSerializer:
@@ -1023,6 +961,8 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
class UserSerializer(BaseSerializer):
password = serializers.CharField(required=False, default='', help_text=_('Field used to change the password.'))
ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
is_system_auditor = serializers.BooleanField(default=False)
show_capabilities = ['edit', 'delete']
@@ -1039,13 +979,22 @@ class UserSerializer(BaseSerializer):
'is_superuser',
'is_system_auditor',
'password',
'ldap_dn',
'last_login',
'external_account',
)
extra_kwargs = {'last_login': {'read_only': True}}
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
ret['password'] = '$encrypted$'
if self.get_external_account(obj):
# If this is an external account it shouldn't have a password field
ret.pop('password', None)
else:
# If its an internal account lets assume there is a password and return $encrypted$ to the user
ret['password'] = '$encrypted$'
if obj and type(self) is UserSerializer:
ret['auth'] = obj.social_auth.values('provider', 'uid')
return ret
def get_validation_exclusions(self, obj=None):
@@ -1054,6 +1003,7 @@ class UserSerializer(BaseSerializer):
return ret
def validate_password(self, value):
django_validate_password(value)
if not self.instance and value in (None, ''):
raise serializers.ValidationError(_('Password required for new User.'))
@@ -1076,52 +1026,11 @@ class UserSerializer(BaseSerializer):
return value
def validate_with_obj(self, attrs, obj):
"""
Validate the password with the Django password validators
To enable the Django password validators, configure
`settings.AUTH_PASSWORD_VALIDATORS` as described in the [Django
docs](https://docs.djangoproject.com/en/5.1/topics/auth/passwords/#enabling-password-validation)
:param dict attrs: The User form field names and their values as a dict.
Example::
{
'username': 'TestUsername', 'first_name': 'FirstName',
'last_name': 'LastName', 'email': 'First.Last@my.org',
'is_superuser': False, 'is_system_auditor': False,
'password': 'secret123'
}
:param obj: The User model instance.
:raises django.core.exceptions.ValidationError: Raise this if at least
one Django password validator fails.
The exception contains a dict ``{"password": <error-message>}``
which indicates that the password field has failed validation, and
the reason for failure.
:return: None.
"""
# We must do this here instead of in `validate_password` because some
# Django password validators need access to other model instance fields,
# e.g. ``username`` for the ``UserAttributeSimilarityValidator``.
password = attrs.get("password")
# Skip validation if no password has been entered. This may happen when
# an existing User is edited.
if password and password != '$encrypted$':
# Apply validators from settings.AUTH_PASSWORD_VALIDATORS. This may
# raise ValidationError.
#
# If the validation fails, re-raise the exception with adjusted
# content to make the error appear near the password field.
try:
django_validate_password(password, user=obj)
except DjangoValidationError as exc:
raise DjangoValidationError({"password": exc.messages})
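# For the hook above to have any effect, Django's validators must be enabled in
# settings (standard Django configuration; the values below are illustrative,
# not taken from this diff):
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {'min_length': 12}},
]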
def _update_password(self, obj, new_password):
if new_password and new_password != '$encrypted$':
# For now we're not raising an error, just not saving password for
# users managed by LDAP who already have an unusable password set.
# Get external password will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
if new_password and new_password != '$encrypted$' and not self.get_external_account(obj):
obj.set_password(new_password)
obj.save(update_fields=['password'])
@@ -1129,13 +1038,14 @@ class UserSerializer(BaseSerializer):
# as the modified user then inject a session key derived from
# the updated user to prevent logout. This is the logic used by
# the Django admin's own user_change_password view.
if self.instance and self.context['request'].user.username == obj.username:
update_session_auth_hash(self.context['request'], obj)
update_session_auth_hash(self.context['request'], obj)
elif not obj.password:
obj.set_unusable_password()
obj.save(update_fields=['password'])
def get_external_account(self, obj):
return get_external_account(obj)
def create(self, validated_data):
new_password = validated_data.pop('password', None)
is_system_auditor = validated_data.pop('is_system_auditor', None)
@@ -1166,10 +1076,44 @@ class UserSerializer(BaseSerializer):
roles=self.reverse('api:user_roles_list', kwargs={'pk': obj.pk}),
activity_stream=self.reverse('api:user_activity_stream_list', kwargs={'pk': obj.pk}),
access_list=self.reverse('api:user_access_list', kwargs={'pk': obj.pk}),
tokens=self.reverse('api:o_auth2_token_list', kwargs={'pk': obj.pk}),
authorized_tokens=self.reverse('api:user_authorized_token_list', kwargs={'pk': obj.pk}),
personal_tokens=self.reverse('api:user_personal_token_list', kwargs={'pk': obj.pk}),
)
)
return res
def _validate_ldap_managed_field(self, value, field_name):
if not getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
return value
try:
is_ldap_user = bool(self.instance and self.instance.profile.ldap_dn)
except AttributeError:
is_ldap_user = False
if is_ldap_user:
ldap_managed_fields = ['username']
ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
if field_name in ldap_managed_fields:
if value != getattr(self.instance, field_name):
raise serializers.ValidationError(_('Unable to change %s on user managed by LDAP.') % field_name)
return value
def validate_username(self, value):
return self._validate_ldap_managed_field(value, 'username')
def validate_first_name(self, value):
return self._validate_ldap_managed_field(value, 'first_name')
def validate_last_name(self, value):
return self._validate_ldap_managed_field(value, 'last_name')
def validate_email(self, value):
return self._validate_ldap_managed_field(value, 'email')
def validate_is_superuser(self, value):
return self._validate_ldap_managed_field(value, 'is_superuser')
class UserActivityStreamSerializer(UserSerializer):
"""Changes to system auditor status are shown as separate entries,
@@ -1182,12 +1126,205 @@ class UserActivityStreamSerializer(UserSerializer):
fields = ('*', '-is_system_auditor')
class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
class BaseOAuth2TokenSerializer(BaseSerializer):
refresh_token = serializers.SerializerMethodField()
token = serializers.SerializerMethodField()
ALLOWED_SCOPES = ['read', 'write']
class Meta:
model = OAuth2AccessToken
fields = ('*', '-name', 'description', 'user', 'token', 'refresh_token', 'application', 'expires', 'scope')
read_only_fields = ('user', 'token', 'expires', 'refresh_token')
extra_kwargs = {'scope': {'allow_null': False, 'required': False}, 'user': {'allow_null': False, 'required': True}}
def get_token(self, obj):
request = self.context.get('request', None)
try:
if request.method == 'POST':
return obj.token
else:
return CENSOR_VALUE
except ObjectDoesNotExist:
return ''
def get_refresh_token(self, obj):
request = self.context.get('request', None)
try:
if not obj.refresh_token:
return None
elif request.method == 'POST':
return getattr(obj.refresh_token, 'token', '')
else:
return CENSOR_VALUE
except ObjectDoesNotExist:
return None
def get_related(self, obj):
ret = super(BaseOAuth2TokenSerializer, self).get_related(obj)
if obj.user:
ret['user'] = self.reverse('api:user_detail', kwargs={'pk': obj.user.pk})
if obj.application:
ret['application'] = self.reverse('api:o_auth2_application_detail', kwargs={'pk': obj.application.pk})
ret['activity_stream'] = self.reverse('api:o_auth2_token_activity_stream_list', kwargs={'pk': obj.pk})
return ret
def _is_valid_scope(self, value):
if not value or (not isinstance(value, str)):
return False
words = value.split()
for word in words:
if words.count(word) > 1:
return False # do not allow duplicates
if word not in self.ALLOWED_SCOPES:
return False
return True
def validate_scope(self, value):
if not self._is_valid_scope(value):
raise serializers.ValidationError(_('Must be a simple space-separated string with allowed scopes {}.').format(self.ALLOWED_SCOPES))
return value
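# Quick illustration of the scope rule above (hypothetical values, not part of this diff):
s = BaseOAuth2TokenSerializer()
s._is_valid_scope('read write')   # True
s._is_valid_scope('write write')  # False -- duplicate scopes are rejected
s._is_valid_scope('admin')        # False -- not in ALLOWED_SCOPES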
def create(self, validated_data):
validated_data['user'] = self.context['request'].user
try:
return super(BaseOAuth2TokenSerializer, self).create(validated_data)
except oauth2.AccessDeniedError as e:
raise PermissionDenied(str(e))
class UserAuthorizedTokenSerializer(BaseOAuth2TokenSerializer):
class Meta:
extra_kwargs = {
'scope': {'allow_null': False, 'required': False},
'user': {'allow_null': False, 'required': True},
'application': {'allow_null': False, 'required': True},
}
def create(self, validated_data):
current_user = self.context['request'].user
validated_data['token'] = generate_token()
validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
obj = super(UserAuthorizedTokenSerializer, self).create(validated_data)
obj.save()
if obj.application:
RefreshToken.objects.create(user=current_user, token=generate_token(), application=obj.application, access_token=obj)
return obj
class OAuth2TokenSerializer(BaseOAuth2TokenSerializer):
def create(self, validated_data):
current_user = self.context['request'].user
validated_data['token'] = generate_token()
validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
obj = super(OAuth2TokenSerializer, self).create(validated_data)
if obj.application and obj.application.user:
obj.user = obj.application.user
obj.save()
if obj.application:
RefreshToken.objects.create(user=current_user, token=generate_token(), application=obj.application, access_token=obj)
return obj
class OAuth2TokenDetailSerializer(OAuth2TokenSerializer):
class Meta:
read_only_fields = ('*', 'user', 'application')
class UserPersonalTokenSerializer(BaseOAuth2TokenSerializer):
class Meta:
read_only_fields = ('user', 'token', 'expires', 'application')
def create(self, validated_data):
validated_data['token'] = generate_token()
validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
validated_data['application'] = None
obj = super(UserPersonalTokenSerializer, self).create(validated_data)
obj.save()
return obj
class OAuth2ApplicationSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete']
class Meta:
model = OAuth2Application
fields = (
'*',
'description',
'-user',
'client_id',
'client_secret',
'client_type',
'redirect_uris',
'authorization_grant_type',
'skip_authorization',
'organization',
)
read_only_fields = ('client_id', 'client_secret')
read_only_on_update_fields = ('user', 'authorization_grant_type')
extra_kwargs = {
'user': {'allow_null': True, 'required': False},
'organization': {'allow_null': False},
'authorization_grant_type': {'allow_null': False, 'label': _('Authorization Grant Type')},
'client_secret': {'label': _('Client Secret')},
'client_type': {'label': _('Client Type')},
'redirect_uris': {'label': _('Redirect URIs')},
'skip_authorization': {'label': _('Skip Authorization')},
}
def to_representation(self, obj):
ret = super(OAuth2ApplicationSerializer, self).to_representation(obj)
request = self.context.get('request', None)
if request.method != 'POST' and obj.client_type == 'confidential':
ret['client_secret'] = CENSOR_VALUE
if obj.client_type == 'public':
ret.pop('client_secret', None)
return ret
def get_related(self, obj):
res = super(OAuth2ApplicationSerializer, self).get_related(obj)
res.update(
dict(
tokens=self.reverse('api:o_auth2_application_token_list', kwargs={'pk': obj.pk}),
activity_stream=self.reverse('api:o_auth2_application_activity_stream_list', kwargs={'pk': obj.pk}),
)
)
if obj.organization_id:
res.update(
dict(
organization=self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id}),
)
)
return res
def get_modified(self, obj):
if obj is None:
return None
return obj.updated
def _summary_field_tokens(self, obj):
token_list = [{'id': x.pk, 'token': CENSOR_VALUE, 'scope': x.scope} for x in obj.oauth2accesstoken_set.all()[:10]]
if has_model_field_prefetched(obj, 'oauth2accesstoken_set'):
token_count = len(obj.oauth2accesstoken_set.all())
else:
if len(token_list) < 10:
token_count = len(token_list)
else:
token_count = obj.oauth2accesstoken_set.count()
return {'count': token_count, 'results': token_list}
def get_summary_fields(self, obj):
ret = super(OAuth2ApplicationSerializer, self).get_summary_fields(obj)
ret['tokens'] = self._summary_field_tokens(obj)
return ret
class OrganizationSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete']
class Meta:
model = Organization
fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment', 'opa_query_path')
fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment')
read_only_fields = ('*', 'custom_virtualenv')
def get_related(self, obj):
@@ -1202,6 +1339,7 @@ class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
admins=self.reverse('api:organization_admins_list', kwargs={'pk': obj.pk}),
teams=self.reverse('api:organization_teams_list', kwargs={'pk': obj.pk}),
credentials=self.reverse('api:organization_credential_list', kwargs={'pk': obj.pk}),
applications=self.reverse('api:organization_applications_list', kwargs={'pk': obj.pk}),
activity_stream=self.reverse('api:organization_activity_stream_list', kwargs={'pk': obj.pk}),
notification_templates=self.reverse('api:organization_notification_templates_list', kwargs={'pk': obj.pk}),
notification_templates_started=self.reverse('api:organization_notification_templates_started_list', kwargs={'pk': obj.pk}),
@@ -1230,7 +1368,7 @@ class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
# to a team. This provides a hint to the ui so it can know to not
# display these roles for team role selection.
for key in ('admin_role', 'member_role'):
if summary_dict and key in summary_dict.get('object_roles', {}):
if key in summary_dict.get('object_roles', {}):
summary_dict['object_roles'][key]['user_only'] = True
return summary_dict
@@ -1541,7 +1679,7 @@ class LabelsListMixin(object):
return res
class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQueryPathMixin):
class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
show_capabilities = ['edit', 'delete', 'adhoc', 'copy']
capabilities_prefetch = ['admin', 'adhoc', {'copy': 'organization.inventory_admin'}]
@@ -1562,7 +1700,6 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQuery
'inventory_sources_with_failures',
'pending_deletion',
'prevent_instance_group_fallback',
'opa_query_path',
)
def get_related(self, obj):
@@ -1632,68 +1769,8 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQuery
if kind == 'smart' and not host_filter:
raise serializers.ValidationError({'host_filter': _('Smart inventories must specify host_filter')})
return super(InventorySerializer, self).validate(attrs)
@staticmethod
def _update_variables(variables, inventory_id):
"""
Update the inventory variables of the 'all'-group.
The variables field contains vars from the inventory dialog, hence
representing the "all"-group variables.
Since this is not an update from an inventory source, we update the
variables when the inventory details form is saved.
A user edit on the inventory variables is considered a reset of the
variables update history. In particular, if the user removes a variable by
editing the inventory variables field, the variable is not supposed to
reappear with a value from a previous inventory source update.
We achieve this by forcing `reset=True` on such an update.
As a side-effect, variables which have been set by source updates and
have survived a user-edit (i.e. they have not been deleted from the
variables field) will be assumed to originate from the user edit and are
thus no longer deleted from the inventory when they are removed from
their original source!
Note that we use the inventory source id -1 for user-edit updates
because a regular inventory source cannot have an id of -1 since
PostgreSQL assigns pk's starting from 1 (if this assumption doesn't hold
true, we have to assign another special value for invsrc_id).
:param str variables: The variables as plain text in yaml or json
format.
:param int inventory_id: The primary key of the related inventory
object.
"""
variables_dict = parse_yaml_or_json(variables, silent_failure=False)
logger.debug(f"InventorySerializer._update_variables: {inventory_id=} {variables_dict=}, {variables=}")
update_group_variables(
group_id=None, # `None` denotes the 'all' group (which doesn't have a pk).
newvars=variables_dict,
dbvars=None,
invsrc_id=-1,
inventory_id=inventory_id,
reset=True,
)
def create(self, validated_data):
"""Called when a new inventory has to be created."""
logger.debug(f"InventorySerializer.create({validated_data=}) >>>>")
obj = super().create(validated_data)
self._update_variables(validated_data.get("variables") or "", obj.id)
return obj
def update(self, obj, validated_data):
"""Called when an existing inventory is updated."""
logger.debug(f"InventorySerializer.update({validated_data=}) >>>>")
obj = super().update(obj, validated_data)
self._update_variables(validated_data.get("variables") or "", obj.id)
return obj
class ConstructedFieldMixin(serializers.Field):
def get_attribute(self, instance):
@@ -1735,7 +1812,7 @@ class ConstructedInventorySerializer(InventorySerializer):
required=False,
allow_null=True,
min_value=0,
max_value=5,
max_value=2,
default=None,
help_text=_('The verbosity level for the related auto-created inventory source, special to constructed inventory'),
)
@@ -1983,12 +2060,10 @@ class GroupSerializer(BaseSerializerWithVariables):
return res
def validate(self, attrs):
# Do not allow the group name to conflict with an existing host name.
name = force_str(attrs.get('name', self.instance and self.instance.name or ''))
inventory = attrs.get('inventory', self.instance and self.instance.inventory or '')
if Host.objects.filter(name=name, inventory=inventory).exists():
raise serializers.ValidationError(_('A Host with that name already exists.'))
#
return super(GroupSerializer, self).validate(attrs)
def validate_name(self, value):
@@ -2165,13 +2240,13 @@ class BulkHostDeleteSerializer(serializers.Serializer):
attrs['hosts_data'] = attrs['host_qs'].values()
if len(attrs['host_qs']) == 0:
error_hosts = dict.fromkeys(attrs['hosts'], "Hosts do not exist or you lack permission to delete it")
error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in attrs['hosts']}
raise serializers.ValidationError({'hosts': error_hosts})
if len(attrs['host_qs']) < len(attrs['hosts']):
hosts_exists = [host['id'] for host in attrs['hosts_data']]
failed_hosts = list(set(attrs['hosts']).difference(hosts_exists))
error_hosts = dict.fromkeys(failed_hosts, "Hosts do not exist or you lack permission to delete it")
error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in failed_hosts}
raise serializers.ValidationError({'hosts': error_hosts})
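# Aside: the two spellings in this hunk are equivalent (sketch with made-up ids):
dict.fromkeys([1, 2], "msg") == {host: "msg" for host in [1, 2]}  # True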
# Getting all inventories that the hosts can be in
@@ -2273,7 +2348,6 @@ class GroupVariableDataSerializer(BaseVariableDataSerializer):
class InventorySourceOptionsSerializer(BaseSerializer):
credential = DeprecatedCredentialField(help_text=_('Cloud credential to use for inventory updates.'))
source = serializers.ChoiceField(choices=[])
class Meta:
fields = (
@@ -2295,14 +2369,6 @@ class InventorySourceOptionsSerializer(BaseSerializer):
)
read_only_fields = ('*', 'custom_virtualenv')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if 'source' in self.fields:
source_options = load_combined_inventory_source_options()
self.fields['source'].choices = [(plugin, description) for plugin, description in source_options.items()]
def get_related(self, obj):
res = super(InventorySourceOptionsSerializer, self).get_related(obj)
if obj.credential: # TODO: remove when 'credential' field is removed
@@ -2839,7 +2905,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
{
"role": {
"id": None,
"name": _("Platform Auditor"),
"name": _("System Auditor"),
"description": _("Can view all aspects of the system"),
"user_capabilities": {"unattach": False},
},
@@ -3027,6 +3093,11 @@ class CredentialSerializer(BaseSerializer):
ret.remove(field)
return ret
def validate_organization(self, org):
if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
return org
def validate_credential_type(self, credential_type):
if self.instance and credential_type.pk != self.instance.credential_type.pk:
for related_objects in (
@@ -3102,6 +3173,9 @@ class CredentialSerializerCreate(CredentialSerializer):
if attrs.get('team'):
attrs['organization'] = attrs['team'].organization
if 'credential_type' in attrs and attrs['credential_type'].kind == 'galaxy' and list(owner_fields) != ['organization']:
raise serializers.ValidationError({"organization": _("Galaxy credentials must be owned by an Organization.")})
return super(CredentialSerializerCreate, self).validate(attrs)
def create(self, validated_data):
@@ -3319,7 +3393,6 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
'webhook_service',
'webhook_credential',
'prevent_instance_group_fallback',
'opa_query_path',
)
read_only_fields = ('*', 'custom_virtualenv')
@@ -3521,17 +3594,11 @@ class JobRelaunchSerializer(BaseSerializer):
choices=[('all', _('No change to job limit')), ('failed', _('All failed and unreachable hosts'))],
write_only=True,
)
job_type = serializers.ChoiceField(
required=False,
allow_null=True,
choices=NEW_JOB_TYPE_CHOICES,
write_only=True,
)
credential_passwords = VerbatimField(required=False, write_only=True)
credential_passwords = VerbatimField(required=True, write_only=True)
class Meta:
model = Job
fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'job_type', 'credential_passwords')
fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'credential_passwords')
def validate_credential_passwords(self, value):
pnts = self.instance.passwords_needed_to_start
@@ -5314,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
)
def get_body(self, obj):
if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
if obj.notification_type in ('webhook', 'pagerduty'):
if isinstance(obj.body, dict):
if 'body' in obj.body:
return obj.body['body']
@@ -5336,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
def to_representation(self, obj):
ret = super(NotificationSerializer, self).to_representation(obj)
if obj.notification_type in ('webhook', 'awssns'):
if obj.notification_type == 'webhook':
ret.pop('subject')
if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
ret.pop('body')
return ret
@@ -5481,7 +5548,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
return summary_fields
def validate_unified_job_template(self, value):
if type(value) == InventorySource and value.source not in load_combined_inventory_source_options():
if type(value) == InventorySource and value.source not in CLOUD_INVENTORY_SOURCES:
raise serializers.ValidationError(_('Inventory Source must be a cloud resource.'))
elif type(value) == Project and value.scm_type == '':
raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
@@ -5990,34 +6057,6 @@ class InstanceGroupSerializer(BaseSerializer):
raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group'))
return value
def validate_pod_spec_override(self, value):
if not value:
return value
# value should be empty for non-container groups
if self.instance and not self.instance.is_container_group:
raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))
pod_spec_override_json = {}
# detect whether the value is YAML or JSON; if YAML, parse it into a dict
try:
# try to parse as YAML first
pod_spec_override_json = yaml.safe_load(value)
except yaml.YAMLError:
try:
pod_spec_override_json = json.loads(value)
except json.JSONDecodeError:
raise serializers.ValidationError(_('pod_spec_override must be valid yaml or json'))
# validate the parsed pod spec
spec = pod_spec_override_json.get('spec', {})
automount_service_account_token = spec.get('automountServiceAccountToken', False)
if automount_service_account_token:
raise serializers.ValidationError(_('automountServiceAccountToken is not allowed for security reasons'))
return value
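# Illustration of the validator above (hypothetical serializer instance and specs,
# not part of this diff):
igs = InstanceGroupSerializer()
igs.validate_pod_spec_override('spec: {containers: []}')  # returned unchanged
igs.validate_pod_spec_override('{"spec": {"automountServiceAccountToken": true}}')
# raises ValidationError: automountServiceAccountToken is not allowed for security reasons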
def validate(self, attrs):
attrs = super(InstanceGroupSerializer, self).validate(attrs)
@@ -6083,6 +6122,8 @@ class ActivityStreamSerializer(BaseSerializer):
('workflow_job_template_node', ('id', 'unified_job_template_id')),
('label', ('id', 'name', 'organization_id')),
('notification', ('id', 'status', 'notification_type', 'notification_template_id')),
('o_auth2_access_token', ('id', 'user_id', 'description', 'application_id', 'scope')),
('o_auth2_application', ('id', 'name', 'description')),
('credential_type', ('id', 'name', 'description', 'kind', 'managed')),
('ad_hoc_command', ('id', 'name', 'status', 'limit')),
('workflow_approval', ('id', 'name', 'unified_job_id')),

awx/api/swagger.py Normal file
View File

@@ -0,0 +1,63 @@
import warnings
from rest_framework.permissions import AllowAny
from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
class SuperUserSchemaGenerator(SchemaGenerator):
def has_view_permissions(self, path, method, view):
#
# Generate the Swagger schema as if you were a superuser and
# permissions didn't matter; this short-circuits the schema path
# discovery to include _all_ potential paths in the API.
#
return True
class AutoSchema(DRFAuthSchema):
def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url)
try:
serializer = self.view.get_serializer()
except Exception:
serializer = None
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {} {}.'.format(self.view.__class__.__name__, method, path)
)
link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)
# auto-generate a topic/tag for the serializer based on its model
if hasattr(self.view, 'swagger_topic'):
link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'):
link.__dict__['topic'] = str(serializer.Meta.model._meta.verbose_name_plural).title()
elif hasattr(self.view, 'model'):
link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else:
warnings.warn('Could not determine a Swagger tag for path {}'.format(path))
return link
def get_description(self, path, method):
setattr(self.view.request, 'swagger_method', method)
description = super(AutoSchema, self).get_description(path, method)
return description
schema_view = get_schema_view(
openapi.Info(
title="Snippets API",
default_version='v1',
description="Test description",
terms_of_service="https://www.google.com/policies/terms/",
contact=openapi.Contact(email="contact@snippets.local"),
license=openapi.License(name="BSD License"),
),
public=True,
permission_classes=[AllowAny],
)

View File

@@ -0,0 +1,114 @@
# Token Handling using OAuth2
This page lists the OAuth 2 utility endpoints used for authorization, token refresh and revocation.
Note that endpoints other than `/api/o/authorize/` are not meant to be used in browsers and do not
support HTTP GET. These endpoints strictly follow the
[RFC specs for OAuth2](https://tools.ietf.org/html/rfc6749), so please refer to them for detailed
reference. Note that the AWX network location defaults to `http://localhost:8013` in the examples below:
## Create Token for an Application using Authorization code grant type
Given an application "AuthCodeApp" of grant type `authorization-code`,
from the client app, the user makes a GET to the Authorize endpoint with
* `response_type`
* `client_id`
* `redirect_uris`
* `scope`
AWX will respond with the authorization `code` and `state`
to the redirect_uri specified in the application. The client application will then make a POST to the
`api/o/token/` endpoint on AWX with
* `code`
* `client_id`
* `client_secret`
* `grant_type`
* `redirect_uri`
AWX will respond with the `access_token`, `token_type`, `refresh_token`, and `expires_in`. For more
information on testing this flow, refer to [django-oauth-toolkit](http://django-oauth-toolkit.readthedocs.io/en/latest/tutorial/tutorial_01.html#test-your-authorization-server).
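As a sketch, the token exchange in that second step looks like the following `curl` call (the code,
client credentials and redirect URI are placeholders, not values from a real application):
```bash
curl -X POST \
    -H "Content-Type: application/x-www-form-urlencoded" \
    -d "grant_type=authorization_code&code=<code>&redirect_uri=<redirect_uri>" \
    -u "<client_id>:<client_secret>" \
    http://localhost:8013/api/o/token/ -i
```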
## Create Token for an Application using Password grant type
Logging in is not required for the `password` grant type, so a simple `curl` command can be used to acquire an access token
via `/api/o/token/` with
* `grant_type`: Required to be "password"
* `username`
* `password`
* `client_id`: Associated application must have grant_type "password"
* `client_secret`
For example:
```bash
curl -X POST \
-H "Content-Type: application/x-www-form-urlencoded" \
-d "grant_type=password&username=<username>&password=<password>&scope=read" \
-u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569e
IaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
http://localhost:8013/api/o/token/ -i
```
In the above POST request, the `username` and `password` parameters are the username and password of the
AWX user, and the authentication information has the format
`<client_id>:<client_secret>`, where `client_id` and `client_secret` are the corresponding fields of the
underlying application.
Upon success, the access token, refresh token and other information are returned in the response body in JSON
format:
```text
{
"access_token": "9epHOqHhnXUcgYK8QanOmUQPSgX92g",
"token_type": "Bearer",
"expires_in": 31536000000,
"refresh_token": "jMRX6QvzOTf046KHee3TU5mT3nyXsz",
"scope": "read"
}
```
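The access token can then be passed as a bearer token on subsequent API requests, for example (reusing the
token from the response above):
```bash
curl -H "Authorization: Bearer 9epHOqHhnXUcgYK8QanOmUQPSgX92g" \
    http://localhost:8013/api/v2/me/ -i
```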
## Refresh an existing access token
The `/api/o/token/` endpoint is also used for refreshing an access token:
```bash
curl -X POST \
-H "Content-Type: application/x-www-form-urlencoded" \
-d "grant_type=refresh_token&refresh_token=AL0NK9TTpv0qp54dGbC4VUZtsZ9r8z" \
-u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
http://localhost:8013/api/o/token/ -i
```
In the above POST request, `refresh_token` is the value of the `refresh_token` field of the access token
above. The authentication information has the format `<client_id>:<client_secret>`, where `client_id`
and `client_secret` are the corresponding fields of the application related to the access token.
Upon success, the new (refreshed) access token with the same scope information as the previous one is
given in the response body in JSON format:
```text
{
"access_token": "NDInWxGJI4iZgqpsreujjbvzCfJqgR",
"token_type": "Bearer",
"expires_in": 31536000000,
"refresh_token": "DqOrmz8bx3srlHkZNKmDpqA86bnQkT",
"scope": "read write"
}
```
Internally, the refresh operation deletes the existing token and immediately creates a new one with the
same scope and related application as the original. We can verify this by checking that the new token is
present at the `/api/v2/tokens/` endpoint.
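For example (assuming basic authentication as the owner of the token):
```bash
curl -u "<username>:<password>" http://localhost:8013/api/v2/tokens/ -i
```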
## Revoke an access token
Revoking an access token is the same as deleting the token resource object.
Revoking is done by POSTing to `/api/o/revoke_token/` with the token to revoke as a parameter:
```bash
curl -X POST -d "token=rQONsve372fQwuc2pn76k3IHDCYpi7" \
-H "Content-Type: application/x-www-form-urlencoded" \
-u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
http://localhost:8013/api/o/revoke_token/ -i
```
A `200 OK` response means the token was successfully revoked.

View File

@@ -1,6 +1,6 @@
{% if content_only %}<div class="nocode ansi_fore ansi_back{% if dark %} ansi_dark{% endif %}">{% else %}
<!DOCTYPE HTML>
<html lang="en">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>{{ title }}</title>

View File

@@ -2,12 +2,6 @@
- hosts: all
become: yes
tasks:
- name: Create the receptor group
group:
{% verbatim %}
name: "{{ receptor_group }}"
{% endverbatim %}
state: present
- name: Create the receptor user
user:
{% verbatim %}

View File

@@ -1,4 +1,4 @@
---
collections:
- name: ansible.receptor
version: 2.0.6
version: 2.0.3

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import ActivityStreamList, ActivityStreamDetail
urls = [
re_path(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
AdHocCommandStdout,
)
urls = [
re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'),
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import AdHocCommandEventDetail
urls = [
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
]

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
import awx.api.views.analytics as analytics
urls = [
re_path(r'^$', analytics.AnalyticsRootView.as_view(), name='analytics_root_view'),
re_path(r'^authorized/$', analytics.AnalyticsAuthorizedView.as_view(), name='analytics_authorized'),

View File

@@ -16,6 +16,7 @@ from awx.api.views import (
CredentialExternalTest,
)
urls = [
re_path(r'^$', CredentialList.as_view(), name='credential_list'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList
urls = [
re_path(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'),
re_path(r'^(?P<pk>[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest
urls = [
re_path(r'^$', CredentialTypeList.as_view(), name='credential_type_list'),
re_path(r'^(?P<pk>[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'),

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
ExecutionEnvironmentActivityStreamList,
)
urls = [
re_path(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'),

View File

@@ -18,6 +18,7 @@ from awx.api.views import (
GroupAdHocCommandsList,
)
urls = [
re_path(r'^$', GroupList.as_view(), name='group_list'),
re_path(r'^(?P<pk>[0-9]+)/$', GroupDetail.as_view(), name='group_detail'),

View File

@@ -18,6 +18,7 @@ from awx.api.views import (
HostAdHocCommandEventsList,
)
urls = [
re_path(r'^$', HostList.as_view(), name='host_list'),
re_path(r'^(?P<pk>[0-9]+)/$', HostDetail.as_view(), name='host_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
)
from awx.api.views.instance_install_bundle import InstanceInstallBundle
urls = [
re_path(r'^$', InstanceList.as_view(), name='instance_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),

View File

@@ -12,6 +12,7 @@ from awx.api.views import (
InstanceGroupObjectRolesList,
)
urls = [
re_path(r'^$', InstanceGroupList.as_view(), name='instance_group_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),

View File

@@ -29,6 +29,7 @@ from awx.api.views import (
InventoryVariableData,
)
urls = [
re_path(r'^$', InventoryList.as_view(), name='inventory_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'),

View File

@@ -18,6 +18,7 @@ from awx.api.views import (
InventorySourceNotificationTemplatesSuccessList,
)
urls = [
re_path(r'^$', InventorySourceList.as_view(), name='inventory_source_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'),

View File

@@ -15,6 +15,7 @@ from awx.api.views import (
InventoryUpdateCredentialsList,
)
urls = [
re_path(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'),
re_path(r'^(?P<pk>[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'),

View File

@@ -19,6 +19,7 @@ from awx.api.views import (
JobHostSummaryDetail,
)
urls = [
re_path(r'^$', JobList.as_view(), name='job_list'),
re_path(r'^(?P<pk>[0-9]+)/$', JobDetail.as_view(), name='job_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import JobHostSummaryDetail
urls = [re_path(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]
__all__ = ['urls']

View File

@@ -23,6 +23,7 @@ from awx.api.views import (
JobTemplateCopy,
)
urls = [
re_path(r'^$', JobTemplateList.as_view(), name='job_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views.labels import LabelList, LabelDetail
urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
__all__ = ['urls']

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import NotificationList, NotificationDetail
urls = [
re_path(r'^$', NotificationList.as_view(), name='notification_list'),
re_path(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),

View File

@@ -11,6 +11,7 @@ from awx.api.views import (
NotificationTemplateCopy,
)
urls = [
re_path(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),

awx/api/urls/oauth2.py (new file, 27 lines added)
View File

@@ -0,0 +1,27 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import (
OAuth2ApplicationList,
OAuth2ApplicationDetail,
ApplicationOAuth2TokenList,
OAuth2ApplicationActivityStreamList,
OAuth2TokenList,
OAuth2TokenDetail,
OAuth2TokenActivityStreamList,
)
urls = [
re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
re_path(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
re_path(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
re_path(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
re_path(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
re_path(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
]
__all__ = ['urls']
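
For context, these routes implemented AWX's OAuth2 application and token CRUD API. A minimal usage sketch for the tokens/ list route above, creating a personal access token; the host and credentials are illustrative assumptions, not part of this diff:

import requests

# POST to the token list endpoint; basic auth bootstraps the first token
resp = requests.post(
    "https://awx.example.com/api/v2/tokens/",  # hypothetical host
    auth=("admin", "password"),                # hypothetical credentials
    json={"description": "automation token", "scope": "write"},
)
resp.raise_for_status()
print(resp.json()["token"])  # the token value is only returned once, at creation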

View File

@@ -0,0 +1,45 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from datetime import timedelta
from django.utils.timezone import now
from django.conf import settings
from django.urls import re_path
from oauthlib import oauth2
from oauth2_provider import views
from awx.main.models import RefreshToken
from awx.api.views.root import ApiOAuthAuthorizationRootView
class TokenView(views.TokenView):
def create_token_response(self, request):
# Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
# properly expired (ugh):
#
# https://github.com/jazzband/django-oauth-toolkit/issues/746
#
# This code detects and auto-expires them on refresh grant
# requests.
if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
if refresh_token:
expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
if refresh_token.created + timedelta(seconds=expire_seconds) < now():
return request.build_absolute_uri(), {}, 'The refresh token has expired.', '403'
try:
return super(TokenView, self).create_token_response(request)
except oauth2.AccessDeniedError as e:
return request.build_absolute_uri(), {}, str(e), '403'
urls = [
re_path(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'),
re_path(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"),
re_path(r"^token/$", TokenView.as_view(), name="token"),
re_path(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"),
]
__all__ = ['urls']
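
Illustrative only: a refresh-token grant that exercises the expiry workaround in TokenView above. If the stored RefreshToken is older than REFRESH_TOKEN_EXPIRE_SECONDS, the view answers 403 with 'The refresh token has expired.' instead of minting new tokens. Host, client credentials, and token values are hypothetical:

import requests

resp = requests.post(
    "https://awx.example.com/api/o/token/",  # mounted via oauth2_root_urls
    data={
        "grant_type": "refresh_token",
        "refresh_token": "<stored-refresh-token>",
        "client_id": "<client-id>",
        "client_secret": "<client-secret>",
    },
)
print(resp.status_code, resp.text)  # 200 with fresh tokens, or 403 once expired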

View File

@@ -25,7 +25,8 @@ from awx.api.views.organization import (
OrganizationObjectRolesList,
OrganizationAccessList,
)
-from awx.api.views import OrganizationCredentialList
+from awx.api.views import OrganizationCredentialList, OrganizationApplicationList
urls = [
re_path(r'^$', OrganizationList.as_view(), name='organization_list'),
@@ -65,6 +66,7 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
re_path(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
re_path(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
re_path(r'^(?P<pk>[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'),
]
__all__ = ['urls']

View File

@@ -22,6 +22,7 @@ from awx.api.views import (
ProjectCopy,
)
urls = [
re_path(r'^$', ProjectList.as_view(), name='project_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),

View File

@@ -13,6 +13,7 @@ from awx.api.views import (
ProjectUpdateEventsList,
)
urls = [
re_path(r'^$', ProjectUpdateList.as_view(), name='project_update_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'),

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
ReceptorAddressDetail,
)
urls = [
re_path(r'^$', ReceptorAddressesList.as_view(), name='receptor_addresses_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ReceptorAddressDetail.as_view(), name='receptor_address_detail'),

View File

@@ -3,13 +3,16 @@
from django.urls import re_path
-from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList
+from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList
urls = [
re_path(r'^$', RoleList.as_view(), name='role_list'),
re_path(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'),
re_path(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'),
re_path(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'),
re_path(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'),
re_path(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'),
]
__all__ = ['urls']

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList, ScheduleLabelsList, ScheduleInstanceGroupList
urls = [
re_path(r'^$', ScheduleList.as_view(), name='schedule_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList
urls = [
re_path(r'^$', SystemJobList.as_view(), name='system_job_list'),
re_path(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
SystemJobTemplateNotificationTemplatesSuccessList,
)
urls = [
re_path(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'),

View File

@@ -15,6 +15,7 @@ from awx.api.views import (
TeamAccessList,
)
urls = [
re_path(r'^$', TeamList.as_view(), name='team_list'),
re_path(r'^(?P<pk>[0-9]+)/$', TeamDetail.as_view(), name='team_detail'),

View File

@@ -4,6 +4,7 @@
from __future__ import absolute_import, unicode_literals
from django.urls import include, re_path
from awx import MODE
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views.root import (
ApiRootView,
@@ -14,6 +15,7 @@ from awx.api.views.root import (
ApiV2AttachView,
)
from awx.api.views import (
AuthView,
UserMeList,
DashboardView,
DashboardJobsGraphView,
@@ -24,6 +26,10 @@ from awx.api.views import (
JobTemplateCredentialsList,
SchedulePreview,
ScheduleZoneInfo,
OAuth2ApplicationList,
OAuth2TokenList,
ApplicationOAuth2TokenList,
OAuth2ApplicationDetail,
HostMetricSummaryMonthlyList,
)
@@ -74,6 +80,8 @@ from .schedule import urls as schedule_urls
from .activity_stream import urls as activity_stream_urls
from .instance import urls as instance_urls
from .instance_group import urls as instance_group_urls
from .oauth2 import urls as oauth2_urls
from .oauth2_root import urls as oauth2_root_urls
from .workflow_approval_template import urls as workflow_approval_template_urls
from .workflow_approval import urls as workflow_approval_urls
from .analytics import urls as analytics_urls
@@ -88,11 +96,17 @@ v2_urls = [
re_path(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
re_path(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
re_path(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'),
re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
re_path(r'^', include(oauth2_urls)),
re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'),
re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'),
re_path(r'^auth/$', AuthView.as_view()),
re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'),
re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'),
@@ -147,15 +161,22 @@ v2_urls = [
app_name = 'api'
urlpatterns = [
re_path(r'^$', ApiRootView.as_view(), name='api_root_view'),
re_path(r'^(?P<version>(v2))/', include(v2_urls)),
re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
# the docs/, schema-related endpoints used to be listed here but now exposed by DAB api_documentation app
re_path(r'^o/', include(oauth2_root_urls)),
]
if MODE == 'development':
# Only include these if we are in the development environment
from awx.api.swagger import schema_view
from awx.api.urls.debug import urls as debug_urls
urlpatterns += [re_path(r'^debug/', include(debug_urls))]
urlpatterns += [
re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
]
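
A minimal sketch of the namespacing this hunk preserves, assuming a configured Django project with this urlconf installed (the host project mounts it under /api/): app_name = 'api' plus include() makes every named route reversible under the api: namespace, with the version regex captured as a kwarg.

from django.urls import reverse

# with app_name = 'api' and re_path(r'^(?P<version>(v2))/', include(v2_urls)) in
# place, a versioned endpoint resolves by its namespaced name:
url = reverse('api:user_me_list', kwargs={'version': 'v2'})  # -> '/api/v2/me/'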

View File

@@ -14,6 +14,10 @@ from awx.api.views import (
UserRolesList,
UserActivityStreamList,
UserAccessList,
OAuth2ApplicationList,
OAuth2UserTokenList,
UserPersonalTokenList,
UserAuthorizedTokenList,
)
urls = [
@@ -27,6 +31,10 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'),
re_path(r'^(?P<pk>[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
re_path(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
re_path(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
re_path(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),
]
__all__ = ['urls']

View File

@@ -2,6 +2,7 @@ from django.urls import re_path
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver
urlpatterns = [
re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny
urls = [
re_path(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'),

View File

@@ -5,6 +5,7 @@ from django.urls import re_path
from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList
urls = [
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'),
re_path(r'^(?P<pk>[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
WorkflowJobActivityStreamList,
)
urls = [
re_path(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'),

View File

@@ -14,6 +14,7 @@ from awx.api.views import (
WorkflowJobNodeInstanceGroupsList,
)
urls = [
re_path(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'),

View File

@@ -22,6 +22,7 @@ from awx.api.views import (
WorkflowJobTemplateLabelList,
)
urls = [
re_path(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'),

View File

@@ -15,6 +15,7 @@ from awx.api.views import (
WorkflowJobTemplateNodeInstanceGroupsList,
)
urls = [
re_path(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'),
re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'),

File diff suppressed because it is too large

View File

@@ -10,13 +10,10 @@ from awx.api.generics import APIView, Response
from awx.api.permissions import AnalyticsPermission
from awx.api.versioning import reverse
from awx.main.utils import get_awx_version
from awx.main.utils.analytics_proxy import OIDCClient
from rest_framework import status
from collections import OrderedDict
from ansible_base.lib.utils.schema import extend_schema_if_available
AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
AWX_ANALYTICS_API_PREFIX = 'analytics'
@@ -40,8 +37,6 @@ class MissingSettings(Exception):
class GetNotAllowedMixin(object):
skip_ai_description = True
def get(self, request, format=None):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@@ -50,9 +45,7 @@ class AnalyticsRootView(APIView):
permission_classes = (AnalyticsPermission,)
name = _('Automation Analytics')
swagger_topic = 'Automation Analytics'
resource_purpose = 'automation analytics endpoints'
@extend_schema_if_available(extensions={"x-ai-description": "A list of additional API endpoints related to analytics"})
def get(self, request, format=None):
data = OrderedDict()
data['authorized'] = reverse('api:analytics_authorized', request=request)
@@ -105,8 +98,6 @@ class AnalyticsGenericView(APIView):
return Response(response.json(), status=response.status_code)
"""
resource_purpose = 'base view for analytics api proxy'
permission_classes = (AnalyticsPermission,)
@staticmethod
@@ -188,59 +179,33 @@ class AnalyticsGenericView(APIView):
return Response(response.content, status=response.status_code)
@staticmethod
def _base_auth_request(request: requests.Request, method: str, url: str, user: str, pw: str, headers: dict[str, str]) -> requests.Response:
response = requests.request(
method,
url,
auth=(user, pw),
verify=settings.INSIGHTS_CERT_PATH,
params=getattr(request, 'query_params', {}),
headers=headers,
json=getattr(request, 'data', {}),
timeout=(31, 31),
)
return response
def _send_to_analytics(self, request, method):
try:
headers = self._request_headers(request)
self._get_setting('INSIGHTS_TRACKING_STATE', False, ERROR_UPLOAD_NOT_ENABLED)
url = self._get_analytics_url(request.path)
rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
if method not in ["GET", "POST", "OPTIONS"]:
return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
url = self._get_analytics_url(request.path)
using_subscriptions_credentials = False
try:
rh_user = getattr(settings, 'REDHAT_USERNAME', None)
rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
if not (rh_user and rh_password):
rh_user = self._get_setting('SUBSCRIPTIONS_CLIENT_ID', None, ERROR_MISSING_USER)
rh_password = self._get_setting('SUBSCRIPTIONS_CLIENT_SECRET', None, ERROR_MISSING_PASSWORD)
using_subscriptions_credentials = True
client = OIDCClient(rh_user, rh_password)
response = client.make_request(
else:
response = requests.request(
method,
url,
headers=headers,
auth=(rh_user, rh_password),
verify=settings.INSIGHTS_CERT_PATH,
params=getattr(request, 'query_params', {}),
json=getattr(request, 'data', {}),
params=request.query_params,
headers=headers,
json=request.data,
timeout=(31, 31),
)
except requests.RequestException:
# subscriptions credentials are not valid for basic auth, so just return 401
if using_subscriptions_credentials:
response = Response(status=status.HTTP_401_UNAUTHORIZED)
else:
logger.error("Automation Analytics API request failed, trying base auth method")
response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
#
# Missing or wrong user/pass
#
if response.status_code == status.HTTP_401_UNAUTHORIZED:
-text = response.get('text', '').rstrip("\n")
+text = (response.text or '').rstrip("\n")
return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
#
# Not found, No entitlement or No data in Analytics
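
Condensed sketch of what this hunk toggles between: one side authenticates through an OIDC client with a basic-auth fallback, the other performs a single basic-auth request. The helper below mirrors the simpler basic-auth shape; the function name and defaults are illustrative:

import requests

def send_to_analytics(method, url, user, password, headers=None, params=None, payload=None, cert_path=True):
    # one basic-auth round trip to the analytics API, as in the plain code path
    return requests.request(
        method,
        url,
        auth=(user, password),
        verify=cert_path,        # stands in for settings.INSIGHTS_CERT_PATH
        params=params or {},
        headers=headers or {},
        json=payload or {},
        timeout=(31, 31),        # (connect, read) timeouts in seconds
    )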
@@ -265,91 +230,67 @@ class AnalyticsGenericView(APIView):
class AnalyticsGenericListView(AnalyticsGenericView):
resource_purpose = 'analytics api proxy list view'
@extend_schema_if_available(extensions={"x-ai-description": "Get analytics data from Red Hat Insights"})
def get(self, request, format=None):
return self._send_to_analytics(request, method="GET")
@extend_schema_if_available(extensions={"x-ai-description": "Post query to Red Hat Insights analytics"})
def post(self, request, format=None):
return self._send_to_analytics(request, method="POST")
@extend_schema_if_available(extensions={"x-ai-description": "Get analytics endpoint options"})
def options(self, request, format=None):
return self._send_to_analytics(request, method="OPTIONS")
class AnalyticsGenericDetailView(AnalyticsGenericView):
resource_purpose = 'analytics api proxy detail view'
@extend_schema_if_available(extensions={"x-ai-description": "Get specific analytics resource from Red Hat Insights"})
def get(self, request, slug, format=None):
return self._send_to_analytics(request, method="GET")
@extend_schema_if_available(extensions={"x-ai-description": "Post query for specific analytics resource to Red Hat Insights"})
def post(self, request, slug, format=None):
return self._send_to_analytics(request, method="POST")
@extend_schema_if_available(extensions={"x-ai-description": "Get options for specific analytics resource"})
def options(self, request, slug, format=None):
return self._send_to_analytics(request, method="OPTIONS")
@extend_schema_if_available(
extensions={'x-ai-description': 'Check if the user has access to Red Hat Insights'},
)
class AnalyticsAuthorizedView(AnalyticsGenericListView):
name = _("Authorized")
resource_purpose = 'red hat insights authorization status'
class AnalyticsReportsList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Reports")
swagger_topic = "Automation Analytics"
resource_purpose = 'automation analytics reports'
class AnalyticsReportDetail(AnalyticsGenericDetailView):
name = _("Report")
resource_purpose = 'automation analytics report detail'
class AnalyticsReportOptionsList(AnalyticsGenericListView):
name = _("Report Options")
resource_purpose = 'automation analytics report options'
class AnalyticsAdoptionRateList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Adoption Rate")
resource_purpose = 'automation analytics adoption rate data'
class AnalyticsEventExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Event Explorer")
resource_purpose = 'automation analytics event explorer data'
class AnalyticsHostExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Host Explorer")
resource_purpose = 'automation analytics host explorer data'
class AnalyticsJobExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Job Explorer")
resource_purpose = 'automation analytics job explorer data'
class AnalyticsProbeTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Probe Templates")
resource_purpose = 'automation analytics probe templates'
class AnalyticsProbeTemplateForHostsList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("Probe Template For Hosts")
resource_purpose = 'automation analytics probe templates for hosts'
class AnalyticsRoiTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
name = _("ROI Templates")
resource_purpose = 'automation analytics roi templates'

View File

@@ -1,7 +1,5 @@
from collections import OrderedDict
from ansible_base.lib.utils.schema import extend_schema_if_available
from django.utils.translation import gettext_lazy as _
from rest_framework.permissions import IsAuthenticated
@@ -32,7 +30,6 @@ class BulkView(APIView):
]
allowed_methods = ['GET', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Retrieves a list of available bulk actions"})
def get(self, request, format=None):
'''List top level resources'''
data = OrderedDict()
@@ -48,13 +45,11 @@ class BulkJobLaunchView(GenericAPIView):
serializer_class = serializers.BulkJobLaunchSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk job launch endpoint"})
def get(self, request):
data = OrderedDict()
data['detail'] = "Specify a list of unified job templates to launch alongside their launchtime parameters"
return Response(data, status=status.HTTP_200_OK)
@extend_schema_if_available(extensions={"x-ai-description": "Bulk launch job templates"})
def post(self, request):
bulkjob_serializer = serializers.BulkJobLaunchSerializer(data=request.data, context={'request': request})
if bulkjob_serializer.is_valid():
@@ -69,11 +64,9 @@ class BulkHostCreateView(GenericAPIView):
serializer_class = serializers.BulkHostCreateSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk host create endpoint"})
def get(self, request):
return Response({"detail": "Bulk create hosts with this endpoint"}, status=status.HTTP_200_OK)
@extend_schema_if_available(extensions={"x-ai-description": "Bulk create hosts"})
def post(self, request):
serializer = serializers.BulkHostCreateSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
@@ -88,11 +81,9 @@ class BulkHostDeleteView(GenericAPIView):
serializer_class = serializers.BulkHostDeleteSerializer
allowed_methods = ['GET', 'POST', 'OPTIONS']
@extend_schema_if_available(extensions={"x-ai-description": "Get information about bulk host delete endpoint"})
def get(self, request):
return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
@extend_schema_if_available(extensions={"x-ai-description": "Bulk delete hosts"})
def post(self, request):
serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
if serializer.is_valid():
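
Example request against the bulk job launch endpoint described above; the payload shape follows BulkJobLaunchSerializer, a list of unified job template IDs with optional per-job launch-time parameters. Host, token, and IDs are illustrative:

import requests

resp = requests.post(
    "https://awx.example.com/api/v2/bulk/job_launch/",
    headers={"Authorization": "Bearer <token>"},
    json={
        "name": "nightly batch",
        "jobs": [
            {"unified_job_template": 7},
            {"unified_job_template": 8, "limit": "web_servers"},
        ],
    },
)
print(resp.status_code, resp.json())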

View File

@@ -5,7 +5,6 @@ from django.conf import settings
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from awx.api.generics import APIView
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.main.scheduler import TaskManager, DependencyManager, WorkflowManager
@@ -15,9 +14,7 @@ class TaskManagerDebugView(APIView):
exclude_from_schema = True
permission_classes = [AllowAny]
prefix = 'Task'
resource_purpose = 'debug task manager'
@extend_schema_if_available(extensions={"x-ai-description": "Trigger task manager scheduling"})
def get(self, request):
TaskManager().schedule()
if not settings.AWX_DISABLE_TASK_MANAGERS:
@@ -32,9 +29,7 @@ class DependencyManagerDebugView(APIView):
exclude_from_schema = True
permission_classes = [AllowAny]
prefix = 'Dependency'
resource_purpose = 'debug dependency manager'
@extend_schema_if_available(extensions={"x-ai-description": "Trigger dependency manager scheduling"})
def get(self, request):
DependencyManager().schedule()
if not settings.AWX_DISABLE_TASK_MANAGERS:
@@ -49,9 +44,7 @@ class WorkflowManagerDebugView(APIView):
exclude_from_schema = True
permission_classes = [AllowAny]
prefix = 'Workflow'
resource_purpose = 'debug workflow manager'
@extend_schema_if_available(extensions={"x-ai-description": "Trigger workflow manager scheduling"})
def get(self, request):
WorkflowManager().schedule()
if not settings.AWX_DISABLE_TASK_MANAGERS:
@@ -65,9 +58,7 @@ class DebugRootView(APIView):
_ignore_model_permissions = True
exclude_from_schema = True
permission_classes = [AllowAny]
resource_purpose = 'debug endpoints root'
@extend_schema_if_available(extensions={"x-ai-description": "List available debug endpoints"})
def get(self, request, format=None):
'''List of available debug urls'''
data = OrderedDict()
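
Development-only usage sketch: when MODE == 'development' these views are mounted under /api/debug/ and a plain GET runs the corresponding scheduler pass. The subpaths below are assumptions inferred from the view names, not confirmed by this diff:

import requests

for manager in ("task_manager", "dependency_manager", "workflow_manager"):
    r = requests.get(f"https://awx.example.com/api/debug/{manager}/")  # hypothetical paths
    print(manager, r.status_code)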

View File

@@ -10,10 +10,9 @@ import time
import re
import asn1
from ansible_base.lib.utils.schema import extend_schema_if_available
from awx.api import serializers
from awx.api.generics import GenericAPIView, Response
-from awx.api.permissions import IsSystemAdmin
+from awx.api.permissions import IsSystemAdminOrAuditor
from awx.main import models
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
@@ -49,10 +48,8 @@ class InstanceInstallBundle(GenericAPIView):
name = _('Install Bundle')
model = models.Instance
serializer_class = serializers.InstanceSerializer
-permission_classes = (IsSystemAdmin,)
-resource_purpose = 'install bundle'
+permission_classes = (IsSystemAdminOrAuditor,)
@extend_schema_if_available(extensions={"x-ai-description": "Generate and download install bundle for an instance"})
def get(self, request, *args, **kwargs):
instance_obj = self.get_object()
@@ -198,8 +195,8 @@ def generate_receptor_tls(instance_obj):
.issuer_name(ca_cert.issuer)
.public_key(csr.public_key())
.serial_number(x509.random_serial_number())
-.not_valid_before(datetime.datetime.now(datetime.UTC))
-.not_valid_after(datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=3650))
+.not_valid_before(datetime.datetime.utcnow())
+.not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
.add_extension(
csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
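
The marked pair above swaps the timezone-aware datetime.datetime.now(datetime.UTC) for the naive datetime.datetime.utcnow(); both name the same instant, but only the aware value carries tzinfo, and utcnow() is deprecated as of Python 3.12. A quick illustration:

import datetime

naive = datetime.datetime.utcnow()           # tzinfo is None; deprecated in 3.12
aware = datetime.datetime.now(datetime.UTC)  # tzinfo is datetime.timezone.utc
print(naive.tzinfo, aware.tzinfo)
print(aware + datetime.timedelta(days=3650)) # the ten-year not_valid_after bound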

Some files were not shown because too many files have changed in this diff