Mirror of https://github.com/ansible/awx.git (synced 2026-02-08 21:14:47 -03:30)

Compare commits: feature_ui ... test_cyber

9 commits
| Author | SHA1 | Date |
|---|---|---|
|  | c437a37be7 |  |
|  | b59cee97d8 |  |
|  | 9ca554ce75 |  |
|  | 81e20c727d |  |
|  | f3482f4038 |  |
|  | 546fabbb97 |  |
|  | 68862d5085 |  |
|  | 66c7d5e9be |  |
|  | 4a7335676d |  |
3 changes: .github/ISSUE_TEMPLATE/bug_report.yml (vendored)

@@ -19,8 +19,6 @@ body:
          required: true
        - label: I understand that AWX is open source software provided for free and that I might not receive a timely response.
          required: true
-       - label: I am **NOT** reporting a (potential) security vulnerability. (These should be emailed to `security@ansible.com` instead.)
-         required: true
 
  - type: textarea
    id: summary
@@ -44,7 +42,6 @@ body:
      label: Select the relevant components
      options:
        - label: UI
-       - label: UI (tech preview)
        - label: API
        - label: Docs
        - label: Collection
34 changes: .github/actions/awx_devel_image/action.yml (vendored)

@@ -1,34 +0,0 @@
-name: Setup images for AWX
-description: Builds new awx_devel image
-inputs:
-  github-token:
-    description: GitHub Token for registry access
-    required: true
-runs:
-  using: composite
-  steps:
-    - name: Get python version from Makefile
-      shell: bash
-      run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-    - name: Set lower case owner name
-      shell: bash
-      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
-      env:
-        OWNER: '${{ github.repository_owner }}'
-
-    - name: Log in to registry
-      shell: bash
-      run: |
-        echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-    - name: Pre-pull latest devel image to warm cache
-      shell: bash
-      run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
-
-    - name: Build image for current source checkout
-      shell: bash
-      run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
-        COMPOSE_TAG=${{ github.base_ref }} \
-        make docker-compose-build
77 changes: .github/actions/run_awx_devel/action.yml (vendored)

@@ -1,77 +0,0 @@
-name: Run AWX docker-compose
-description: Runs AWX with `make docker-compose`
-inputs:
-  github-token:
-    description: GitHub Token to pass to awx_devel_image
-    required: true
-  build-ui:
-    description: Should the UI be built?
-    required: false
-    default: false
-    type: boolean
-outputs:
-  ip:
-    description: The IP of the tools_awx_1 container
-    value: ${{ steps.data.outputs.ip }}
-  admin-token:
-    description: OAuth token for admin user
-    value: ${{ steps.data.outputs.admin_token }}
-runs:
-  using: composite
-  steps:
-    - name: Build awx_devel image for running checks
-      uses: ./.github/actions/awx_devel_image
-      with:
-        github-token: ${{ inputs.github-token }}
-
-    - name: Upgrade ansible-core
-      shell: bash
-      run: python3 -m pip install --upgrade ansible-core
-
-    - name: Install system deps
-      shell: bash
-      run: sudo apt-get install -y gettext
-
-    - name: Start AWX
-      shell: bash
-      run: |
-        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
-        COMPOSE_TAG=${{ github.base_ref }} \
-        COMPOSE_UP_OPTS="-d" \
-        make docker-compose
-
-    - name: Update default AWX password
-      shell: bash
-      run: |
-        SECONDS=0
-        while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]; do
-          if [[ $SECONDS -gt 600 ]]; then
-            echo "Timing out, AWX never came up"
-            exit 1
-          fi
-          echo "Waiting for AWX..."
-          sleep 5
-        done
-        echo "AWX is up, updating the password..."
-        docker exec -i tools_awx_1 sh <<-EOSH
-          awx-manage update_password --username=admin --password=password
-        EOSH
-
-    - name: Build UI
-      # This must be a string comparison in composite actions:
-      # https://github.com/actions/runner/issues/2238
-      if: ${{ inputs.build-ui == 'true' }}
-      shell: bash
-      run: |
-        docker exec -i tools_awx_1 sh <<-EOSH
-          make ui-devel
-        EOSH
-
-    - name: Get instance data
-      id: data
-      shell: bash
-      run: |
-        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
-        ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
-        echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
-        echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
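For context, this composite action is consumed from an ordinary workflow step; the `-` side of the ci.yml diff further down does exactly this. A minimal usage sketch, with the step id and output names taken from the action above:

    - uses: ./.github/actions/run_awx_devel
      id: awx
      with:
        build-ui: false
        github-token: ${{ secrets.GITHUB_TOKEN }}

    # Later steps can then read the action's outputs, e.g.
    # ${{ steps.awx.outputs.ip }} and ${{ steps.awx.outputs.admin-token }}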
19 changes: .github/actions/upload_awx_devel_logs/action.yml (vendored)

@@ -1,19 +0,0 @@
-name: Upload logs
-description: Upload logs from `make docker-compose` devel environment to GitHub as an artifact
-inputs:
-  log-filename:
-    description: "*Unique* name of the log file"
-    required: true
-runs:
-  using: composite
-  steps:
-    - name: Get AWX logs
-      shell: bash
-      run: |
-        docker logs tools_awx_1 > ${{ inputs.log-filename }}
-
-    - name: Upload AWX logs as artifact
-      uses: actions/upload-artifact@v3
-      with:
-        name: docker-compose-logs
-        path: ${{ inputs.log-filename }}
19 changes: .github/dependabot.yml (vendored)

@@ -1,10 +1,19 @@
 version: 2
 updates:
-  - package-ecosystem: "pip"
-    directory: "docs/docsite/"
+  - package-ecosystem: "npm"
+    directory: "/awx/ui"
     schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 2
+      interval: "monthly"
+    open-pull-requests-limit: 5
+    allow:
+      - dependency-type: "production"
+    reviewers:
+      - "AlexSCorey"
+      - "keithjgrant"
+      - "kialam"
+      - "mabashian"
+      - "marshmalien"
     labels:
-      - "docs"
+      - "component:ui"
       - "dependencies"
+    target-branch: "devel"
2 changes: .github/issue_labeler.yml (vendored)

@@ -6,8 +6,6 @@ needs_triage:
    - "Feature Summary"
 "component:ui":
    - "\\[X\\] UI"
-"component:ui_next":
-   - "\\[X\\] UI \\(tech preview\\)"
 "component:api":
    - "\\[X\\] API"
 "component:docs":
3 changes: .github/pr_labeler.yml (vendored)

@@ -15,4 +15,5 @@
 
 "dependencies":
   - any: ["awx/ui/package.json"]
-  - any: ["requirements/*"]
+  - any: ["awx/requirements/*.txt"]
+  - any: ["awx/requirements/requirements.in"]
21 changes: .github/triage_replies.md (vendored)

@@ -7,8 +7,8 @@
 
 ## PRs/Issues
 
-### Visit the Forum or Matrix
-- Hello, this appears to be less of a bug report or feature request and more of a question. Could you please ask this on either the [Ansible AWX channel on Matrix](https://matrix.to/#/#awx:ansible.com) or the [Ansible Community Forum](https://forum.ansible.com/tag/awx)?
+### Visit our mailing list
+- Hello, this appears to be less of a bug report or feature request and more of a question. Could you please ask this on our mailing list? See https://github.com/ansible/awx/#get-involved for information on ways to connect with us.
 
 ### Denied Submission
 
@@ -53,16 +53,6 @@ https://github.com/ansible/awx/#get-involved \
 Thank you once again for this and your interest in AWX!
 
 
-### Red Hat Support Team
-- Hi! \
-\
-It appears that you are using an RPM build for RHEL. Please reach out to the Red Hat support team and submit a ticket. \
-\
-Here is the link to do so: \
-\
-https://access.redhat.com/support \
-\
-Thank you for your submission and for supporting AWX!
 
 
 ## Common
@@ -106,13 +96,6 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 ### Oracle AWX
 We'd be happy to help if you can reproduce this with AWX, since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracle's Linux Automation Manager, you will need to contact Oracle for support.
 
-### Community Resolved
-Hi,
-
-We are happy to see that it appears a fix has been provided for your issue, so we will go ahead and close this ticket. Please feel free to reopen if any other problems arise.
-
-<name of community member who helped> thanks so much for taking the time to write a thoughtful and helpful response to this issue!
-
 ### AWX Release
 Subject: Announcing AWX Xa.Ya.za and AWX-Operator Xb.Yb.zb
 
258 changes: .github/workflows/ci.yml (vendored)

@@ -1,17 +1,13 @@
 ---
 name: CI
 env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  DEV_DOCKER_OWNER: ${{ github.repository_owner }}
-  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
+  BRANCH: ${{ github.base_ref || 'devel' }}
 on:
   pull_request:
 jobs:
   common-tests:
     name: ${{ matrix.tests.name }}
     runs-on: ubuntu-latest
-    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
@@ -21,61 +17,96 @@ jobs:
       tests:
         - name: api-test
           command: /start_tests.sh
-        - name: api-migrations
-          command: /start_tests.sh test_migrations
+          label: Run API Tests
         - name: api-lint
           command: /var/lib/awx/venv/awx/bin/tox -e linters
+          label: Run API Linters
         - name: api-swagger
           command: /start_tests.sh swagger
+          label: Generate API Reference
         - name: awx-collection
           command: /start_tests.sh test_collection_all
+          label: Run Collection Tests
         - name: api-schema
+          label: Check API Schema
           command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
         - name: ui-lint
+          label: Run UI Linters
          command: make ui-lint
         - name: ui-test-screens
+          label: Run UI Screens Tests
           command: make ui-test-screens
         - name: ui-test-general
+          label: Run UI General Tests
           command: make ui-test-general
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v2
 
-      - name: Build awx_devel image for running checks
-        uses: ./.github/actions/awx_devel_image
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          python-version: ${{ env.py_version }}
 
-      - name: Run check ${{ matrix.tests.name }}
-        run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make docker-runner
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+
+      - name: ${{ matrix.texts.label }}
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}
   dev-env:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v2
 
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
         with:
-          build-ui: false
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          python-version: ${{ env.py_version }}
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
+
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
 
       - name: Run smoke test
-        run: ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
+        run: |
+          export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }}
+          export COMPOSE_TAG=${{ env.BRANCH }}
+          ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v
 
   awx-operator:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
-    env:
-      DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
      - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
        with:
          path: awx
 
      - name: Checkout awx-operator
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
        with:
          repository: ansible/awx-operator
          path: awx-operator
@@ -85,7 +116,7 @@ jobs:
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
       - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v2
         with:
           python-version: ${{ env.py_version }}
 
@@ -96,11 +127,11 @@ jobs:
       - name: Build AWX image
         working-directory: awx
         run: |
-          VERSION=`make version-for-buildyml` make awx-kube-build
-        env:
-          COMPOSE_TAG: ci
-          DEV_DOCKER_TAG_BASE: local
-          HEADLESS: yes
+          ansible-playbook -v tools/ansible/build.yml \
+            -e headless=yes \
+            -e awx_image=awx \
+            -e awx_image_tag=ci \
+            -e ansible_python_interpreter=$(which python3)
 
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -109,168 +140,7 @@ jobs:
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)
           make kustomize
-          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
         env:
-          AWX_TEST_IMAGE: local/awx
+          AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
-          AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
-          STORE_DEBUG_OUTPUT: true
-
-      - name: Upload debug output
-        if: failure()
-        uses: actions/upload-artifact@v3
-        with:
-          name: awx-operator-debug-output
-          path: ${{ env.DEBUG_OUTPUT_DIR }}
-
-  collection-sanity:
-    name: awx_collection sanity
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    strategy:
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v3
-
-      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
-      - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
-
-      - name: Run sanity tests
-        run: make test_collection_sanity
-
-  collection-integration:
-    name: awx_collection integration
-    runs-on: ubuntu-latest
-    timeout-minutes: 60
-    strategy:
-      fail-fast: false
-      matrix:
-        target-regex:
-          - name: a-h
-            regex: ^[a-h]
-          - name: i-p
-            regex: ^[i-p]
-          - name: r-z0-9
-            regex: ^[r-z0-9]
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: false
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install dependencies for running tests
-        run: |
-          python3 -m pip install -e ./awxkit/
-          python3 -m pip install -r awx_collection/requirements.txt
-
-      - name: Run integration tests
-        run: |
-          echo "::remove-matcher owner=python::"  # Disable annoying annotations from setup-python
-          echo '[general]' > ~/.tower_cli.cfg
-          echo 'host = https://${{ steps.awx.outputs.ip }}:8043' >> ~/.tower_cli.cfg
-          echo 'oauth_token = ${{ steps.awx.outputs.admin-token }}' >> ~/.tower_cli.cfg
-          echo 'verify_ssl = false' >> ~/.tower_cli.cfg
-          TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
-          make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--coverage --requirements $TARGETS" test_collection_integration
-        env:
-          ANSIBLE_TEST_PREFER_PODMAN: 1
-
-      # Upload coverage report as artifact
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: coverage-${{ matrix.target-regex.name }}
-          path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
-
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: collection-integration-${{ matrix.target-regex.name }}.log
-
-  collection-integration-coverage-combine:
-    name: combine awx_collection integration coverage
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    needs:
-      - collection-integration
-    strategy:
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
-
-      - name: Download coverage artifacts
-        uses: actions/download-artifact@v3
-        with:
-          path: coverage
-
-      - name: Combine coverage
-        run: |
-          make COLLECTION_VERSION=100.100.100-git install_collection
-          mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
-          cd coverage
-          for i in coverage-*; do
-            cp -rv $i/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
-          done
-          cd ~/.ansible/collections/ansible_collections/awx/awx
-          ansible-test coverage combine --requirements
-          ansible-test coverage html
-          echo '## AWX Collection Integration Coverage' >> $GITHUB_STEP_SUMMARY
-          echo '```' >> $GITHUB_STEP_SUMMARY
-          ansible-test coverage report >> $GITHUB_STEP_SUMMARY
-          echo '```' >> $GITHUB_STEP_SUMMARY
-          echo >> $GITHUB_STEP_SUMMARY
-          echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
-          echo 'Download the HTML artifacts to view the coverage report.' >> $GITHUB_STEP_SUMMARY
-
-      # This is a huge hack, there's no official action for removing artifacts currently.
-      # Also ACTIONS_RUNTIME_URL and ACTIONS_RUNTIME_TOKEN aren't available in normal run
-      # steps, so we have to use github-script to get them.
-      #
-      # The advantage of doing this, though, is that we save on artifact storage space.
-      - name: Get secret artifact runtime URL
-        uses: actions/github-script@v6
-        id: get-runtime-url
-        with:
-          result-encoding: string
-          script: |
-            const { ACTIONS_RUNTIME_URL } = process.env;
-            return ACTIONS_RUNTIME_URL;
-
-      - name: Get secret artifact runtime token
-        uses: actions/github-script@v6
-        id: get-runtime-token
-        with:
-          result-encoding: string
-          script: |
-            const { ACTIONS_RUNTIME_TOKEN } = process.env;
-            return ACTIONS_RUNTIME_TOKEN;
-
-      - name: Remove intermediary artifacts
-        env:
-          ACTIONS_RUNTIME_URL: ${{ steps.get-runtime-url.outputs.result }}
-          ACTIONS_RUNTIME_TOKEN: ${{ steps.get-runtime-token.outputs.result }}
-        run: |
-          echo "::add-mask::${ACTIONS_RUNTIME_TOKEN}"
-          artifacts=$(
-            curl -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
-              ${ACTIONS_RUNTIME_URL}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview \
-              | jq -r '.value | .[] | select(.name | startswith("coverage-")) | .url'
-          )
-
-          for artifact in $artifacts; do
-            curl -i -X DELETE -H "Accept: application/json;api-version=6.0-preview" -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" "$artifact"
-          done
-
-      - name: Upload coverage report as artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: awx-collection-integration-coverage-html
-          path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/reports/coverage
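One detail worth flagging on the incoming side of this diff: the matrix is defined under the key `tests`, but the final step's name references `${{ matrix.texts.label }}`. GitHub Actions renders an unknown context property as an empty string, so that step name would come out blank. Presumably the intended expression is `matrix.tests.label`, as in this sketch:

    - name: ${{ matrix.tests.label }}  # 'tests' matches the matrix key defined above
      run: |
        docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
          --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}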
48 changes: .github/workflows/dab-release.yml (vendored)

@@ -1,48 +0,0 @@
----
-name: django-ansible-base requirements update
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 6 * * *' # once a day @ 6 AM
-permissions:
-  pull-requests: write
-  contents: write
-jobs:
-  dab-pin-newest:
-    runs-on: ubuntu-latest
-    steps:
-      - id: dab-release
-        name: Get current django-ansible-base release version
-        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
-        with:
-          owner: ansible
-          repo: django-ansible-base
-          excludes: prerelease, draft
-
-      - name: Check out repository code
-        uses: actions/checkout@v4
-
-      - id: dab-pinned
-        name: Get current django-ansible-base pinned version
-        run:
-          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
-
-      - name: Update django-ansible-base pinned version to upstream release
-        run:
-          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
-
-      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
-        with:
-          base: devel
-          branch: bump-django-ansible-base
-          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
-          body: |
-            Automated .github/workflows/dab-release.yml
-
-            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
-            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
-          commit-message: |
-            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
-          add-paths:
-            requirements/requirements_git.txt
75 changes: .github/workflows/devel_images.yml (vendored)

@@ -1,58 +1,25 @@
 ---
 name: Build/Push Development Images
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-  DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
 on:
-  workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
-      - feature_*
 jobs:
-  push-development-images:
+  push:
+    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
     runs-on: ubuntu-latest
-    timeout-minutes: 120
     permissions:
       packages: write
       contents: read
-    strategy:
-      fail-fast: false
-      matrix:
-        build-targets:
-          - image-name: awx_devel
-            make-target: docker-compose-buildx
-          - image-name: awx_kube_devel
-            make-target: awx-kube-dev-buildx
-          - image-name: awx
-            make-target: awx-kube-buildx
     steps:
+      - uses: actions/checkout@v2
 
-      - name: Skipping build of awx image for non-awx repository
-        run: |
-          echo "Skipping build of awx image for non-awx repository"
-          exit 0
-        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
-
-      - uses: actions/checkout@v3
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Set GITHUB_ENV variables
-        run: |
-          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
-          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
-          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-        env:
-          OWNER: '${{ github.repository_owner }}'
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
       - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v2
         with:
           python-version: ${{ env.py_version }}
 
@@ -60,29 +27,17 @@ jobs:
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
 
-      - name: Setup node and npm for old UI build
-        uses: actions/setup-node@v2
-        with:
-          node-version: '16'
-        if: matrix.build-targets.image-name == 'awx'
-
-      - name: Prebuild old-UI for awx image (to speed up build process)
+      - name: Pre-pull image to warm build cache
         run: |
-          sudo apt-get install gettext
-          make ui-release
-        if: matrix.build-targets.image-name == 'awx'
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/} || :
 
-      - name: Setup node and npm for the new UI build
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-        if: matrix.build-targets.image-name == 'awx'
-
-      - name: Prebuild new UI for awx image (to speed up build process)
+      - name: Build images
         run: |
-          make ui-next
-        if: matrix.build-targets.image-name == 'awx'
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
 
-      - name: Build and push AWX devel images
+      - name: Push image
         run: |
-          make ${{ matrix.build-targets.make-target }}
+          docker push ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/}
17 changes: .github/workflows/docs.yml (vendored)

@@ -1,17 +0,0 @@
----
-name: Docsite CI
-on:
-  pull_request:
-jobs:
-  docsite-build:
-    name: docsite test build
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: install tox
-        run: pip install tox
-
-      - name: Assure docs can be built
-        run: tox -e docs
108 changes: .github/workflows/e2e_test.yml (vendored, new file)

@@ -0,0 +1,108 @@
+---
+name: E2E Tests
+on:
+  pull_request_target:
+    types: [labeled]
+jobs:
+  e2e-test:
+    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
+    runs-on: ubuntu-latest
+    timeout-minutes: 40
+    permissions:
+      packages: write
+      contents: read
+    strategy:
+      matrix:
+        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+
+      - name: Install system deps
+        run: sudo apt-get install -y gettext
+
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+
+      - name: Build UI
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make ui-devel
+
+      - name: Start AWX
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ github.base_ref }} make docker-compose &> make-docker-compose-output.log &
+
+      - name: Pull awx_cypress_base image
+        run: |
+          docker pull quay.io/awx/awx_cypress_base:latest
+
+      - name: Checkout test project
+        uses: actions/checkout@v2
+        with:
+          repository: ${{ github.repository_owner }}/tower-qa
+          ssh-key: ${{ secrets.QA_REPO_KEY }}
+          path: tower-qa
+          ref: devel
+
+      - name: Build cypress
+        run: |
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          docker build -t awx-pf-tests .
+
+      - name: Update default AWX password
+        run: |
+          while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]
+          do
+            echo "Waiting for AWX..."
+            sleep 5;
+          done
+          echo "AWX is up, updating the password..."
+          docker exec -i tools_awx_1 sh <<-EOSH
+            awx-manage update_password --username=admin --password=password
+          EOSH
+
+      - name: Run E2E tests
+        env:
+          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+        run: |
+          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
+          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
+          export COMMIT_INFO_SHA=$GITHUB_SHA
+          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
+          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
+          AWX_IP=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tools_awx_1)
+          printenv > .env
+          echo "Executing tests:"
+          docker run \
+            --network '_sources_default' \
+            --ipc=host \
+            --env-file=.env \
+            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
+            -e CYPRESS_AWX_E2E_USERNAME=admin \
+            -e CYPRESS_AWX_E2E_PASSWORD='password' \
+            -e COMMAND="npm run cypress-concurrently-gha" \
+            -v /dev/shm:/dev/shm \
+            -v $PWD:/e2e \
+            -w /e2e \
+            awx-pf-tests run --project .
+
+      - name: Save AWX logs
+        uses: actions/upload-artifact@v2
+        with:
+          name: AWX-logs-${{ matrix.job }}
+          path: make-docker-compose-output.log
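Note that the readiness loop in this new workflow polls forever until the job-level timeout kicks in, whereas the run_awx_devel composite action earlier in this compare bounds the same wait explicitly. A standalone sketch of that bounded pattern, reusing the same ping endpoint:

    SECONDS=0
    while [[ "$(curl -s -o /dev/null -w '%{http_code}' -k https://localhost:8043/api/v2/ping/)" != "200" ]]; do
      if [[ $SECONDS -gt 600 ]]; then  # give up after ten minutes
        echo "Timing out, AWX never came up"
        exit 1
      fi
      echo "Waiting for AWX..."
      sleep 5
    done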
15 changes: .github/workflows/feature_branch_deletion.yml (vendored)

@@ -1,13 +1,12 @@
 ---
 name: Feature branch deletion cleanup
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-on: delete
+on:
+  delete:
+    branches:
+      - feature_**
 jobs:
-  branch_delete:
-    if: ${{ github.event.ref_type == 'branch' && startsWith(github.event.ref, 'feature_') }}
+  push:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
@@ -20,4 +19,6 @@ jobs:
       run: |
         ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
         ansible localhost -c local -m aws_s3 \
-          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
+          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+
+
10 changes: .github/workflows/label_issue.yml (vendored)

@@ -6,19 +6,14 @@ on:
       - opened
       - reopened
 
-permissions:
-  contents: write # to fetch code
-  issues: write # to label issues
-
 jobs:
   triage:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label Issue
 
     steps:
      - name: Label Issue
-        uses: github/issue-labeler@v3.1
+        uses: github/issue-labeler@v2.4.1
        with:
          repo-token: "${{ secrets.GITHUB_TOKEN }}"
          not-before: 2021-12-07T07:00:00Z
@@ -27,10 +22,9 @@ jobs:
 
   community:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label Issue - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v2
       - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
8 changes: .github/workflows/label_pr.yml (vendored)

@@ -7,14 +7,9 @@ on:
       - reopened
       - synchronize
 
-permissions:
-  contents: write # to determine modified files (actions/labeler)
-  pull-requests: write # to add labels to PRs (actions/labeler)
-
 jobs:
   triage:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label PR
 
     steps:
@@ -26,10 +21,9 @@ jobs:
 
   community:
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     name: Label PR - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v2
       - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
8 changes: .github/workflows/pr_body_check.yml (vendored)

@@ -7,10 +7,8 @@ on:
     types: [opened, edited, reopened, synchronize]
 jobs:
   pr-check:
-    if: github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')
     name: Scan PR description for semantic versioning keywords
     runs-on: ubuntu-latest
-    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
@@ -19,9 +17,9 @@ jobs:
       env:
         PR_BODY: ${{ github.event.pull_request.body }}
       run: |
-        echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change" > Z
-        echo "$PR_BODY" | grep "New or Enhanced Feature" > Y
-        echo "$PR_BODY" | grep "Breaking Change" > X
+        echo $PR_BODY | grep "Bug, Docs Fix or other nominal change" > Z
+        echo $PR_BODY | grep "New or Enhanced Feature" > Y
+        echo $PR_BODY | grep "Breaking Change" > X
         exit 0
       # We exit 0 and set the shell to prevent the returns from the greps from failing this step
       # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
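The quoting difference above is not cosmetic: a PR body is typically multi-line, and without quotes the shell word-splits $PR_BODY, collapsing its newlines into spaces before grep ever runs. A quick bash illustration with a hypothetical body:

    PR_BODY=$'First line\nBreaking Change\nLast line'
    echo $PR_BODY | wc -l    # 1 -- unquoted: newlines are lost to word splitting
    echo "$PR_BODY" | wc -l  # 3 -- quoted: line structure reaches grep intact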
69 changes: .github/workflows/promote.yml (vendored)

@@ -1,44 +1,21 @@
 ---
 name: Promote Release
 
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-
 on:
   release:
     types: [published]
-  workflow_dispatch:
-    inputs:
-      tag_name:
-        description: 'Name for the tag of the release.'
-        required: true
-permissions:
-  contents: read # to fetch code (actions/checkout)
-
 jobs:
   promote:
-    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
-    timeout-minutes: 90
     steps:
-      - name: Set GitHub Env vars for workflow_dispatch event
-        if: ${{ github.event_name == 'workflow_dispatch' }}
-        run: |
-          echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
-
-      - name: Set GitHub Env vars if release event
-        if: ${{ github.event_name == 'release' }}
-        run: |
-          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
-
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
 
       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
       - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v2
         with:
           python-version: ${{ env.py_version }}
@@ -55,24 +32,11 @@ jobs:
         if: ${{ github.repository_owner != 'ansible' }}
 
       - name: Build collection and publish to galaxy
-        env:
-          COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
-          COLLECTION_VERSION: ${{ env.TAG_NAME }}
-          COLLECTION_TEMPLATE_VERSION: true
         run: |
-          sudo apt-get install jq
-          make build_collection
-          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
-          if [[ "$count" == "1" ]]; then
-            echo "Galaxy release already done";
-          elif [[ "$count" == "0" ]]; then
-            ansible-galaxy collection publish \
-              --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
-          else
-            echo "Unexpected count from galaxy search: $count";
-            exit 1;
-          fi
+          COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
+          ansible-galaxy collection publish \
+            --token=${{ secrets.GALAXY_TOKEN }} \
+            awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz
 
       - name: Set official pypi info
         run: echo pypi_repo=pypi >> $GITHUB_ENV
@@ -83,11 +47,8 @@ jobs:
         if: ${{ github.repository_owner != 'ansible' }}
 
       - name: Build awxkit and upload to pypi
-        env:
-          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
         run: |
-          git reset --hard
-          cd awxkit && python3 setup.py sdist bdist_wheel
+          cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
             -u ${{ secrets.PYPI_USERNAME }} \
@@ -104,15 +65,9 @@ jobs:
 
       - name: Re-tag and promote awx image
         run: |
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository }}:latest
-
-      - name: Re-tag and promote awx-ee image
-        run: |
-          docker buildx imagetools create \
-            ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
-            --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
+          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
+          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
+          docker push quay.io/${{ github.repository }}:latest
107 changes: .github/workflows/stage.yml (vendored)

@@ -1,9 +1,5 @@
 ---
 name: Stage Release
-
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-
 on:
   workflow_dispatch:
     inputs:
@@ -21,9 +17,7 @@ on:
 
 jobs:
   stage:
-    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
-    timeout-minutes: 90
     permissions:
       packages: write
       contents: write
@@ -45,100 +39,54 @@ jobs:
           exit 0
 
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
         with:
           path: awx
 
-      - name: Checkout awx-operator
-        uses: actions/checkout@v3
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
         with:
-          repository: ${{ github.repository_owner }}/awx-operator
-          path: awx-operator
+          python-version: ${{ env.py_version }}
 
       - name: Checkout awx-logos
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
         with:
           repository: ansible/awx-logos
           path: awx-logos
 
-      - name: Get python version from Makefile
-        working-directory: awx
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+      - name: Checkout awx-operator
+        uses: actions/checkout@v2
         with:
-          python-version: ${{ env.py_version }}
+          repository: ${{ github.repository_owner }}/awx-operator
+          path: awx-operator
 
       - name: Install playbook dependencies
         run: |
           python3 -m pip install docker
 
-      - name: Log into registry ghcr.io
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Copy logos for inclusion in sdist for official build
-        working-directory: awx
-        run: |
-          cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
-
-      - name: Setup node and npm
-        uses: actions/setup-node@v2
-        with:
-          node-version: '16.13.1'
-
-      - name: Prebuild UI for awx image (to speed up build process)
-        working-directory: awx
-        run: |
-          sudo apt-get install gettext
-          make ui-release
-          make ui-next
-
-      - name: Set build env variables
-        run: |
-          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
-          echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
-          echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
-          echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
-        env:
-          OWNER: ${{ github.repository_owner }}
-
       - name: Build and stage AWX
         working-directory: awx
-        env:
-          DOCKER_BUILDX_PUSH: true
-          HEADLESS: false
-          PLATFORMS: linux/amd64,linux/arm64
         run: |
-          make awx-kube-buildx
+          ansible-playbook -v tools/ansible/build.yml \
+            -e registry=ghcr.io \
+            -e registry_username=${{ github.actor }} \
+            -e registry_password=${{ secrets.GITHUB_TOKEN }} \
+            -e awx_image=${{ github.repository }} \
+            -e awx_version=${{ github.event.inputs.version }} \
+            -e ansible_python_interpreter=$(which python3) \
+            -e push=yes \
+            -e awx_official=yes
 
-      - name: tag awx-ee:latest with version input
-        run: |
-          docker buildx imagetools create \
-            quay.io/ansible/awx-ee:latest \
-            --tag ${AWX_EE_TEST_IMAGE}
-
-      - name: Stage awx-operator image
+      - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
-          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
+          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \
             --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
-          IMG=${AWX_OPERATOR_TEST_IMAGE} \
-          make docker-buildx
+          IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \
+          VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push
 
-      - name: Pulling images for test deployment with awx-operator
-        # the awx-operator molecule test expects to kind-load the image; buildx exports the image to the registry, not locally
-        run: |
-          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
-          docker pull -q ${AWX_EE_TEST_IMAGE}
-          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
-
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -148,6 +96,9 @@ jobs:
           sudo rm -f $(which kustomize)
           make kustomize
           KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
+        env:
+          AWX_TEST_IMAGE: ${{ github.repository }}
+          AWX_TEST_VERSION: ${{ github.event.inputs.version }}
 
       - name: Create draft release for AWX
         working-directory: awx
1 change: .github/workflows/update_dependabot_prs.yml (vendored)

@@ -9,7 +9,6 @@ jobs:
     name: Update Dependabot Prs
     if: contains(github.event.pull_request.labels.*.name, 'dependencies') && contains(github.event.pull_request.labels.*.name, 'component:ui')
     runs-on: ubuntu-latest
-    timeout-minutes: 20
 
     steps:
       - name: Checkout branch
11
.github/workflows/upload_schema.yml
vendored
11
.github/workflows/upload_schema.yml
vendored
@@ -1,9 +1,5 @@
|
|||||||
---
|
---
|
||||||
name: Upload API Schema
|
name: Upload API Schema
|
||||||
|
|
||||||
env:
|
|
||||||
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
|
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
@@ -13,18 +9,17 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
push:
|
push:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 60
|
|
||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Get python version from Makefile
|
- name: Get python version from Makefile
|
||||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Install python ${{ env.py_version }}
|
- name: Install python ${{ env.py_version }}
|
||||||
uses: actions/setup-python@v4
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.py_version }}
|
python-version: ${{ env.py_version }}
|
||||||
|
|
||||||
@@ -34,7 +29,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Pre-pull image to warm build cache
|
- name: Pre-pull image to warm build cache
|
||||||
run: |
|
run: |
|
||||||
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
||||||
|
|
||||||
- name: Build image
|
- name: Build image
|
||||||
run: |
|
run: |
|
||||||
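The pre-pull step above leans on a bash parameter expansion: ${GITHUB_REF##*/} deletes the longest prefix matching */, reducing a ref such as refs/heads/devel to the bare branch name used as the image tag. The same expansion outside CI:

    # Reproduce the tag computation locally (values assumed for illustration)
    GITHUB_REF=refs/heads/devel
    echo "${GITHUB_REF##*/}"    # -> devel
    # '|| :' in the workflow makes a missing tag non-fatal
    docker pull -q "ghcr.io/ansible/awx_devel:${GITHUB_REF##*/}" || :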
18 .gitignore vendored
@@ -46,11 +46,6 @@ tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
 tools/docker-compose/keycloak.awx.realm.json
 
-!tools/docker-compose/editable_dependencies
-tools/docker-compose/editable_dependencies/*
-!tools/docker-compose/editable_dependencies/README.md
-!tools/docker-compose/editable_dependencies/install.sh
-
 # Tower setup playbook testing
 setup/test/roles/postgresql
 **/provision_docker
@@ -162,18 +157,7 @@ use_dev_supervisor.txt
 *.unison.tmp
 *.#
 /awx/ui/.ui-built
+/Dockerfile
 /_build/
 /_build_kube_dev/
-/Dockerfile
-/Dockerfile.dev
 /Dockerfile.kube-dev
 
-awx/ui_next/src
-awx/ui_next/build
-
-# Docs build stuff
-docs/docsite/build/
-_readthedocs/
-
-# Pyenv
-.python-version
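The editable_dependencies block removed from .gitignore is a standard negation pattern: re-include the directory, ignore its contents, then re-include named files. git check-ignore -v reports which rule decides a given path; a sketch with assumed paths:

    # Prints the .gitignore source line that matched; ignored paths exit 0
    git check-ignore -v tools/docker-compose/editable_dependencies/some_lib
    # A re-included file matches no ignore rule: no output, exit status 1
    git check-ignore -v tools/docker-compose/editable_dependencies/README.md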
@@ -1,5 +0,0 @@
-[allowlist]
-description = "Documentation contains example secrets and passwords"
-paths = [
-"docs/docsite/rst/administration/oauth2_token_auth.rst",
-]
@@ -1,5 +0,0 @@
-[tool.pip-tools]
-resolver = "backtracking"
-allow-unsafe = true
-strip-extras = true
-quiet = true
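The deleted [tool.pip-tools] table only pins defaults that pip-compile can also take as command-line flags, so the equivalent explicit invocation would be roughly (input file assumed):

    pip-compile --resolver=backtracking --allow-unsafe --strip-extras --quiet requirements/requirements.in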
@@ -1,16 +0,0 @@
-# Read the Docs configuration file
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
-version: 2
-
-build:
-os: ubuntu-22.04
-tools:
-python: >-
-3.11
-commands:
-- pip install --user tox
-- python3 -m tox -e docs --notest -v
-- python3 -m tox -e docs --skip-pkg-install -q
-- mkdir -p _readthedocs/html/
-- mv docs/docsite/build/html/* _readthedocs/html/
113 .vscode/launch.json vendored
@@ -1,113 +0,0 @@
-{
-"version": "0.2.0",
-"configurations": [
-{
-"name": "run_ws_heartbeat",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_ws_heartbeat"],
-"django": true,
-"preLaunchTask": "stop awx-ws-heartbeat",
-"postDebugTask": "start awx-ws-heartbeat"
-},
-{
-"name": "run_cache_clear",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_cache_clear"],
-"django": true,
-"preLaunchTask": "stop awx-cache-clear",
-"postDebugTask": "start awx-cache-clear"
-},
-{
-"name": "run_callback_receiver",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_callback_receiver"],
-"django": true,
-"preLaunchTask": "stop awx-receiver",
-"postDebugTask": "start awx-receiver"
-},
-{
-"name": "run_dispatcher",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_dispatcher"],
-"django": true,
-"preLaunchTask": "stop awx-dispatcher",
-"postDebugTask": "start awx-dispatcher"
-},
-{
-"name": "run_rsyslog_configurer",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_rsyslog_configurer"],
-"django": true,
-"preLaunchTask": "stop awx-rsyslog-configurer",
-"postDebugTask": "start awx-rsyslog-configurer"
-},
-{
-"name": "run_cache_clear",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_cache_clear"],
-"django": true,
-"preLaunchTask": "stop awx-cache-clear",
-"postDebugTask": "start awx-cache-clear"
-},
-{
-"name": "run_wsrelay",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["run_wsrelay"],
-"django": true,
-"preLaunchTask": "stop awx-wsrelay",
-"postDebugTask": "start awx-wsrelay"
-},
-{
-"name": "daphne",
-"type": "debugpy",
-"request": "launch",
-"program": "/var/lib/awx/venv/awx/bin/daphne",
-"args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
-"django": true,
-"preLaunchTask": "stop awx-daphne",
-"postDebugTask": "start awx-daphne"
-},
-{
-"name": "runserver(uwsgi alternative)",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["runserver", "127.0.0.1:8052"],
-"django": true,
-"preLaunchTask": "stop awx-uwsgi",
-"postDebugTask": "start awx-uwsgi"
-},
-{
-"name": "runserver_plus(uwsgi alternative)",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["runserver_plus", "127.0.0.1:8052"],
-"django": true,
-"preLaunchTask": "stop awx-uwsgi and install Werkzeug",
-"postDebugTask": "start awx-uwsgi"
-},
-{
-"name": "shell_plus",
-"type": "debugpy",
-"request": "launch",
-"program": "manage.py",
-"args": ["shell_plus"],
-"django": true,
-},
-]
-}
100 .vscode/tasks.json vendored
@@ -1,100 +0,0 @@
-{
-"version": "2.0.0",
-"tasks": [
-{
-"label": "start awx-cache-clear",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-cache-clear"
-},
-{
-"label": "stop awx-cache-clear",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-cache-clear"
-},
-{
-"label": "start awx-daphne",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-daphne"
-},
-{
-"label": "stop awx-daphne",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-daphne"
-},
-{
-"label": "start awx-dispatcher",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-dispatcher"
-},
-{
-"label": "stop awx-dispatcher",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-dispatcher"
-},
-{
-"label": "start awx-receiver",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-receiver"
-},
-{
-"label": "stop awx-receiver",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-receiver"
-},
-{
-"label": "start awx-rsyslog-configurer",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
-},
-{
-"label": "stop awx-rsyslog-configurer",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
-},
-{
-"label": "start awx-rsyslogd",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-rsyslogd"
-},
-{
-"label": "stop awx-rsyslogd",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-rsyslogd"
-},
-{
-"label": "start awx-uwsgi",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-uwsgi"
-},
-{
-"label": "stop awx-uwsgi",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-uwsgi"
-},
-{
-"label": "stop awx-uwsgi and install Werkzeug",
-"type": "shell",
-"command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
-},
-{
-"label": "start awx-ws-heartbeat",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-ws-heartbeat"
-},
-{
-"label": "stop awx-ws-heartbeat",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
-},
-{
-"label": "start awx-wsrelay",
-"type": "shell",
-"command": "supervisorctl start tower-processes:awx-wsrelay"
-},
-{
-"label": "stop awx-wsrelay",
-"type": "shell",
-"command": "supervisorctl stop tower-processes:awx-wsrelay"
-}
-]
-}
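Taken together, the two deleted .vscode files implement one workflow: free a service from supervisord, run it under the debugger, then hand it back. The same cycle by hand inside the dev container looks roughly like:

    # Stop the supervised copy so a debugger-run copy can bind its port
    supervisorctl stop tower-processes:awx-dispatcher
    python manage.py run_dispatcher     # run (or debug) it in the foreground
    # afterwards, restore the supervised process
    supervisorctl start tower-processes:awx-dispatcher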
@@ -10,9 +10,6 @@ ignore: |
 tools/docker-compose/_sources
 # django template files
 awx/api/templates/instance_install_bundle/**
-.readthedocs.yaml
-tools/loki
-tools/otel
 
 extends: default
 
@@ -4,6 +4,6 @@
 
 Early versions of AWX did not support seamless upgrades between major versions and required the use of a backup and restore tool to perform upgrades.
 
-As of version 18.0, `awx-operator` is the preferred install/upgrade method. Users who wish to upgrade modern AWX installations should follow the instructions at:
+Users who wish to upgrade modern AWX installations should follow the instructions at:
 
-https://github.com/ansible/awx-operator/blob/devel/docs/upgrade/upgrading.md
+https://github.com/ansible/awx/blob/devel/INSTALL.md#upgrading-from-previous-versions
@@ -31,7 +31,7 @@ If your issue isn't considered high priority, then please be patient as it may t
 
 `state:needs_info` The issue needs more information. This could be more debug output, more specifics out the system such as version information. Any detail that is currently preventing this issue from moving forward. This should be considered a blocked state.
 
-`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer or when a person is less familiar with an area of the code base the issue is for.
+`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer or when a person is less familar with an area of the code base the issue is for.
 
 `state:needs_revision` More commonly used on pull requests, this state represents that there are changes that are being waited on.
 
@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_tr
 Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
 These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
 
-The `state:needs_triage` label will remain on your pull request until a person has looked at it.
+The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
 
 You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
 The comment will look something like `CC @matburt @wwitzel3 ...`.
@@ -6,14 +6,13 @@ recursive-include awx/templates *.html
 recursive-include awx/api/templates *.md *.html *.yml
 recursive-include awx/ui/build *.html
 recursive-include awx/ui/build *
-recursive-include awx/ui_next/build *
 recursive-include awx/playbooks *.yml
 recursive-include awx/lib/site-packages *
 recursive-include awx/plugins *.ps1
 recursive-include requirements *.txt
 recursive-include requirements *.yml
 recursive-include config *
-recursive-include licenses *
+recursive-include docs/licenses *
 recursive-exclude awx devonly.py*
 recursive-exclude awx/api/tests *
 recursive-exclude awx/main/tests *
@@ -22,7 +21,7 @@ recursive-exclude awx/settings local_settings.py*
 include tools/scripts/request_tower_configuration.sh
 include tools/scripts/request_tower_configuration.ps1
 include tools/scripts/automation-controller-service
-include tools/scripts/rsyslog-4xx-recovery
+include tools/scripts/failure-event-handler
 include tools/scripts/awx-python
 include awx/playbooks/library/mkfifo.py
 include tools/sosreport/*
362 Makefile
@@ -1,36 +1,16 @@
--include awx/ui_next/Makefile
-
-PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
-SHELL := bash
-DOCKER_COMPOSE ?= docker compose
+PYTHON ?= python3.9
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
-KIND_BIN ?= $(shell which kind)
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
-# ansible-test requires semver compatable version, so we allow overrides to hack it
-COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
-# args for the ansible-test sanity command
-COLLECTION_SANITY_ARGS ?= --docker
-# collection unit testing directories
-COLLECTION_TEST_DIRS ?= awx_collection/test/awx
-# collection integration test directories (defaults to all)
-COLLECTION_TEST_TARGET ?=
-# args for collection install
-COLLECTION_PACKAGE ?= awx
-COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
-COLLECTION_TEMPLATE_VERSION ?= false
+VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
+COLLECTION_VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
 
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
 MAIN_NODE_TYPE ?= hybrid
-# If set to true docker-compose will also start a pgbouncer instance and use it
-PGBOUNCER ?= false
 # If set to true docker-compose will also start a keycloak instance
 KEYCLOAK ?= false
 # If set to true docker-compose will also start an ldap instance
@@ -41,42 +21,20 @@ SPLUNK ?= false
 PROMETHEUS ?= false
 # If set to true docker-compose will also start a grafana instance
 GRAFANA ?= false
-# If set to true docker-compose will also start a hashicorp vault instance
-VAULT ?= false
-# If set to true docker-compose will also start a hashicorp vault instance with TLS enabled
-VAULT_TLS ?= false
-# If set to true docker-compose will also start a tacacs+ instance
-TACACS ?= false
-# If set to true docker-compose will also start an OpenTelemetry Collector instance
-OTEL ?= false
-# If set to true docker-compose will also start a Loki instance
-LOKI ?= false
-# If set to true docker-compose will install editable dependencies
-EDITABLE_DEPENDENCIES ?= false
-# If set to true, use tls for postgres connection
-PG_TLS ?= false
 
 VENV_BASE ?= /var/lib/awx/venv
 
-DEV_DOCKER_OWNER ?= ansible
-# Docker will only accept lowercase, so github names like Paul need to be paul
-DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
-DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
+DEV_DOCKER_TAG_BASE ?= ghcr.io/ansible
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
-IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
-IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
-
-# Common command to use for running ansible-playbook
-ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
 
 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 
 # Python packages to install only from source (not from binary wheels)
 # Comma separated list
-SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
+SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==58.2.0 setuptools_scm[toml]==6.4.2 wheel==0.36.2
 
 NAME ?= awx
 
@@ -88,21 +46,6 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
 
 I18N_FLAG_FILE = .i18n_built
 
-## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
-PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
-
-# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
-# DOCKER_CACHE=--no-cache make docker-compose-build
-ifeq ($(DOCKER_CACHE),)
-DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
-DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
-DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
-else
-DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
-DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
-DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
-endif
-
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 develop refresh adduser migrate dbchange \
 receiver test test_unit test_coverage coverage_html \
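The removed cache block made image-layer caching switchable from the environment; per the removed comment itself, the two modes are:

    # Default (DOCKER_CACHE unset): builds reuse layers from the published image
    # via --cache-from=$(DEVEL_IMAGE_NAME)
    make docker-compose-build
    # Force a full rebuild with no cache (example taken straight from the removed comment)
    DOCKER_CACHE=--no-cache make docker-compose-build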
@@ -127,7 +70,7 @@ clean-schema:
 
 clean-languages:
 rm -f $(I18N_FLAG_FILE)
-find ./awx/locale/ -type f -regex '.*\.mo$$' -delete
+find ./awx/locale/ -type f -regex ".*\.mo$" -delete
 
 ## Remove temporary build files, compiled Python files.
 clean: clean-ui clean-api clean-awxkit clean-dist
@@ -175,7 +118,7 @@ virtualenv_awx:
 fi; \
 fi
 
-## Install third-party requirements needed for AWX's environment.
+# this does not use system site packages intentionally
 requirements_awx: virtualenv_awx
 if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
@@ -241,14 +184,28 @@ collectstatic:
 fi; \
 $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 
+DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
 
 uwsgi: collectstatic
 @if [ "$(VENV_BASE)" ]; then \
 . $(VENV_BASE)/awx/bin/activate; \
 fi; \
-uwsgi /etc/tower/uwsgi.ini
+uwsgi -b 32768 \
+--socket 127.0.0.1:8050 \
+--module=awx.wsgi:application \
+--home=/var/lib/awx/venv/awx \
+--chdir=/awx_devel/ \
+--vacuum \
+--processes=5 \
+--harakiri=120 --master \
+--no-orphans \
+--max-requests=1000 \
+--stats /tmp/stats.socket \
+--lazy-apps \
+--logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
 
 awx-autoreload:
-@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
+@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
 
 daphne:
 @if [ "$(VENV_BASE)" ]; then \
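The new-side uwsgi recipe trades the /etc/tower/uwsgi.ini config file for inline flags; these are all stock uWSGI options (--harakiri=120 reaps requests after 120 s, --lazy-apps loads the app per worker after fork). Run outside make, the same server start would be, assuming the dev-container venv path:

    # Same invocation as the Makefile recipe above
    . /var/lib/awx/venv/awx/bin/activate
    uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application \
      --home=/var/lib/awx/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 \
      --harakiri=120 --master --no-orphans --max-requests=1000 \
      --stats /tmp/stats.socket --lazy-apps \
      --logformat "%(addr) %(method) %(uri) - %(proto) %(status)"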
@@ -256,6 +213,12 @@ daphne:
 fi; \
 daphne -b 127.0.0.1 -p 8051 awx.asgi:channel_layer
 
+wsbroadcast:
+@if [ "$(VENV_BASE)" ]; then \
+. $(VENV_BASE)/awx/bin/activate; \
+fi; \
+$(PYTHON) manage.py run_wsbroadcast
+
 ## Run to start the background task dispatcher for development.
 dispatcher:
 @if [ "$(VENV_BASE)" ]; then \
@@ -263,6 +226,7 @@ dispatcher:
 fi; \
 $(PYTHON) manage.py run_dispatcher
 
+
 ## Run to start the zeromq callback receiver
 receiver:
 @if [ "$(VENV_BASE)" ]; then \
@@ -279,34 +243,6 @@ jupyter:
 fi; \
 $(MANAGEMENT_COMMAND) shell_plus --notebook
 
-## Start the rsyslog configurer process in background in development environment.
-run-rsyslog-configurer:
-@if [ "$(VENV_BASE)" ]; then \
-. $(VENV_BASE)/awx/bin/activate; \
-fi; \
-$(PYTHON) manage.py run_rsyslog_configurer
-
-## Start cache_clear process in background in development environment.
-run-cache-clear:
-@if [ "$(VENV_BASE)" ]; then \
-. $(VENV_BASE)/awx/bin/activate; \
-fi; \
-$(PYTHON) manage.py run_cache_clear
-
-## Start the wsrelay process in background in development environment.
-run-wsrelay:
-@if [ "$(VENV_BASE)" ]; then \
-. $(VENV_BASE)/awx/bin/activate; \
-fi; \
-$(PYTHON) manage.py run_wsrelay
-
-## Start the heartbeat process in background in development environment.
-run-ws-heartbeat:
-@if [ "$(VENV_BASE)" ]; then \
-. $(VENV_BASE)/awx/bin/activate; \
-fi; \
-$(PYTHON) manage.py run_ws_heartbeat
-
 reports:
 mkdir -p $@
 
@@ -328,18 +264,18 @@ swagger: reports
 @if [ "$(VENV_BASE)" ]; then \
 . $(VENV_BASE)/awx/bin/activate; \
 fi; \
-(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
+(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
 
 check: black
 
 api-lint:
-BLACK_ARGS="--check" $(MAKE) black
+BLACK_ARGS="--check" make black
 flake8 awx
 yamllint -s .
 
-## Run egg_info_dev to generate awx.egg-info for development.
 awx-link:
 [ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
+cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/$(PYTHON)/site-packages/awx.egg-link
 
 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
 PYTEST_ARGS ?= -n auto
@@ -352,23 +288,19 @@ test:
 cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
 
-test_migrations:
-if [ "$(VENV_BASE)" ]; then \
-. $(VENV_BASE)/awx/bin/activate; \
-fi; \
-PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test $(PYTEST_ARGS) $(TEST_DIRS)
-
-## Runs AWX_DOCKER_CMD inside a new docker container.
-docker-runner:
-docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
+COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+COLLECTION_TEST_TARGET ?=
+COLLECTION_PACKAGE ?= awx
+COLLECTION_NAMESPACE ?= awx
+COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_TEMPLATE_VERSION ?= false
 
 test_collection:
 rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
 if [ "$(VENV_BASE)" ]; then \
 . $(VENV_BASE)/awx/bin/activate; \
 fi && \
-if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
-ansible --version
+pip install ansible-core && \
 py.test $(COLLECTION_TEST_DIRS) -v
 # The python path needs to be modified so that the tests can find Ansible within the container
 # First we will use anything expility set as PYTHONPATH
@@ -385,7 +317,7 @@ symlink_collection:
 ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
 
 awx_collection_build: $(shell find awx_collection -type f)
-$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
+ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
 -e collection_package=$(COLLECTION_PACKAGE) \
 -e collection_namespace=$(COLLECTION_NAMESPACE) \
 -e collection_version=$(COLLECTION_VERSION) \
@@ -398,16 +330,11 @@ install_collection: build_collection
 rm -rf $(COLLECTION_INSTALL)
 ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz
 
-test_collection_sanity:
-rm -rf awx_collection_build/
-rm -rf $(COLLECTION_INSTALL)
-if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
-ansible --version
-COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
-cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
+test_collection_sanity: install_collection
+cd $(COLLECTION_INSTALL) && ansible-test sanity
 
 test_collection_integration: install_collection
-cd $(COLLECTION_INSTALL) && ansible-test integration -vvv $(COLLECTION_TEST_TARGET)
+cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
 
 test_unit:
 @if [ "$(VENV_BASE)" ]; then \
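Worth noting on the sanity-test change: the old side pins COLLECTION_VERSION to 1.0.0 and honors COLLECTION_SANITY_ARGS, which defaults to --docker at the top of the old-side Makefile, so the containerized run can be redirected at invocation time, e.g.:

    # Old-side default: sanity tests inside the ansible-test docker image
    make test_collection_sanity
    # Or run them in a local virtualenv instead of a container
    COLLECTION_SANITY_ARGS="--venv" make test_collection_sanity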
@@ -462,20 +389,18 @@ $(UI_BUILD_FLAG_FILE):
 $(PYTHON) tools/scripts/compilemessages.py
 $(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
 $(NPM_BIN) --prefix awx/ui --loglevel warn run build
+mkdir -p /var/lib/awx/public/static/css
+mkdir -p /var/lib/awx/public/static/js
+mkdir -p /var/lib/awx/public/static/media
+cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
+cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
+cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media
 touch $@
 
 ui-release: $(UI_BUILD_FLAG_FILE)
 
 ui-devel: awx/ui/node_modules
 @$(MAKE) -B $(UI_BUILD_FLAG_FILE)
-@if [ -d "/var/lib/awx" ] ; then \
-mkdir -p /var/lib/awx/public/static/css; \
-mkdir -p /var/lib/awx/public/static/js; \
-mkdir -p /var/lib/awx/public/static/media; \
-cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css; \
-cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js; \
-cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media; \
-fi
 
 ui-devel-instrumented: awx/ui/node_modules
 $(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
@@ -502,12 +427,11 @@ ui-test-general:
 $(NPM_BIN) run --prefix awx/ui pretest
 $(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
 
-# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
 HEADLESS ?= no
 ifeq ($(HEADLESS), yes)
 dist/$(SDIST_TAR_FILE):
 else
-dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
+dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE)
 endif
 $(PYTHON) -m build -s
 ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -528,7 +452,7 @@ awx/projects:
 COMPOSE_UP_OPTS ?=
 COMPOSE_OPTS ?=
 CONTROL_PLANE_NODE_COUNT ?= 1
-EXECUTION_NODE_COUNT ?= 0
+EXECUTION_NODE_COUNT ?= 2
 MINIKUBE_CONTAINER_GROUP ?= false
 MINIKUBE_SETUP ?= false # if false, run minikube separately
 EXTRA_SOURCES_ANSIBLE_OPTS ?=
@@ -539,57 +463,39 @@ endif
 
 docker-compose-sources: .git/hooks/pre-commit
 @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
 fi;
 
-$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
 -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
 -e awx_image_tag=$(COMPOSE_TAG) \
 -e receptor_image=$(RECEPTOR_IMAGE) \
 -e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
 -e execution_node_count=$(EXECUTION_NODE_COUNT) \
 -e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
--e enable_pgbouncer=$(PGBOUNCER) \
 -e enable_keycloak=$(KEYCLOAK) \
 -e enable_ldap=$(LDAP) \
 -e enable_splunk=$(SPLUNK) \
 -e enable_prometheus=$(PROMETHEUS) \
--e enable_grafana=$(GRAFANA) \
--e enable_vault=$(VAULT) \
--e vault_tls=$(VAULT_TLS) \
--e enable_tacacs=$(TACACS) \
--e enable_otel=$(OTEL) \
--e enable_loki=$(LOKI) \
--e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
--e pg_tls=$(PG_TLS) \
-$(EXTRA_SOURCES_ANSIBLE_OPTS)
+-e enable_grafana=$(GRAFANA) $(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
-ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
--e enable_vault=$(VAULT) \
--e vault_tls=$(VAULT_TLS) \
--e enable_ldap=$(LDAP); \
-$(MAKE) docker-compose-up
-
-docker-compose-up:
-$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
-
-docker-compose-down:
-$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
+docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
 
 docker-compose-credential-plugins: awx/projects docker-compose-sources
 echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
-$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
+docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
 
 docker-compose-test: awx/projects docker-compose-sources
-$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
+docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
 
 docker-compose-runtest: awx/projects docker-compose-sources
-$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
+docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
 
 docker-compose-build-swagger: awx/projects docker-compose-sources
-$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
+docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
 
 SCHEMA_DIFF_BASE_BRANCH ?= devel
 detect-schema-change: genschema
|
|||||||
diff -u -b reference-schema.json schema.json
|
diff -u -b reference-schema.json schema.json
|
||||||
|
|
||||||
docker-compose-clean: awx/projects
|
docker-compose-clean: awx/projects
|
||||||
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
|
docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf
|
||||||
|
|
||||||
docker-compose-container-group-clean:
|
docker-compose-container-group-clean:
|
||||||
@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
|
@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
|
||||||
@@ -606,54 +512,33 @@ docker-compose-container-group-clean:
|
|||||||
fi
|
fi
|
||||||
rm -rf tools/docker-compose-minikube/_sources/
|
rm -rf tools/docker-compose-minikube/_sources/
|
||||||
|
|
||||||
.PHONY: Dockerfile.dev
|
## Base development image build
|
||||||
## Generate Dockerfile.dev for awx_devel image
|
docker-compose-build:
|
||||||
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
|
ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True -e receptor_image=$(RECEPTOR_IMAGE)
|
||||||
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
|
DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
|
||||||
-e dockerfile_name=Dockerfile.dev \
|
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
||||||
-e build_dev=True \
|
--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
|
||||||
-e receptor_image=$(RECEPTOR_IMAGE)
|
|
||||||
|
|
||||||
## Build awx_devel image for docker compose development environment
|
|
||||||
docker-compose-build: Dockerfile.dev
|
|
||||||
DOCKER_BUILDKIT=1 docker build \
|
|
||||||
-f Dockerfile.dev \
|
|
||||||
-t $(DEVEL_IMAGE_NAME) \
|
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
|
||||||
$(DOCKER_DEVEL_CACHE_FLAG) .
|
|
||||||
|
|
||||||
.PHONY: docker-compose-buildx
|
|
||||||
## Build awx_devel image for docker compose development environment for multiple architectures
|
|
||||||
docker-compose-buildx: Dockerfile.dev
|
|
||||||
- docker buildx create --name docker-compose-buildx
|
|
||||||
docker buildx use docker-compose-buildx
|
|
||||||
- docker buildx build \
|
|
||||||
--push \
|
|
||||||
--build-arg BUILDKIT_INLINE_CACHE=1 \
|
|
||||||
$(DOCKER_DEVEL_CACHE_FLAG) \
|
|
||||||
--platform=$(PLATFORMS) \
|
|
||||||
--tag $(DEVEL_IMAGE_NAME) \
|
|
||||||
-f Dockerfile.dev .
|
|
||||||
- docker buildx rm docker-compose-buildx
|
|
||||||
|
|
||||||
docker-clean:
|
docker-clean:
|
||||||
-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
|
$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
|
||||||
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
|
if [ "$(shell docker images | grep awx_devel)" ]; then \
|
||||||
|
docker images | grep awx_devel | awk '{print $$3}' | xargs docker rmi --force; \
|
||||||
|
fi
|
||||||
|
|
||||||
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
|
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
|
||||||
docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
|
docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
|
||||||
|
|
||||||
docker-refresh: docker-clean docker-compose
|
docker-refresh: docker-clean docker-compose
|
||||||
|
|
||||||
## Docker Development Environment with Elastic Stack Connected
|
## Docker Development Environment with Elastic Stack Connected
|
||||||
docker-compose-elk: awx/projects docker-compose-sources
|
docker-compose-elk: awx/projects docker-compose-sources
|
||||||
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
|
docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
|
||||||
|
|
||||||
docker-compose-cluster-elk: awx/projects docker-compose-sources
|
docker-compose-cluster-elk: awx/projects docker-compose-sources
|
||||||
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
|
docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
|
||||||
|
|
||||||
docker-compose-container-group:
|
docker-compose-container-group:
|
||||||
MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose
|
MINIKUBE_CONTAINER_GROUP=true make docker-compose
|
||||||
|
|
||||||
clean-elk:
|
clean-elk:
|
||||||
docker stop tools_kibana_1
|
docker stop tools_kibana_1
|
||||||
@@ -663,59 +548,20 @@ clean-elk:
 docker rm tools_elasticsearch_1
 docker rm tools_kibana_1
 
+psql-container:
+docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
+
 VERSION:
 @echo "awx: $(VERSION)"
 
 PYTHON_VERSION:
-@echo "$(subst python,,$(PYTHON))"
+@echo "$(PYTHON)" | sed 's:python::'
 
-.PHONY: version-for-buildyml
-version-for-buildyml:
-@echo $(firstword $(subst +, ,$(VERSION)))
-# version-for-buildyml prints a special version string for build.yml,
-# chopping off the sha after the '+' sign.
-# tools/ansible/build.yml was doing this: make print-VERSION | cut -d + -f -1
-# This does the same thing in native make without
-# the pipe or the extra processes, and now the pb does `make version-for-buildyml`
-# Example:
-# 22.1.1.dev38+g523c0d9781 becomes 22.1.1.dev38
-
-.PHONY: Dockerfile
-## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
--e receptor_image=$(RECEPTOR_IMAGE) \
--e headless=$(HEADLESS)
-
-## Build awx image for deployment on Kubernetes environment.
-awx-kube-build: Dockerfile
-DOCKER_BUILDKIT=1 docker build -f Dockerfile \
---build-arg VERSION=$(VERSION) \
---build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
---build-arg HEADLESS=$(HEADLESS) \
-$(DOCKER_KUBE_CACHE_FLAG) \
--t $(IMAGE_KUBE) .
-
-## Build multi-arch awx image for deployment on Kubernetes environment.
-awx-kube-buildx: Dockerfile
-- docker buildx create --name awx-kube-buildx
-docker buildx use awx-kube-buildx
-- docker buildx build \
---push \
---build-arg VERSION=$(VERSION) \
---build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
---build-arg HEADLESS=$(HEADLESS) \
---platform=$(PLATFORMS) \
-$(DOCKER_KUBE_CACHE_FLAG) \
---tag $(IMAGE_KUBE) \
--f Dockerfile .
-- docker buildx rm awx-kube-buildx
+ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE)
 
-.PHONY: Dockerfile.kube-dev
-## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
+ansible-playbook tools/ansible/dockerfile.yml \
 -e dockerfile_name=Dockerfile.kube-dev \
 -e kube_dev=True \
 -e template_dest=_build_kube_dev \
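The removed comments spell out the intent of version-for-buildyml on the old side: drop the local-version suffix after '+' in native make rather than shelling out. Both forms they mention produce the same string (old-side targets assumed):

    # Pipeline form referenced in the removed comment
    make print-VERSION | cut -d + -f -1     # 22.1.1.dev38+g523c0d9781 -> 22.1.1.dev38
    # Native-make replacement
    make version-for-buildyml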
@@ -725,24 +571,16 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 awx-kube-dev-build: Dockerfile.kube-dev
 DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
 --build-arg BUILDKIT_INLINE_CACHE=1 \
-$(DOCKER_KUBE_DEV_CACHE_FLAG) \
--t $(IMAGE_KUBE_DEV) .
+--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
+-t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
 
-## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
-awx-kube-dev-buildx: Dockerfile.kube-dev
-- docker buildx create --name awx-kube-dev-buildx
-docker buildx use awx-kube-dev-buildx
-- docker buildx build \
---push \
---build-arg BUILDKIT_INLINE_CACHE=1 \
-$(DOCKER_KUBE_DEV_CACHE_FLAG) \
---platform=$(PLATFORMS) \
---tag $(IMAGE_KUBE_DEV) \
--f Dockerfile.kube-dev .
-- docker buildx rm awx-kube-dev-buildx
-
-kind-dev-load: awx-kube-dev-build
-$(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
+## Build awx image for deployment on Kubernetes environment.
+awx-kube-build: Dockerfile
+DOCKER_BUILDKIT=1 docker build -f Dockerfile \
+--build-arg VERSION=$(VERSION) \
+--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+--build-arg HEADLESS=$(HEADLESS) \
+-t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
 
 # Translation TASKS
 # --------------------------------------
@@ -750,21 +588,19 @@ kind-dev-load: awx-kube-dev-build
 ## generate UI .pot file, an empty template of strings yet to be translated
 pot: $(UI_BUILD_FLAG_FILE)
 $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
-$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template --clean
 
 ## generate UI .po files for each locale (will update translated strings for `en`)
 po: $(UI_BUILD_FLAG_FILE)
 $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
-$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings -- --clean
 
+LANG = "en_us"
 ## generate API django .pot .po
 messages:
 @if [ "$(VENV_BASE)" ]; then \
 . $(VENV_BASE)/awx/bin/activate; \
 fi; \
-$(PYTHON) manage.py makemessages -l en_us --keep-pot
+$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
 
-.PHONY: print-%
 print-%:
 @echo $($*)
 
@@ -776,12 +612,12 @@ HELP_FILTER=.PHONY
 ## Display help targets
 help:
 @printf "Available targets:\n"
-@$(MAKE) -s help/generate | grep -vE "\w($(HELP_FILTER))"
+@make -s help/generate | grep -vE "\w($(HELP_FILTER))"
 
 ## Display help for all targets
 help/all:
 @printf "Available targets:\n"
-@$(MAKE) -s help/generate
+@make -s help/generate
 
 ## Generate help output from MAKEFILE_LIST
 help/generate:
@@ -802,7 +638,3 @@ help/generate:
 } \
 { lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
 @printf "\n"
-
-## Display help for ui-next targets
-help/ui-next:
-@$(MAKE) -s help MAKEFILE_LIST="awx/ui_next/Makefile"
|||||||
10
README.md
10
README.md
@@ -1,5 +1,5 @@
|
|||||||
[](https://github.com/ansible/awx/actions/workflows/ci.yml) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
|
[](https://github.com/ansible/awx/actions/workflows/ci.yml) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
|
||||||
[](https://chat.ansible.im/#/welcome) [](https://forum.ansible.com)
|
[](https://libera.chat)
|
||||||
|
|
||||||
<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
|
<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
|
||||||
|
|
||||||
@@ -7,7 +7,7 @@ AWX provides a web-based user interface, REST API, and task engine built on top
|
|||||||
|
|
||||||
To install AWX, please view the [Install guide](./INSTALL.md).
|
To install AWX, please view the [Install guide](./INSTALL.md).
|
||||||
|
|
||||||
To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).
|
To learn more about using AWX, and Tower, view the [Tower docs site](http://docs.ansible.com/ansible-tower/index.html).
|
||||||
|
|
||||||
The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
|
The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
|
||||||
|
|
||||||
@@ -30,12 +30,12 @@ If you're experiencing a problem that you feel is a bug in AWX or have ideas for
|
|||||||
Code of Conduct
|
Code of Conduct
|
||||||
---------------
|
---------------
|
||||||
|
|
||||||
We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
||||||
|
|
||||||
Get Involved
|
Get Involved
|
||||||
------------
|
------------
|
||||||
|
|
||||||
We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
|
We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
|
||||||
|
|
||||||
- Join the [Ansible AWX channel on Matrix](https://matrix.to/#/#awx:ansible.com)
|
- Join the `#ansible-awx` channel on irc.libera.chat
|
||||||
- Join the [Ansible Community Forum](https://forum.ansible.com)
|
- Join the [mailing list](https://groups.google.com/forum/#!forum/awx-project)
|
||||||
awx/__init__.py
@@ -52,14 +52,40 @@ try:
except ImportError:  # pragma: no cover
    MODE = 'production'

+import hashlib

try:
    import django  # noqa: F401
+
+    HAS_DJANGO = True
except ImportError:
-    pass
+    HAS_DJANGO = False
else:
+    from django.db.backends.base import schema
+    from django.db.models import indexes
+    from django.db.backends.utils import names_digest
    from django.db import connection

+if HAS_DJANGO is True:
+
+    # See upgrade blocker note in requirements/README.md
+    try:
+        names_digest('foo', 'bar', 'baz', length=8)
+    except ValueError:
+
+        def names_digest(*args, length):
+            """
+            Generate a 32-bit digest of a set of arguments that can be used to shorten
+            identifying names. Support for use in FIPS environments.
+            """
+            h = hashlib.md5(usedforsecurity=False)
+            for arg in args:
+                h.update(arg.encode())
+            return h.hexdigest()[:length]
+
+        schema.names_digest = names_digest
+        indexes.names_digest = names_digest


def find_commands(management_dir):
    # Modified version of function from django/core/management/__init__.py.
@@ -154,12 +180,10 @@ def manage():
    from django.conf import settings
    from django.core.management import execute_from_command_line

-    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
-    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
-    # The return of connection.pg_version is something like 12013
+    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
    if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
        if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("At a minimum, postgres version 12 is required\n")
+            sys.stderr.write("Postgres version 12 is required\n")
            sys.exit(1)

    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
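Both sides of the version check above divide `connection.pg_version` by 10000 to recover the major version; note that the removed comment's example value `12013` reads like a typo for `120013`, since Django encodes the server version as major*10000 + minor. A minimal sketch of that arithmetic with illustrative values:

```python
# Sketch of the version arithmetic used by the check above, assuming
# Django's integer server-version format (e.g. 120013 for PostgreSQL 12.13).
def pg_major(pg_version: int) -> int:
    return pg_version // 10000

assert pg_major(120013) == 12  # 12.13 -> major 12, passes the >= 12 check
assert pg_major(90624) == 9    # 9.6.24 -> major 9, fails the check, exit(1)
```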
awx/api/conf.py
@@ -1,4 +1,5 @@
# Django
+from django.conf import settings
from django.utils.translation import gettext_lazy as _

# Django REST Framework
@@ -8,7 +9,6 @@ from rest_framework import serializers
from awx.conf import fields, register, register_validate
from awx.api.fields import OAuth2ProviderField
from oauth2_provider.settings import oauth2_settings
-from awx.sso.common import is_remote_auth_enabled


register(
@@ -93,24 +93,25 @@ register(
    default='',
    label=_('Login redirect override URL'),
    help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
-    warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
-    category=_('Authentication'),
-    category_slug='authentication',
-)
-register(
-    'ALLOW_METRICS_FOR_ANONYMOUS_USERS',
-    field_class=fields.BooleanField,
-    default=False,
-    label=_('Allow anonymous users to poll metrics'),
-    help_text=_('If true, anonymous users are allowed to poll metrics.'),
    category=_('Authentication'),
    category_slug='authentication',
)


def authentication_validate(serializer, attrs):
-    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
-        raise serializers.ValidationError(_("There are no remote authentication systems configured."))
+    remote_auth_settings = [
+        'AUTH_LDAP_SERVER_URI',
+        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
+        'SOCIAL_AUTH_GITHUB_KEY',
+        'SOCIAL_AUTH_GITHUB_ORG_KEY',
+        'SOCIAL_AUTH_GITHUB_TEAM_KEY',
+        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
+        'RADIUS_SERVER',
+        'TACACSPLUS_HOST',
+    ]
+    if attrs.get('DISABLE_LOCAL_AUTH', False):
+        if not any(getattr(settings, s, None) for s in remote_auth_settings):
+            raise serializers.ValidationError(_("There are no remote authentication systems configured."))
    return attrs
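Both sides of the `authentication_validate` hunk enforce the same rule: `DISABLE_LOCAL_AUTH` may only be turned on when at least one remote authentication backend is configured. A self-contained sketch of the inlined check on the right-hand side, with a stand-in settings object (the stub class and its values are illustrative, not part of the diff):

```python
# Stand-in for django.conf.settings; only the attributes matter here.
class StubSettings:
    AUTH_LDAP_SERVER_URI = ''
    SOCIAL_AUTH_SAML_ENABLED_IDPS = {'idp1': {}}

remote_auth_settings = ['AUTH_LDAP_SERVER_URI', 'SOCIAL_AUTH_SAML_ENABLED_IDPS']

def remote_auth_enabled(settings) -> bool:
    # A truthy value in any of the listed settings counts as "configured".
    return any(getattr(settings, s, None) for s in remote_auth_settings)

assert remote_auth_enabled(StubSettings()) is True  # SAML IdP is configured
```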
awx/api/fields.py
@@ -80,6 +80,7 @@ class VerbatimField(serializers.Field):


class OAuth2ProviderField(fields.DictField):

    default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
    valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
    child = fields.IntegerField(min_value=1)
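The context lines above describe the whole contract of `OAuth2ProviderField`: a dict whose keys must come from `valid_key_names` and whose values are integers of at least 1. A plain-Python sketch of that validation, not the DRF implementation itself:

```python
VALID_KEY_NAMES = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}

def validate_oauth2_provider(value: dict) -> dict:
    # Reject any key outside the allowed set.
    invalid = sorted(set(value) - VALID_KEY_NAMES)
    if invalid:
        raise ValueError('Invalid key names: {}'.format(', '.join(invalid)))
    # Each value must be a positive integer (mirrors IntegerField(min_value=1)).
    for key, seconds in value.items():
        if not isinstance(seconds, int) or seconds < 1:
            raise ValueError('{} must be an integer >= 1'.format(key))
    return value

validate_oauth2_provider({'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000})
```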
451	awx/api/filters.py	Normal file
@@ -0,0 +1,451 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import re
import json
from functools import reduce

# Django
from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist
from django.db import models
from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey
from django.db.models.functions import Cast
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _

# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
from rest_framework.filters import BaseFilterBackend

# AWX
from awx.main.utils import get_type_for_model, to_python_boolean
from awx.main.utils.db import get_all_field_names


class TypeFilterBackend(BaseFilterBackend):
    """
    Filter on type field now returned with all objects.
    """

    def filter_queryset(self, request, queryset, view):
        try:
            types = None
            for key, value in request.query_params.items():
                if key == 'type':
                    if ',' in value:
                        types = value.split(',')
                    else:
                        types = (value,)
            if types:
                types_map = {}
                for ct in ContentType.objects.filter(Q(app_label='main') | Q(app_label='auth', model='user')):
                    ct_model = ct.model_class()
                    if not ct_model:
                        continue
                    ct_type = get_type_for_model(ct_model)
                    types_map[ct_type] = ct.pk
                model = queryset.model
                model_type = get_type_for_model(model)
                if 'polymorphic_ctype' in get_all_field_names(model):
                    types_pks = set([v for k, v in types_map.items() if k in types])
                    queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
                elif model_type in types:
                    queryset = queryset
                else:
                    queryset = queryset.none()
            return queryset
        except FieldError as e:
            # Return a 400 for invalid field names.
            raise ParseError(*e.args)


def get_fields_from_path(model, path):
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the fields in the line, and also the revised lookup path
    ex., given
        model=Organization
        path='project__timeout'
    returns tuple of fields traversed as well and a corrected path,
    for special cases we do substitutions
        ([<IntegerField for timeout>], 'project__timeout')
    """
    # Store of all the fields used to detect repeats
    field_list = []
    new_parts = []
    for name in path.split('__'):
        if model is None:
            raise ParseError(_('No related model for field {}.').format(name))
        # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
        if model._meta.object_name in ('Project', 'InventorySource'):
            name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
                name, name
            )

        if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
            name = 'polymorphic_ctype'
            new_parts.append('polymorphic_ctype__model')
        else:
            new_parts.append(name)

        if name in getattr(model, 'PASSWORD_FIELDS', ()):
            raise PermissionDenied(_('Filtering on password fields is not allowed.'))
        elif name == 'pk':
            field = model._meta.pk
        else:
            name_alt = name.replace("_", "")
            if name_alt in model._meta.fields_map.keys():
                field = model._meta.fields_map[name_alt]
                new_parts.pop()
                new_parts.append(name_alt)
            else:
                field = model._meta.get_field(name)
            if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
            elif getattr(field, '__prevent_search__', False):
                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
        if field in field_list:
            # Field traversed twice, could create infinite JOINs, DoSing Tower
            raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
        field_list.append(field)
        model = getattr(field, 'related_model', None)

    return field_list, '__'.join(new_parts)


def get_field_from_path(model, path):
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the last field in the line, and the revised lookup path
    ex.
        (<IntegerField for timeout>, 'project__timeout')
    """
    field_list, new_path = get_fields_from_path(model, path)
    return (field_list[-1], new_path)


class FieldLookupBackend(BaseFilterBackend):
    """
    Filter using field lookups provided via query string parameters.
    """

    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate', 'limit')

    SUPPORTED_LOOKUPS = (
        'exact',
        'iexact',
        'contains',
        'icontains',
        'startswith',
        'istartswith',
        'endswith',
        'iendswith',
        'regex',
        'iregex',
        'gt',
        'gte',
        'lt',
        'lte',
        'in',
        'isnull',
        'search',
    )

    # A list of fields that we know can be filtered on without the possiblity
    # of introducing duplicates
    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

    def get_fields_from_lookup(self, model, lookup):

        if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
            path, suffix = lookup.rsplit('__', 1)
        else:
            path = lookup
            suffix = 'exact'

        if not path:
            raise ParseError(_('Query string field name not provided.'))

        # FIXME: Could build up a list of models used across relationships, use
        # those lookups combined with request.user.get_queryset(Model) to make
        # sure user cannot query using objects he could not view.
        field_list, new_path = get_fields_from_path(model, path)

        new_lookup = new_path
        new_lookup = '__'.join([new_path, suffix])
        return field_list, new_lookup

    def get_field_from_lookup(self, model, lookup):
        '''Method to match return type of single field, if needed.'''
        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
        return (field_list[-1], new_lookup)

    def to_python_related(self, value):
        value = force_str(value)
        if value.lower() in ('none', 'null'):
            return None
        else:
            return int(value)

    def value_to_python_for_field(self, field, value):
        if isinstance(field, models.BooleanField):
            return to_python_boolean(value)
        elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
            try:
                return self.to_python_related(value)
            except ValueError:
                raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
        else:
            return field.to_python(value)

    def value_to_python(self, model, lookup, value):
        try:
            lookup.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)

        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
        field = field_list[-1]

        needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)

        # Type names are stored without underscores internally, but are presented and
        # and serialized over the API containing underscores so we remove `_`
        # for polymorphic_ctype__model lookups.
        if new_lookup.startswith('polymorphic_ctype__model'):
            value = value.replace('_', '')
        elif new_lookup.endswith('__isnull'):
            value = to_python_boolean(value)
        elif new_lookup.endswith('__in'):
            items = []
            if not value:
                raise ValueError('cannot provide empty value for __in')
            for item in value.split(','):
                items.append(self.value_to_python_for_field(field, item))
            value = items
        elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
            try:
                re.compile(value)
            except re.error as e:
                raise ValueError(e.args[0])
        elif new_lookup.endswith('__iexact'):
            if not isinstance(field, (CharField, TextField)):
                raise ValueError(f'{field.name} is not a text field and cannot be filtered by case-insensitive search')
        elif new_lookup.endswith('__search'):
            related_model = getattr(field, 'related_model', None)
            if not related_model:
                raise ValueError('%s is not searchable' % new_lookup[:-8])
            new_lookups = []
            for rm_field in related_model._meta.fields:
                if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description', 'playbook'):
                    new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name))
            return value, new_lookups, needs_distinct
        else:
            if isinstance(field, JSONField):
                new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt')
            value = self.value_to_python_for_field(field, value)
        return value, new_lookup, needs_distinct

    def filter_queryset(self, request, queryset, view):
        try:
            # Apply filters specified via query_params. Each entry in the lists
            # below is (negate, field, value).
            and_filters = []
            or_filters = []
            chain_filters = []
            role_filters = []
            search_filters = {}
            needs_distinct = False
            # Can only have two values: 'AND', 'OR'
            # If 'AND' is used, an item must satisfy all conditions to show up in the results.
            # If 'OR' is used, an item just needs to satisfy one condition to appear in results.
            search_filter_relation = 'OR'
            for key, values in request.query_params.lists():
                if key in self.RESERVED_NAMES:
                    continue

                # HACK: make `created` available via API for the Django User ORM model
                # so it keep compatiblity with other objects which exposes the `created` attr.
                if queryset.model._meta.object_name == 'User' and key.startswith('created'):
                    key = key.replace('created', 'date_joined')

                # HACK: Make job event filtering by host name mostly work even
                # when not capturing job event hosts M2M.
                if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'):
                    key = key.replace('hosts__name', 'or__host__name')
                    or_filters.append((False, 'host__name__isnull', True))

                # Custom __int filter suffix (internal use only).
                q_int = False
                if key.endswith('__int'):
                    key = key[:-5]
                    q_int = True

                # RBAC filtering
                if key == 'role_level':
                    role_filters.append(values[0])
                    continue

                # Search across related objects.
                if key.endswith('__search'):
                    if values and ',' in values[0]:
                        search_filter_relation = 'AND'
                        values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values])
                    for value in values:
                        search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value))
                        assert isinstance(new_keys, list)
                        search_filters[search_value] = new_keys
                    # by definition, search *only* joins across relations,
                    # so it _always_ needs a .distinct()
                    needs_distinct = True
                    continue

                # Custom chain__ and or__ filters, mutually exclusive (both can
                # precede not__).
                q_chain = False
                q_or = False
                if key.startswith('chain__'):
                    key = key[7:]
                    q_chain = True
                elif key.startswith('or__'):
                    key = key[4:]
                    q_or = True

                # Custom not__ filter prefix.
                q_not = False
                if key.startswith('not__'):
                    key = key[5:]
                    q_not = True

                # Convert value(s) to python and add to the appropriate list.
                for value in values:
                    if q_int:
                        value = int(value)
                    value, new_key, distinct = self.value_to_python(queryset.model, key, value)
                    if distinct:
                        needs_distinct = True
                    if '_as_txt' in new_key:
                        fname = next(item for item in new_key.split('__') if item.endswith('_as_txt'))
                        queryset = queryset.annotate(**{fname: Cast(fname[:-7], output_field=TextField())})
                    if q_chain:
                        chain_filters.append((q_not, new_key, value))
                    elif q_or:
                        or_filters.append((q_not, new_key, value))
                    else:
                        and_filters.append((q_not, new_key, value))

            # Now build Q objects for database query filter.
            if and_filters or or_filters or chain_filters or role_filters or search_filters:
                args = []
                for n, k, v in and_filters:
                    if n:
                        args.append(~Q(**{k: v}))
                    else:
                        args.append(Q(**{k: v}))
                for role_name in role_filters:
                    if not hasattr(queryset.model, 'accessible_pk_qs'):
                        raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
                    args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
                if or_filters:
                    q = Q()
                    for n, k, v in or_filters:
                        if n:
                            q |= ~Q(**{k: v})
                        else:
                            q |= Q(**{k: v})
                    args.append(q)
                if search_filters and search_filter_relation == 'OR':
                    q = Q()
                    for term, constrains in search_filters.items():
                        for constrain in constrains:
                            q |= Q(**{constrain: term})
                    args.append(q)
                elif search_filters and search_filter_relation == 'AND':
                    for term, constrains in search_filters.items():
                        q_chain = Q()
                        for constrain in constrains:
                            q_chain |= Q(**{constrain: term})
                        queryset = queryset.filter(q_chain)
                for n, k, v in chain_filters:
                    if n:
                        q = ~Q(**{k: v})
                    else:
                        q = Q(**{k: v})
                    queryset = queryset.filter(q)
                queryset = queryset.filter(*args)
                if needs_distinct:
                    queryset = queryset.distinct()
            return queryset
        except (FieldError, FieldDoesNotExist, ValueError, TypeError) as e:
            raise ParseError(e.args[0])
        except ValidationError as e:
            raise ParseError(json.dumps(e.messages, ensure_ascii=False))


class OrderByBackend(BaseFilterBackend):
    """
    Filter to apply ordering based on query string parameters.
    """

    def filter_queryset(self, request, queryset, view):
        try:
            order_by = None
            for key, value in request.query_params.items():
                if key in ('order', 'order_by'):
                    order_by = value
                    if ',' in value:
                        order_by = value.split(',')
                    else:
                        order_by = (value,)
            default_order_by = self.get_default_ordering(view)
            # glue the order by and default order by together so that the default is the backup option
            order_by = list(order_by or []) + list(default_order_by or [])
            if order_by:
                order_by = self._validate_ordering_fields(queryset.model, order_by)
                # Special handling of the type field for ordering. In this
                # case, we're not sorting exactly on the type field, but
                # given the limited number of views with multiple types,
                # sorting on polymorphic_ctype.model is effectively the same.
                new_order_by = []
                if 'polymorphic_ctype' in get_all_field_names(queryset.model):
                    for field in order_by:
                        if field == 'type':
                            new_order_by.append('polymorphic_ctype__model')
                        elif field == '-type':
                            new_order_by.append('-polymorphic_ctype__model')
                        else:
                            new_order_by.append(field)
                else:
                    for field in order_by:
                        if field not in ('type', '-type'):
                            new_order_by.append(field)
                queryset = queryset.order_by(*new_order_by)
            return queryset
        except FieldError as e:
            # Return a 400 for invalid field names.
            raise ParseError(*e.args)

    def get_default_ordering(self, view):
        ordering = getattr(view, 'ordering', None)
        if isinstance(ordering, str):
            return (ordering,)
        return ordering

    def _validate_ordering_fields(self, model, order_by):
        for field_name in order_by:
            # strip off the negation prefix `-` if it exists
            prefix = ''
            path = field_name
            if field_name[0] == '-':
                prefix = field_name[0]
                path = field_name[1:]
            try:
                field, new_path = get_field_from_path(model, path)
                new_path = '{}{}'.format(prefix, new_path)
            except (FieldError, FieldDoesNotExist) as e:
                raise ParseError(e.args[0])
            yield new_path
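`FieldLookupBackend` above turns each query string key into a Django lookup: an optional `chain__` or `or__` prefix, an optional `not__` prefix, then a field path and one of the `SUPPORTED_LOOKUPS` suffixes. A compressed sketch of just the negation/OR composition, assuming Django's `Q` objects (the real backend also validates fields, handles `chain__`, and applies `distinct()` where needed):

```python
from functools import reduce
from django.db.models import Q

def parse_key(key):
    """Split off the or__ / not__ prefixes used by the query grammar."""
    q_or = key.startswith('or__')
    if q_or:
        key = key[4:]
    q_not = key.startswith('not__')
    if q_not:
        key = key[5:]
    return key, q_or, q_not

def build_filters(params):
    and_qs, or_qs = [], []
    for key, value in params:
        key, q_or, q_not = parse_key(key)
        q = ~Q(**{key: value}) if q_not else Q(**{key: value})
        (or_qs if q_or else and_qs).append(q)
    if or_qs:
        and_qs.append(reduce(lambda a, b: a | b, or_qs))
    return and_qs

# ?name__icontains=demo&or__status=failed&or__not__status=successful maps roughly to
# qs.filter(Q(name__icontains='demo'), Q(status='failed') | ~Q(status='successful'))
```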
awx/api/generics.py
@@ -5,11 +5,13 @@
import inspect
import logging
import time
+import uuid

# Django
from django.conf import settings
from django.contrib.auth import views as auth_views
from django.contrib.contenttypes.models import ContentType
+from django.core.cache import cache
from django.core.exceptions import FieldDoesNotExist
from django.db import connection, transaction
from django.db.models.fields.related import OneToOneRel
@@ -26,25 +28,17 @@ from rest_framework import generics
from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import AllowAny
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation

-# django-ansible-base
-from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
-from ansible_base.lib.utils.models import get_all_field_names
-from ansible_base.lib.utils.requests import get_remote_host
-from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
-from ansible_base.rbac.permission_registry import permission_registry
-from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
-
# AWX
+from awx.api.filters import FieldLookupBackend
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
-from awx.main.models.rbac import give_creator_permissions
-from awx.main.access import optimize_queryset
+from awx.main.access import access_registry
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
+from awx.main.utils.db import get_all_field_names
from awx.main.utils.licensing import server_product_name
-from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning
@@ -96,31 +90,25 @@ class LoggedLoginView(auth_views.LoginView):

    def post(self, request, *args, **kwargs):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
-        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
        if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
-            ret.set_cookie(
-                'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
-            )
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
+            ret.set_cookie('userLoggedIn', 'true')
            ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

            return ret
        else:
            if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
            ret.status_code = 401
            return ret


class LoggedLogoutView(auth_views.LogoutView):

-    success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
-
    def dispatch(self, request, *args, **kwargs):
        original_user = getattr(request, 'user', None)
        ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
-        ret.set_cookie('userLoggedIn', 'false', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
+        ret.set_cookie('userLoggedIn', 'false')
        if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
            logger.info("User {} logged out.".format(original_user.username))
        return ret
@@ -147,6 +135,7 @@ def get_default_schema():


class APIView(views.APIView):

    schema = get_default_schema()
    versioning_class = URLPathVersioning

@@ -155,23 +144,22 @@ class APIView(views.APIView):
        Store the Django REST Framework Request object as an attribute on the
        normal Django request, store time the request started.
        """
-        remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
-
        self.time_started = time.time()
        if getattr(settings, 'SQL_DEBUG', False):
            self.queries_before = len(connection.queries)

-        if 'HTTP_X_TRUSTED_PROXY' in request.environ:
-            if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
-                remote_headers = settings.REMOTE_HOST_HEADERS
-            else:
-                logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
-
        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
        # they respect the allowed proxy list
-        if settings.PROXY_IP_ALLOWED_LIST:
-            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
-                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
+        if all(
+            [
+                settings.PROXY_IP_ALLOWED_LIST,
+                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
+                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
+            ]
+        ):
+            for custom_header in settings.REMOTE_HOST_HEADERS:
+                if custom_header.startswith('HTTP_'):
+                    request.environ.pop(custom_header, None)

        drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
        request.drf_request = drf_request
@@ -184,7 +172,7 @@ class APIView(views.APIView):
            self.__init_request_error__ = exc
        except UnsupportedMediaType as exc:
            exc.detail = _(
-                'You did not use correct Content-Type in your HTTP request. If you are using our REST API, the Content-Type must be application/json'
+                'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
            )
            self.__init_request_error__ = exc
        return drf_request
@@ -216,21 +204,17 @@ class APIView(views.APIView):
            return response

        if response.status_code >= 400:
-            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
            msg_data = {
                'status_code': response.status_code,
                'user_name': request.user,
                'url_path': request.path,
-                'remote_addr': ip,
+                'remote_addr': request.META.get('REMOTE_ADDR', None),
            }

            if type(response.data) is dict:
                msg_data['error'] = response.data.get('error', response.status_text)
            elif type(response.data) is list:
-                if len(response.data) > 0 and isinstance(response.data[0], str):
-                    msg_data['error'] = str(response.data[0])
-                else:
-                    msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+                msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
            else:
                msg_data['error'] = response.status_text

@@ -251,8 +235,7 @@ class APIView(views.APIView):

        response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
        time_started = getattr(self, 'time_started', None)
-        if request.user.is_authenticated:
-            response['X-API-Product-Version'] = get_awx_version()
-            response['X-API-Product-Name'] = server_product_name()
+        response['X-API-Product-Version'] = get_awx_version()
+        response['X-API-Product-Name'] = server_product_name()

        response['X-API-Node'] = settings.CLUSTER_HOST_ID
@@ -382,7 +365,12 @@ class GenericAPIView(generics.GenericAPIView, APIView):
            return self.queryset._clone()
        elif self.model is not None:
            qs = self.model._default_manager
-            qs = optimize_queryset(qs)
+            if self.model in access_registry:
+                access_class = access_registry[self.model]
+                if access_class.select_related:
+                    qs = qs.select_related(*access_class.select_related)
+                if access_class.prefetch_related:
+                    qs = qs.prefetch_related(*access_class.prefetch_related)
            return qs
        else:
            return super(GenericAPIView, self).get_queryset()
@@ -490,11 +478,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):

class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
    # Base class for a list view that allows creating new objects.
-    def perform_create(self, serializer):
-        super().perform_create(serializer)
-        if serializer.Meta.model in permission_registry.all_registered_models:
-            if self.request and self.request.user:
-                give_creator_permissions(self.request.user, serializer.instance)
+    pass


class ParentMixin(object):
@@ -529,9 +513,6 @@ class SubListAPIView(ParentMixin, ListAPIView):
    # And optionally (user must have given access permission on parent object
    # to view sublist):
    #   parent_access = 'read'
-    # filter_read_permission sets whether or not to override the default intersection behavior
-    # implemented here
-    filter_read_permission = True

    def get_description_context(self):
        d = super(SubListAPIView, self).get_description_context()
@@ -546,16 +527,12 @@ class SubListAPIView(ParentMixin, ListAPIView):
    def get_queryset(self):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
-        if not self.filter_read_permission:
-            return optimize_queryset(self.get_sublist_queryset(parent))
-        qs = self.request.user.get_queryset(self.model)
-        if hasattr(self, 'parent_key'):
-            # This is vastly preferable for ReverseForeignKey relationships
-            return qs.filter(**{self.parent_key: parent})
-        return qs.distinct() & self.get_sublist_queryset(parent).distinct()
+        qs = self.request.user.get_queryset(self.model).distinct()
+        sublist_qs = self.get_sublist_queryset(parent)
+        return qs & sublist_qs

    def get_sublist_queryset(self, parent):
-        return getattrd(parent, self.relationship)
+        return getattrd(parent, self.relationship).distinct()


class DestroyAPIView(generics.DestroyAPIView):
@@ -604,6 +581,15 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
        d.update({'parent_key': getattr(self, 'parent_key', None)})
        return d

+    def get_queryset(self):
+        if hasattr(self, 'parent_key'):
+            # Prefer this filtering because ForeignKey allows us more assumptions
+            parent = self.get_parent_object()
+            self.check_parent_access(parent)
+            qs = self.request.user.get_queryset(self.model)
+            return qs.filter(**{self.parent_key: parent})
+        return super(SubListCreateAPIView, self).get_queryset()
+
    def create(self, request, *args, **kwargs):
        # If the object ID was not specified, it probably doesn't exist in the
        # DB yet. We want to see if we can create it. The URL may choose to
@@ -689,7 +675,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
        location = None
        created = True

-        # Retrieve the sub object (whether created or by ID).
+        # Retrive the sub object (whether created or by ID).
        sub = get_object_or_400(self.model, pk=sub_id)

        # Verify we have permission to attach.
@@ -814,7 +800,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):


class ResourceAccessList(ParentMixin, ListAPIView):
-    deprecated = True
    serializer_class = ResourceAccessListElementSerializer
    ordering = ('username',)

@@ -822,15 +808,6 @@ class ResourceAccessList(ParentMixin, ListAPIView):
        obj = self.get_parent_object()

        content_type = ContentType.objects.get_for_model(obj)

-        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
-            ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
-            qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
-            auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
-            if auditor_role:
-                qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
-            return qs.distinct()
-
        roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))

        ancestors = set()
@@ -846,8 +823,9 @@ def trigger_delayed_deep_copy(*args, **kwargs):


class CopyAPIView(GenericAPIView):

    serializer_class = CopySerializer
-    permission_classes = (IsAuthenticated,)
+    permission_classes = (AllowAny,)
    copy_return_serializer_class = None
    new_in_330 = True
    new_in_api_v2 = True
@@ -990,13 +968,18 @@ class CopyAPIView(GenericAPIView):
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
        )
        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
-            give_creator_permissions(request.user, new_obj)
+            new_obj.admin_role.members.add(request.user)
        if sub_objs:
+            # store the copied object dict into cache, because it's
+            # often too large for postgres' notification bus
+            # (which has a default maximum message size of 8k)
+            key = 'deep-copy-{}'.format(str(uuid.uuid4()))
+            cache.set(key, sub_objs, timeout=3600)
            permission_check_func = None
            if hasattr(type(self), 'deep_copy_permission_check_func'):
                permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
            trigger_delayed_deep_copy(
-                self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, permission_check_func=permission_check_func
+                self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
            )
        serializer = self._get_copy_return_serializer(new_obj)
        headers = {'Location': new_obj.get_absolute_url(request=request)}
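Both sides of the `initialize_request` hunk guard the same thing: `HTTP_*` entries in `REMOTE_HOST_HEADERS` are client-controlled, so they are dropped unless the request actually arrived through an allowed proxy. A minimal sketch of the right-hand side's behavior over a bare WSGI environ (function name and values are illustrative):

```python
def scrub_forwarded_headers(environ, allowed_proxies, remote_host_headers):
    # Only trust forwarded-for style headers when the direct peer is a
    # known proxy; otherwise remove them so clients cannot spoof origin IPs.
    untrusted = allowed_proxies and all(
        environ.get(key) not in allowed_proxies for key in ('REMOTE_ADDR', 'REMOTE_HOST')
    )
    if untrusted:
        for header in remote_host_headers:
            if header.startswith('HTTP_'):  # only client-supplied headers
                environ.pop(header, None)

environ = {'REMOTE_ADDR': '203.0.113.9', 'HTTP_X_FORWARDED_FOR': '198.51.100.7'}
scrub_forwarded_headers(environ, {'192.0.2.10'}, ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR'])
assert 'HTTP_X_FORWARDED_FOR' not in environ
```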
awx/api/metadata.py
@@ -36,13 +36,11 @@ class Metadata(metadata.SimpleMetadata):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)
-        field_info['hidden'] = getattr(field, 'hidden', False)

        text_attrs = [
            'read_only',
            'label',
            'help_text',
-            'warning_text',
            'min_length',
            'max_length',
            'min_value',
@@ -73,7 +71,7 @@ class Metadata(metadata.SimpleMetadata):
            'url': _('URL for this {}.'),
            'related': _('Data structure with URLs of related resources.'),
            'summary_fields': _(
-                'Data structure with name/description for related resources. The output for some objects may be limited for performance reasons.'
+                'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
            ),
            'created': _('Timestamp when this {} was created.'),
            'modified': _('Timestamp when this {} was last modified.'),
@@ -130,7 +128,7 @@ class Metadata(metadata.SimpleMetadata):
        # Special handling of notification configuration where the required properties
        # are conditional on the type selected.
        if field.field_name == 'notification_configuration':
-            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
+            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                field_info[notification_type_name] = notification_type_class.init_parameters

        # Special handling of notification messages where the required properties
@@ -140,7 +138,7 @@ class Metadata(metadata.SimpleMetadata):
        except (AttributeError, KeyError):
            view_model = None
        if view_model == NotificationTemplate and field.field_name == 'messages':
-            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
+            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                field_info[notification_type_name] = notification_type_class.default_messages

        # Update type of fields returned...
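For orientation, a rough sketch of what the surrounding `Metadata` code produces for one serializer field in an OPTIONS response; the lookup-by-instance detail of DRF's `ClassLookupDict` is elided here and the helper is illustrative, not the class's actual method:

```python
from collections import OrderedDict

def field_metadata(field, field_type, text_attrs):
    # Mirrors the shape built above: type/required first, then whichever of
    # the optional text attributes are actually set on the field.
    info = OrderedDict()
    info['type'] = field_type
    info['required'] = getattr(field, 'required', False)
    for attr in text_attrs:
        value = getattr(field, attr, None)
        if value is not None and value != '':
            info[attr] = value
    return info
```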
awx/api/pagination.py
@@ -24,6 +24,7 @@ class DisabledPaginator(DjangoPaginator):


class Pagination(pagination.PageNumberPagination):

    page_size_query_param = 'page_size'
    max_page_size = settings.MAX_PAGE_SIZE
    count_disabled = False
awx/api/permissions.py
@@ -25,7 +25,6 @@ __all__ = [
    'UserPermission',
    'IsSystemAdminOrAuditor',
    'WorkflowApprovalPermission',
-    'AnalyticsPermission',
]


@@ -251,16 +250,3 @@ class IsSystemAdminOrAuditor(permissions.BasePermission):
class WebhookKeyPermission(permissions.BasePermission):
    def has_object_permission(self, request, view, obj):
        return request.user.can_access(view.model, 'admin', obj, request.data)
-
-
-class AnalyticsPermission(permissions.BasePermission):
-    """
-    Allows GET/POST/OPTIONS to system admins and system auditors.
-    """
-
-    def has_permission(self, request, view):
-        if not (request.user and request.user.is_authenticated):
-            return False
-        if request.method in ["GET", "POST", "OPTIONS"]:
-            return request.user.is_superuser or request.user.is_system_auditor
-        return request.user.is_superuser
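The `AnalyticsPermission` class removed in this hunk is an ordinary DRF permission; on the branch that still defines it, wiring it into a view would look roughly like this (the view itself is illustrative, not part of the diff):

```python
from rest_framework.views import APIView
from rest_framework.response import Response

from awx.api.permissions import AnalyticsPermission

class AnalyticsProxyView(APIView):
    # GET/POST/OPTIONS allowed for superusers and system auditors,
    # everything else for superusers only, per has_permission above.
    permission_classes = (AnalyticsPermission,)

    def get(self, request, format=None):
        return Response({'analytics': 'ok'})
```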
awx/api/renderers.py
@@ -22,6 +22,7 @@ class SurrogateEncoder(encoders.JSONEncoder):


class DefaultJSONRenderer(renderers.JSONRenderer):

    encoder_class = SurrogateEncoder


@@ -60,7 +61,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
        delattr(renderer_context['view'], '_request')

    def get_raw_data_form(self, data, view, method, request):
-        # Set a flag on the view to indicate to the view/serializer that we're
+        # Set a flag on the view to indiciate to the view/serializer that we're
        # creating a raw data form for the browsable API. Store the original
        # request method to determine how to populate the raw data form.
        if request.method in {'OPTIONS', 'DELETE'}:
@@ -94,6 +95,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):


class PlainTextRenderer(renderers.BaseRenderer):

    media_type = 'text/plain'
    format = 'txt'

@@ -104,15 +106,18 @@ class PlainTextRenderer(renderers.BaseRenderer):


class DownloadTextRenderer(PlainTextRenderer):

    format = "txt_download"


class AnsiTextRenderer(PlainTextRenderer):

    media_type = 'text/plain'
    format = 'ansi'


class AnsiDownloadRenderer(PlainTextRenderer):

    format = "ansi_download"
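The renderer subclasses above only override `media_type` and `format`; the shared `render` body lives in `PlainTextRenderer`, outside this hunk. A plausible minimal implementation of such a renderer, offered only as a sketch of the pattern:

```python
from rest_framework import renderers

class PlainTextSketchRenderer(renderers.BaseRenderer):
    media_type = 'text/plain'
    format = 'txt'

    def render(self, data, accepted_media_type=None, renderer_context=None):
        # DRF renderers return bytes; coerce whatever the view handed us.
        if not isinstance(data, str):
            data = str(data)
        return data.encode('utf-8')
```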
File diff suppressed because it is too large
@@ -1,10 +1,16 @@
+import json
 import warnings

-from rest_framework.permissions import AllowAny
-from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
-
-from drf_yasg.views import get_schema_view
-from drf_yasg import openapi
+from coreapi.document import Object, Link
+
+from rest_framework import exceptions
+from rest_framework.permissions import AllowAny
+from rest_framework.renderers import CoreJSONRenderer
+from rest_framework.response import Response
+from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
+from rest_framework.views import APIView
+
+from rest_framework_swagger import renderers


 class SuperUserSchemaGenerator(SchemaGenerator):
@@ -49,15 +55,43 @@ class AutoSchema(DRFAuthSchema):
         return description


-schema_view = get_schema_view(
-    openapi.Info(
-        title="Snippets API",
-        default_version='v1',
-        description="Test description",
-        terms_of_service="https://www.google.com/policies/terms/",
-        contact=openapi.Contact(email="contact@snippets.local"),
-        license=openapi.License(name="BSD License"),
-    ),
-    public=True,
-    permission_classes=[AllowAny],
-)
+class SwaggerSchemaView(APIView):
+    _ignore_model_permissions = True
+    exclude_from_schema = True
+    permission_classes = [AllowAny]
+    renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]
+
+    def get(self, request):
+        generator = SuperUserSchemaGenerator(title='Ansible Automation Platform controller API', patterns=None, urlconf=None)
+        schema = generator.get_schema(request=request)
+        # python core-api doesn't support the deprecation yet, so track it
+        # ourselves and return it in a response header
+        _deprecated = []
+
+        # By default, DRF OpenAPI serialization places all endpoints in
+        # a single node based on their root path (/api). Instead, we want to
+        # group them by topic/tag so that they're categorized in the rendered
+        # output
+        document = schema._data.pop('api')
+        for path, node in document.items():
+            if isinstance(node, Object):
+                for action in node.values():
+                    topic = getattr(action, 'topic', None)
+                    if topic:
+                        schema._data.setdefault(topic, Object())
+                        schema._data[topic]._data[path] = node
+
+                    if isinstance(action, Object):
+                        for link in action.links.values():
+                            if link.deprecated:
+                                _deprecated.append(link.url)
+            elif isinstance(node, Link):
+                topic = getattr(node, 'topic', None)
+                if topic:
+                    schema._data.setdefault(topic, Object())
+                    schema._data[topic]._data[path] = node
+
+        if not schema:
+            raise exceptions.ValidationError('The schema generator did not return a schema Document')
+
+        return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})
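
The added SwaggerSchemaView surfaces deprecated endpoints only through a response header, so a quick way to inspect them is to read that header back. A minimal sketch with `requests`; the host, port, and credentials are illustrative assumptions, and the `/api/swagger/` route is the one registered later in this diff:

    import json
    import requests

    # Dev-environment values below are assumptions, not part of the diff.
    resp = requests.get('http://localhost:8013/api/swagger/', auth=('admin', 'password'))
    # The view serializes the deprecated-link list with json.dumps, so it
    # parses back out of the header directly.
    deprecated = json.loads(resp.headers.get('X-Deprecated-Paths', '[]'))
    print(deprecated)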
@@ -7,12 +7,10 @@ the following fields (some fields may not be visible to all users):
 * `project_base_dir`: Path on the server where projects and playbooks are \
   stored.
 * `project_local_paths`: List of directories beneath `project_base_dir` to
-  use when creating/editing a manual project.
+  use when creating/editing a project.
 * `time_zone`: The configured time zone for the server.
 * `license_info`: Information about the current license.
 * `version`: Version of Ansible Tower package installed.
-* `custom_virtualenvs`: Deprecated venv locations from before migration to
-  execution environments. Export tooling is in `awx-manage` commands.
 * `eula`: The current End-User License Agreement
 {% endifmeth %}
awx/api/templates/api/api_v1_root_view.md (Normal file, 4 lines)
@@ -0,0 +1,4 @@
+Version 1 of the Ansible Tower REST API.
+
+Make a GET request to this resource to obtain a list of all child resources
+available via the API.
@@ -1,41 +0,0 @@
-# Bulk Host Create
-
-This endpoint allows the client to create multiple hosts and associate them with an inventory. They may do this by providing the inventory ID and a list of json that would normally be provided to create hosts.
-
-Example:
-
-    {
-        "inventory": 1,
-        "hosts": [
-            {"name": "example1.com", "variables": "ansible_connection: local"},
-            {"name": "example2.com"}
-        ]
-    }
-
-Return data:
-
-    {
-        "url": "/api/v2/inventories/3/hosts/",
-        "hosts": [
-            {
-                "name": "example1.com",
-                "enabled": true,
-                "instance_id": "",
-                "description": "",
-                "variables": "ansible_connection: local",
-                "id": 1255,
-                "url": "/api/v2/hosts/1255/",
-                "inventory": "/api/v2/inventories/3/"
-            },
-            {
-                "name": "example2.com",
-                "enabled": true,
-                "instance_id": "",
-                "description": "",
-                "variables": "",
-                "id": 1256,
-                "url": "/api/v2/hosts/1256/",
-                "inventory": "/api/v2/inventories/3/"
-            }
-        ]
-    }
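
The removed endpoint can be exercised directly with `requests`. A minimal sketch; the base URL and credentials are illustrative assumptions, while the `/api/v2/bulk/host_create/` route comes from the URL patterns removed later in this diff:

    import requests

    payload = {
        "inventory": 1,
        "hosts": [
            {"name": "example1.com", "variables": "ansible_connection: local"},
            {"name": "example2.com"},
        ],
    }
    # Base URL and credentials are assumptions for the sketch.
    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/host_create/",
        json=payload,
        auth=("admin", "password"),
    )
    print(resp.status_code, resp.json())  # the removed view returns 201 on success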
@@ -1,22 +0,0 @@
-# Bulk Host Delete
-
-This endpoint allows the client to delete multiple hosts from inventories.
-They may do this by providing a list of hosts ID's to be deleted.
-
-Example:
-
-    {
-        "hosts": [1, 2, 3, 4, 5]
-    }
-
-Return data:
-
-    {
-        "hosts": {
-            "1": "The host a1 was deleted",
-            "2": "The host a2 was deleted",
-            "3": "The host a3 was deleted",
-            "4": "The host a4 was deleted",
-            "5": "The host a5 was deleted",
-        }
-    }
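
The matching call for the delete endpoint follows the same shape; base URL and credentials are again assumptions:

    import requests

    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/host_delete/",  # route removed later in this diff
        json={"hosts": [1, 2, 3, 4, 5]},
        auth=("admin", "password"),
    )
    print(resp.json())  # per-host deletion messages, keyed by id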
@@ -1,13 +0,0 @@
-# Bulk Job Launch
-
-This endpoint allows the client to launch multiple UnifiedJobTemplates at a time, along side any launch time parameters that they would normally set at launch time.
-
-Example:
-
-    {
-        "name": "my bulk job",
-        "jobs": [
-            {"unified_job_template": 7, "inventory": 2},
-            {"unified_job_template": 7, "credentials": [3]}
-        ]
-    }
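
A minimal sketch of launching the same payload with `requests`; base URL, template and credential ids, and login are illustrative assumptions:

    import requests

    payload = {
        "name": "my bulk job",
        "jobs": [
            {"unified_job_template": 7, "inventory": 2},
            {"unified_job_template": 7, "credentials": [3]},
        ],
    }
    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/job_launch/",  # route removed later in this diff
        json=payload,
        auth=("admin", "password"),
    )
    print(resp.status_code)  # 201 when the serializer validates, per the removed view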
@@ -1,3 +0,0 @@
-# Bulk Actions
-
-This endpoint lists available bulk action APIs.
@@ -3,7 +3,7 @@ Make a GET request to this resource to retrieve aggregate statistics about inven
 Including fetching the number of total hosts tracked by Tower over an amount of time and the current success or
 failed status of hosts which have run jobs within an Inventory.

-## Parameters and Filtering
+## Parmeters and Filtering

 The `period` of the data can be adjusted with:

@@ -24,7 +24,7 @@ Data about the number of hosts will be returned in the following format:
 Each element contains an epoch timestamp represented in seconds and a numerical value indicating
 the number of hosts that exist at a given moment

-Data about failed and successful hosts by inventory will be given as:
+Data about failed and successfull hosts by inventory will be given as:

     {
         "sources": [
@@ -2,7 +2,7 @@

 Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.

-## Parameters and Filtering
+## Parmeters and Filtering

 The `period` of the data can be adjusted with:

awx/api/templates/api/host_fact_compare_view.md (Normal file, 11 lines)
@@ -0,0 +1,11 @@
+# List Fact Scans for a Host Specific Host Scan
+
+Make a GET request to this resource to retrieve system tracking data for a particular scan
+
+You may filter by datetime:
+
+`?datetime=2015-06-01`
+
+and module
+
+`?datetime=2015-06-01&module=ansible`
awx/api/templates/api/host_fact_versions_list.md (Normal file, 11 lines)
@@ -0,0 +1,11 @@
+# List Fact Scans for a Host by Module and Date
+
+Make a GET request to this resource to retrieve system tracking scans by module and date/time
+
+You may filter scan runs using the `from` and `to` properties:
+
+`?from=2015-06-01%2012:00:00&to=2015-06-03`
+
+You may also filter by module
+
+`?module=packages`
awx/api/templates/api/host_insights.md (Normal file, 1 line)
@@ -0,0 +1 @@
+# List Red Hat Insights for a Host
@@ -1,18 +0,0 @@
-{% ifmeth GET %}
-# Retrieve {{ model_verbose_name|title|anora }}:
-
-Make GET request to this resource to retrieve a single {{ model_verbose_name }}
-record containing the following fields:
-
-{% include "api/_result_fields_common.md" %}
-{% endifmeth %}
-
-{% ifmeth DELETE %}
-# Delete {{ model_verbose_name|title|anora }}:
-
-Make a DELETE request to this resource to soft-delete this {{ model_verbose_name }}.
-
-A soft deletion will mark the `deleted` field as true and exclude the host
-metric from license calculations.
-This may be undone later if the same hostname is automated again afterwards.
-{% endifmeth %}
@@ -18,7 +18,7 @@ inventory sources:
 * `inventory_update`: ID of the inventory update job that was started.
   (integer, read-only)
 * `project_update`: ID of the project update job that was started if this inventory source is an SCM source.
-  (integer, read-only, optional)
+  (interger, read-only, optional)

 Note: All manual inventory sources (source="") will be ignored by the update_inventory_sources endpoint. This endpoint will not update inventory sources for Smart Inventories.
awx/api/templates/api/job_start.md (Normal file, 21 lines)
@@ -0,0 +1,21 @@
+{% ifmeth GET %}
+# Determine if a Job can be started
+
+Make a GET request to this resource to determine if the job can be started and
+whether any passwords are required to start the job. The response will include
+the following fields:
+
+* `can_start`: Flag indicating if this job can be started (boolean, read-only)
+* `passwords_needed_to_start`: Password names required to start the job (array,
+  read-only)
+{% endifmeth %}
+
+{% ifmeth POST %}
+# Start a Job
+Make a POST request to this resource to start the job. If any passwords are
+required, they must be passed via POST data.
+
+If successful, the response status code will be 202. If any required passwords
+are not provided, a 400 status code will be returned. If the job cannot be
+started, a 405 status code will be returned.
+{% endifmeth %}
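
A minimal sketch of the two-step flow this template describes, using `requests`; the base URL, job id, credentials, and password name are all illustrative assumptions:

    import requests

    url = "https://awx.example.com/api/v2/jobs/42/start/"  # path and job id assumed

    # GET: check whether the job can start and which passwords it needs.
    info = requests.get(url, auth=("admin", "password")).json()
    print(info["can_start"], info["passwords_needed_to_start"])

    # POST: supply any required passwords; 202 on success per the template.
    resp = requests.post(url, json={"ssh_password": "secret"}, auth=("admin", "password"))
    print(resp.status_code)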
@@ -2,35 +2,21 @@ receptor_user: awx
 receptor_group: awx
 receptor_verify: true
 receptor_tls: true
-receptor_mintls13: false
-{% if instance.node_type == "execution" %}
 receptor_work_commands:
   ansible-runner:
     command: ansible-runner
     params: worker
     allowruntimeparams: true
     verifysignature: true
-additional_python_packages:
-  - ansible-runner
-{% endif %}
-custom_worksign_public_keyfile: receptor/work_public_key.pem
+custom_worksign_public_keyfile: receptor/work-public-key.pem
 custom_tls_certfile: receptor/tls/receptor.crt
 custom_tls_keyfile: receptor/tls/receptor.key
-custom_ca_certfile: receptor/tls/ca/mesh-CA.crt
-{% if listener_port %}
-receptor_protocol: {{ listener_protocol }}
+custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
+receptor_protocol: 'tcp'
 receptor_listener: true
-receptor_port: {{ listener_port }}
-{% else %}
-receptor_listener: false
-{% endif %}
-{% if peers %}
-receptor_peers:
-{% for peer in peers %}
-  - address: {{ peer.address }}
-    protocol: {{ peer.protocol }}
-{% endfor %}
-{% endif %}
+receptor_port: {{ instance.listener_port }}
+receptor_dependencies:
+  - python39-pip
 {% verbatim %}
 podman_user: "{{ receptor_user }}"
 podman_group: "{{ receptor_group }}"
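
A minimal sketch of how the base-side template's node-type branch behaves when rendered with jinja2 directly; the `instance` variable mirrors what the controller supplies, and the values are illustrative:

    from jinja2 import Template

    tpl = Template(
        "receptor_tls: true\n"
        "{% if instance.node_type == 'execution' %}"
        "receptor_work_commands:\n"
        "  ansible-runner:\n"
        "    command: ansible-runner\n"
        "{% endif %}"
    )
    # Execution nodes get the work-command stanza; hop nodes skip it.
    print(tpl.render(instance={"node_type": "execution"}))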
@@ -1,16 +1,20 @@
+{% verbatim %}
 ---
 - hosts: all
   become: yes
   tasks:
     - name: Create the receptor user
       user:
-{% verbatim %}
         name: "{{ receptor_user }}"
-{% endverbatim %}
         shell: /bin/bash
-{% if instance.node_type == "execution" %}
+    - name: Enable Copr repo for Receptor
+      command: dnf copr enable ansible-awx/receptor -y
     - import_role:
         name: ansible.receptor.podman
-{% endif %}
     - import_role:
         name: ansible.receptor.setup
+    - name: Install ansible-runner
+      pip:
+        name: ansible-runner
+        executable: pip3.9
+{% endverbatim %}
@@ -1,4 +1,4 @@
 ---
 collections:
   - name: ansible.receptor
-    version: 2.0.3
+    version: 1.1.0
@@ -1,31 +0,0 @@
-# Copyright (c) 2017 Ansible, Inc.
-# All Rights Reserved.
-
-from django.urls import re_path
-
-import awx.api.views.analytics as analytics
-
-
-urls = [
-    re_path(r'^$', analytics.AnalyticsRootView.as_view(), name='analytics_root_view'),
-    re_path(r'^authorized/$', analytics.AnalyticsAuthorizedView.as_view(), name='analytics_authorized'),
-    re_path(r'^reports/$', analytics.AnalyticsReportsList.as_view(), name='analytics_reports_list'),
-    re_path(r'^report/(?P<slug>[\w-]+)/$', analytics.AnalyticsReportDetail.as_view(), name='analytics_report_detail'),
-    re_path(r'^report_options/$', analytics.AnalyticsReportOptionsList.as_view(), name='analytics_report_options_list'),
-    re_path(r'^adoption_rate/$', analytics.AnalyticsAdoptionRateList.as_view(), name='analytics_adoption_rate'),
-    re_path(r'^adoption_rate_options/$', analytics.AnalyticsAdoptionRateList.as_view(), name='analytics_adoption_rate_options'),
-    re_path(r'^event_explorer/$', analytics.AnalyticsEventExplorerList.as_view(), name='analytics_event_explorer'),
-    re_path(r'^event_explorer_options/$', analytics.AnalyticsEventExplorerList.as_view(), name='analytics_event_explorer_options'),
-    re_path(r'^host_explorer/$', analytics.AnalyticsHostExplorerList.as_view(), name='analytics_host_explorer'),
-    re_path(r'^host_explorer_options/$', analytics.AnalyticsHostExplorerList.as_view(), name='analytics_host_explorer_options'),
-    re_path(r'^job_explorer/$', analytics.AnalyticsJobExplorerList.as_view(), name='analytics_job_explorer'),
-    re_path(r'^job_explorer_options/$', analytics.AnalyticsJobExplorerList.as_view(), name='analytics_job_explorer_options'),
-    re_path(r'^probe_templates/$', analytics.AnalyticsProbeTemplatesList.as_view(), name='analytics_probe_templates_explorer'),
-    re_path(r'^probe_templates_options/$', analytics.AnalyticsProbeTemplatesList.as_view(), name='analytics_probe_templates_options'),
-    re_path(r'^probe_template_for_hosts/$', analytics.AnalyticsProbeTemplateForHostsList.as_view(), name='analytics_probe_template_for_hosts_explorer'),
-    re_path(r'^probe_template_for_hosts_options/$', analytics.AnalyticsProbeTemplateForHostsList.as_view(), name='analytics_probe_template_for_hosts_options'),
-    re_path(r'^roi_templates/$', analytics.AnalyticsRoiTemplatesList.as_view(), name='analytics_roi_templates_explorer'),
-    re_path(r'^roi_templates_options/$', analytics.AnalyticsRoiTemplatesList.as_view(), name='analytics_roi_templates_options'),
-]
-
-__all__ = ['urls']
@@ -1,10 +0,0 @@
-# Copyright (c) 2017 Ansible, Inc.
-# All Rights Reserved.
-
-from django.urls import re_path
-
-from awx.api.views import HostMetricList, HostMetricDetail
-
-urls = [re_path(r'^$', HostMetricList.as_view(), name='host_metric_list'), re_path(r'^(?P<pk>[0-9]+)/$', HostMetricDetail.as_view(), name='host_metric_detail')]
-
-__all__ = ['urls']
@@ -10,7 +10,6 @@ from awx.api.views import (
     InstanceInstanceGroupsList,
     InstanceHealthCheck,
     InstancePeersList,
-    InstanceReceptorAddressesList,
 )
 from awx.api.views.instance_install_bundle import InstanceInstallBundle

@@ -22,7 +21,6 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
     re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
     re_path(r'^(?P<pk>[0-9]+)/peers/$', InstancePeersList.as_view(), name='instance_peers_list'),
-    re_path(r'^(?P<pk>[0-9]+)/receptor_addresses/$', InstanceReceptorAddressesList.as_view(), name='instance_receptor_addresses_list'),
     re_path(r'^(?P<pk>[0-9]+)/install_bundle/$', InstanceInstallBundle.as_view(), name='instance_install_bundle'),
 ]

@@ -3,14 +3,7 @@

 from django.urls import re_path

-from awx.api.views import (
-    InstanceGroupList,
-    InstanceGroupDetail,
-    InstanceGroupUnifiedJobsList,
-    InstanceGroupInstanceList,
-    InstanceGroupAccessList,
-    InstanceGroupObjectRolesList,
-)
+from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList


 urls = [
@@ -18,8 +11,6 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),
     re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'),
     re_path(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'),
-    re_path(r'^(?P<pk>[0-9]+)/access_list/$', InstanceGroupAccessList.as_view(), name='instance_group_access_list'),
-    re_path(r'^(?P<pk>[0-9]+)/object_roles/$', InstanceGroupObjectRolesList.as_view(), name='instance_group_object_role_list'),
 ]

 __all__ = ['urls']
@@ -6,10 +6,7 @@ from django.urls import re_path
 from awx.api.views.inventory import (
     InventoryList,
     InventoryDetail,
-    ConstructedInventoryDetail,
-    ConstructedInventoryList,
     InventoryActivityStreamList,
-    InventoryInputInventoriesList,
     InventoryJobTemplateList,
     InventoryAccessList,
     InventoryObjectRolesList,
@@ -40,7 +37,6 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'),
     re_path(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'),
     re_path(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'),
-    re_path(r'^(?P<pk>[0-9]+)/input_inventories/$', InventoryInputInventoriesList.as_view(), name='inventory_input_inventories'),
     re_path(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'),
     re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'),
     re_path(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'),
@@ -52,10 +48,4 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
 ]

-# Constructed inventory special views
-constructed_inventory_urls = [
-    re_path(r'^$', ConstructedInventoryList.as_view(), name='constructed_inventory_list'),
-    re_path(r'^(?P<pk>[0-9]+)/$', ConstructedInventoryDetail.as_view(), name='constructed_inventory_detail'),
-]
-
-__all__ = ['urls', 'constructed_inventory_urls']
+__all__ = ['urls']
@@ -1,17 +0,0 @@
-# Copyright (c) 2017 Ansible, Inc.
-# All Rights Reserved.
-
-from django.urls import re_path
-
-from awx.api.views import (
-    ReceptorAddressesList,
-    ReceptorAddressDetail,
-)
-
-
-urls = [
-    re_path(r'^$', ReceptorAddressesList.as_view(), name='receptor_addresses_list'),
-    re_path(r'^(?P<pk>[0-9]+)/$', ReceptorAddressDetail.as_view(), name='receptor_address_detail'),
-]
-
-__all__ = ['urls']
@@ -30,30 +30,19 @@ from awx.api.views import (
     OAuth2TokenList,
     ApplicationOAuth2TokenList,
     OAuth2ApplicationDetail,
-    HostMetricSummaryMonthlyList,
 )

-from awx.api.views.bulk import (
-    BulkView,
-    BulkHostCreateView,
-    BulkHostDeleteView,
-    BulkJobLaunchView,
-)
-
 from awx.api.views.mesh_visualizer import MeshVisualizer

 from awx.api.views.metrics import MetricsView
-from awx.api.views.analytics import AWX_ANALYTICS_API_PREFIX

 from .organization import urls as organization_urls
 from .user import urls as user_urls
 from .project import urls as project_urls
 from .project_update import urls as project_update_urls
-from .inventory import urls as inventory_urls, constructed_inventory_urls
+from .inventory import urls as inventory_urls
 from .execution_environments import urls as execution_environment_urls
 from .team import urls as team_urls
 from .host import urls as host_urls
-from .host_metric import urls as host_metric_urls
 from .group import urls as group_urls
 from .inventory_source import urls as inventory_source_urls
 from .inventory_update import urls as inventory_update_urls
@@ -84,8 +73,7 @@ from .oauth2 import urls as oauth2_urls
 from .oauth2_root import urls as oauth2_root_urls
 from .workflow_approval_template import urls as workflow_approval_template_urls
 from .workflow_approval import urls as workflow_approval_urls
-from .analytics import urls as analytics_urls
-from .receptor_address import urls as receptor_address_urls

 v2_urls = [
     re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
@@ -122,10 +110,7 @@ v2_urls = [
     re_path(r'^project_updates/', include(project_update_urls)),
     re_path(r'^teams/', include(team_urls)),
     re_path(r'^inventories/', include(inventory_urls)),
-    re_path(r'^constructed_inventories/', include(constructed_inventory_urls)),
     re_path(r'^hosts/', include(host_urls)),
-    re_path(r'^host_metrics/', include(host_metric_urls)),
-    re_path(r'^host_metric_summary_monthly/$', HostMetricSummaryMonthlyList.as_view(), name='host_metric_summary_monthly_list'),
     re_path(r'^groups/', include(group_urls)),
     re_path(r'^inventory_sources/', include(inventory_source_urls)),
     re_path(r'^inventory_updates/', include(inventory_update_urls)),
@@ -149,14 +134,8 @@ v2_urls = [
     re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'),
     re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'),
     re_path(r'^activity_stream/', include(activity_stream_urls)),
-    re_path(rf'^{AWX_ANALYTICS_API_PREFIX}/', include(analytics_urls)),
     re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)),
     re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
-    re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
-    re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'),
-    re_path(r'^bulk/host_delete/$', BulkHostDeleteView.as_view(), name='bulk_host_delete'),
-    re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'),
-    re_path(r'^receptor_addresses/', include(receptor_address_urls)),
 ]

@@ -170,13 +149,10 @@ urlpatterns = [
 ]
 if MODE == 'development':
     # Only include these if we are in the development environment
-    from awx.api.swagger import schema_view
+    from awx.api.swagger import SwaggerSchemaView
+
+    urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]

     from awx.api.urls.debug import urls as debug_urls

     urlpatterns += [re_path(r'^debug/', include(debug_urls))]
-    urlpatterns += [
-        re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
-        re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
-        re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
-    ]
@@ -1,11 +1,10 @@
 from django.urls import re_path

-from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver
+from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


 urlpatterns = [
     re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
     re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
     re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'),
-    re_path(r'^bitbucket_dc/$', BitbucketDcWebhookReceiver.as_view(), name='webhook_receiver_bitbucket_dc'),
 ]
@@ -33,6 +33,7 @@ class HostnameRegexValidator(RegexValidator):
         return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"

     def __validate(self, value):
         if ' ' in value:
             return False, ValidationError("whitespaces in hostnames are illegal")

@@ -2,21 +2,28 @@
 # All Rights Reserved.

 from django.conf import settings
+from django.urls import NoReverseMatch

-from rest_framework.reverse import reverse as drf_reverse
+from rest_framework.reverse import _reverse
 from rest_framework.versioning import URLPathVersioning as BaseVersioning


-def is_optional_api_urlpattern_prefix_request(request):
-    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
-        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
-            return True
-    return False
-
-
-def transform_optional_api_urlpattern_prefix_url(request, url):
-    if is_optional_api_urlpattern_prefix_request(request):
-        url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
+def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
+    """
+    Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
+    query string parameters.
+    """
+    scheme = getattr(request, 'versioning_scheme', None)
+    if scheme is not None:
+        try:
+            url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
+        except NoReverseMatch:
+            # In case the versioning scheme reversal fails, fallback to the
+            # default implementation
+            url = _reverse(viewname, args, kwargs, request, format, **extra)
+    else:
+        url = _reverse(viewname, args, kwargs, request, format, **extra)
     return url

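
A minimal sketch of the removed prefix-rewrite helpers, with the prefix value as an illustrative assumption:

    # Mirrors transform_optional_api_urlpattern_prefix_url on the base side;
    # the prefix value "controller" is an assumption for the sketch.
    OPTIONAL_API_URLPATTERN_PREFIX = "controller"

    def transform(url):
        return url.replace("/api", f"/api/{OPTIONAL_API_URLPATTERN_PREFIX}")

    print(transform("/api/v2/ping/"))  # -> /api/controller/v2/ping/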
File diff suppressed because it is too large
@@ -1,296 +0,0 @@
-import requests
-import logging
-import urllib.parse as urlparse
-
-from django.conf import settings
-from django.utils.translation import gettext_lazy as _
-from django.utils import translation
-
-from awx.api.generics import APIView, Response
-from awx.api.permissions import AnalyticsPermission
-from awx.api.versioning import reverse
-from awx.main.utils import get_awx_version
-from rest_framework import status
-
-from collections import OrderedDict
-
-AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
-AWX_ANALYTICS_API_PREFIX = 'analytics'
-
-ERROR_UPLOAD_NOT_ENABLED = "analytics-upload-not-enabled"
-ERROR_MISSING_URL = "missing-url"
-ERROR_MISSING_USER = "missing-user"
-ERROR_MISSING_PASSWORD = "missing-password"
-ERROR_NO_DATA_OR_ENTITLEMENT = "no-data-or-entitlement"
-ERROR_NOT_FOUND = "not-found"
-ERROR_UNAUTHORIZED = "unauthorized"
-ERROR_UNKNOWN = "unknown"
-ERROR_UNSUPPORTED_METHOD = "unsupported-method"
-
-logger = logging.getLogger('awx.api.views.analytics')
-
-
-class MissingSettings(Exception):
-    """Settings are not correct Exception"""
-
-    pass
-
-
-class GetNotAllowedMixin(object):
-    def get(self, request, format=None):
-        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
-
-
-class AnalyticsRootView(APIView):
-    permission_classes = (AnalyticsPermission,)
-    name = _('Automation Analytics')
-    swagger_topic = 'Automation Analytics'
-
-    def get(self, request, format=None):
-        data = OrderedDict()
-        data['authorized'] = reverse('api:analytics_authorized', request=request)
-        data['reports'] = reverse('api:analytics_reports_list', request=request)
-        data['report_options'] = reverse('api:analytics_report_options_list', request=request)
-        data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
-        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
-        data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
-        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
-        data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
-        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
-        data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
-        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
-        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
-        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
-        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
-        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
-        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
-        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
-        return Response(data)
-
-
-class AnalyticsGenericView(APIView):
-    """
-    Example:
-        headers = {
-            'Content-Type': 'application/json',
-        }
-
-        params = {
-            'limit': '20',
-            'offset': '0',
-            'sort_by': 'name:asc',
-        }
-
-        json_data = {
-            'limit': '20',
-            'offset': '0',
-            'sort_options': 'name',
-            'sort_order': 'asc',
-            'tags': [],
-            'slug': [],
-            'name': [],
-            'description': '',
-        }
-
-        response = requests.post(f'{AUTOMATION_ANALYTICS_API_URL}/reports/', params=params,
-                                 headers=headers, json=json_data)
-
-        return Response(response.json(), status=response.status_code)
-    """
-
-    permission_classes = (AnalyticsPermission,)
-
-    @staticmethod
-    def _request_headers(request):
-        headers = {}
-        for header in ['Content-Type', 'Content-Length', 'Accept-Encoding', 'User-Agent', 'Accept']:
-            if request.headers.get(header, None):
-                headers[header] = request.headers.get(header)
-        headers['X-Rh-Analytics-Source'] = 'controller'
-        headers['X-Rh-Analytics-Source-Version'] = get_awx_version()
-        headers['Accept-Language'] = translation.get_language()
-
-        return headers
-
-    @staticmethod
-    def _get_analytics_path(request_path):
-        parts = request_path.split(f'{AWX_ANALYTICS_API_PREFIX}/')
-        path_specific = parts[-1]
-        return f"{AUTOMATION_ANALYTICS_API_URL_PATH}/{path_specific}"
-
-    def _get_analytics_url(self, request_path):
-        analytics_path = self._get_analytics_path(request_path)
-        url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
-        if not url:
-            raise MissingSettings(ERROR_MISSING_URL)
-        url_parts = urlparse.urlsplit(url)
-        analytics_url = urlparse.urlunsplit([url_parts.scheme, url_parts.netloc, analytics_path, url_parts.query, url_parts.fragment])
-        return analytics_url
-
-    @staticmethod
-    def _get_setting(setting_name, default, error_message):
-        setting = getattr(settings, setting_name, default)
-        if not setting:
-            raise MissingSettings(error_message)
-        return setting
-
-    @staticmethod
-    def _error_response(keyword, message=None, remote=True, remote_status_code=None, status_code=status.HTTP_403_FORBIDDEN):
-        text = {"error": {"remote": remote, "remote_status": remote_status_code, "keyword": keyword}}
-        if message:
-            text["error"]["message"] = message
-        return Response(text, status=status_code)
-
-    def _error_response_404(self, response):
-        try:
-            json_response = response.json()
-            # Subscription/entitlement problem or missing tenant data in AA db => HTTP 403
-            message = json_response.get('error', None)
-            if message:
-                return self._error_response(ERROR_NO_DATA_OR_ENTITLEMENT, message, remote=True, remote_status_code=response.status_code)
-
-            # Standard 404 problem => HTTP 404
-            message = json_response.get('detail', None) or response.text
-        except requests.exceptions.JSONDecodeError:
-            # Unexpected text => still HTTP 404
-            message = response.text
-
-        return self._error_response(ERROR_NOT_FOUND, message, remote=True, remote_status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND)
-
-    @staticmethod
-    def _update_response_links(json_response):
-        if not json_response.get('links', None):
-            return
-
-        for key, value in json_response['links'].items():
-            if value:
-                json_response['links'][key] = value.replace(AUTOMATION_ANALYTICS_API_URL_PATH, f"/api/v2/{AWX_ANALYTICS_API_PREFIX}")
-
-    def _forward_response(self, response):
-        try:
-            content_type = response.headers.get('content-type', '')
-            if content_type.find('application/json') != -1:
-                json_response = response.json()
-                self._update_response_links(json_response)
-
-                return Response(json_response, status=response.status_code)
-        except Exception as e:
-            logger.error(f"Analytics API: Response error: {e}")
-
-        return Response(response.content, status=response.status_code)
-
-    def _send_to_analytics(self, request, method):
-        try:
-            headers = self._request_headers(request)
-
-            self._get_setting('INSIGHTS_TRACKING_STATE', False, ERROR_UPLOAD_NOT_ENABLED)
-            url = self._get_analytics_url(request.path)
-            rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
-            rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
-
-            if method not in ["GET", "POST", "OPTIONS"]:
-                return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
-            else:
-                response = requests.request(
-                    method,
-                    url,
-                    auth=(rh_user, rh_password),
-                    verify=settings.INSIGHTS_CERT_PATH,
-                    params=request.query_params,
-                    headers=headers,
-                    json=request.data,
-                    timeout=(31, 31),
-                )
-                #
-                # Missing or wrong user/pass
-                #
-                if response.status_code == status.HTTP_401_UNAUTHORIZED:
-                    text = (response.text or '').rstrip("\n")
-                    return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
-                #
-                # Not found, No entitlement or No data in Analytics
-                #
-                elif response.status_code == status.HTTP_404_NOT_FOUND:
-                    return self._error_response_404(response)
-                #
-                # Success or not a 401/404 errors are just forwarded
-                #
-                else:
-                    return self._forward_response(response)
-
-        except MissingSettings as e:
-            logger.warning(f"Analytics API: Setting missing: {e.args[0]}")
-            return self._error_response(e.args[0], remote=False)
-        except requests.exceptions.RequestException as e:
-            logger.error(f"Analytics API: Request error: {e}")
-            return self._error_response(ERROR_UNKNOWN, str(e), remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
-        except Exception as e:
-            logger.error(f"Analytics API: Error: {e}")
-            return self._error_response(ERROR_UNKNOWN, str(e), remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
-
-
-class AnalyticsGenericListView(AnalyticsGenericView):
-    def get(self, request, format=None):
-        return self._send_to_analytics(request, method="GET")
-
-    def post(self, request, format=None):
-        return self._send_to_analytics(request, method="POST")
-
-    def options(self, request, format=None):
-        return self._send_to_analytics(request, method="OPTIONS")
-
-
-class AnalyticsGenericDetailView(AnalyticsGenericView):
-    def get(self, request, slug, format=None):
-        return self._send_to_analytics(request, method="GET")
-
-    def post(self, request, slug, format=None):
-        return self._send_to_analytics(request, method="POST")
-
-    def options(self, request, slug, format=None):
-        return self._send_to_analytics(request, method="OPTIONS")
-
-
-class AnalyticsAuthorizedView(AnalyticsGenericListView):
-    name = _("Authorized")
-
-
-class AnalyticsReportsList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Reports")
-    swagger_topic = "Automation Analytics"
-
-
-class AnalyticsReportDetail(AnalyticsGenericDetailView):
-    name = _("Report")
-
-
-class AnalyticsReportOptionsList(AnalyticsGenericListView):
-    name = _("Report Options")
-
-
-class AnalyticsAdoptionRateList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Adoption Rate")
-
-
-class AnalyticsEventExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Event Explorer")
-
-
-class AnalyticsHostExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Host Explorer")
-
-
-class AnalyticsJobExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Job Explorer")
-
-
-class AnalyticsProbeTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Probe Templates")
-
-
-class AnalyticsProbeTemplateForHostsList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("Probe Template For Hosts")
-
-
-class AnalyticsRoiTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
-    name = _("ROI Templates")
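
A minimal sketch of the path mapping performed by the removed `_get_analytics_path`, mirroring the module constants above; the incoming request path is an illustrative example:

    AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
    AWX_ANALYTICS_API_PREFIX = "analytics"

    def get_analytics_path(request_path):
        # Keep everything after the "analytics/" prefix and graft it onto
        # the remote Automation Analytics base path.
        path_specific = request_path.split(f"{AWX_ANALYTICS_API_PREFIX}/")[-1]
        return f"{AUTOMATION_ANALYTICS_API_URL_PATH}/{path_specific}"

    print(get_analytics_path("/api/v2/analytics/reports/"))
    # -> /api/tower-analytics/v1/reports/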
@@ -1,92 +0,0 @@
-from collections import OrderedDict
-
-from django.utils.translation import gettext_lazy as _
-
-from rest_framework.permissions import IsAuthenticated
-from rest_framework.renderers import JSONRenderer
-from rest_framework.reverse import reverse
-from rest_framework import status
-from rest_framework.response import Response
-
-from awx.main.models import UnifiedJob, Host
-from awx.api.generics import (
-    GenericAPIView,
-    APIView,
-)
-from awx.api import (
-    serializers,
-    renderers,
-)
-
-
-class BulkView(APIView):
-    name = _('Bulk')
-    swagger_topic = 'Bulk'
-
-    permission_classes = [IsAuthenticated]
-    renderer_classes = [
-        renderers.BrowsableAPIRenderer,
-        JSONRenderer,
-    ]
-    allowed_methods = ['GET', 'OPTIONS']
-
-    def get(self, request, format=None):
-        '''List top level resources'''
-        data = OrderedDict()
-        data['host_create'] = reverse('api:bulk_host_create', request=request)
-        data['host_delete'] = reverse('api:bulk_host_delete', request=request)
-        data['job_launch'] = reverse('api:bulk_job_launch', request=request)
-        return Response(data)
-
-
-class BulkJobLaunchView(GenericAPIView):
-    permission_classes = [IsAuthenticated]
-    model = UnifiedJob
-    serializer_class = serializers.BulkJobLaunchSerializer
-    allowed_methods = ['GET', 'POST', 'OPTIONS']
-
-    def get(self, request):
-        data = OrderedDict()
-        data['detail'] = "Specify a list of unified job templates to launch alongside their launchtime parameters"
-        return Response(data, status=status.HTTP_200_OK)
-
-    def post(self, request):
-        bulkjob_serializer = serializers.BulkJobLaunchSerializer(data=request.data, context={'request': request})
-        if bulkjob_serializer.is_valid():
-            result = bulkjob_serializer.create(bulkjob_serializer.validated_data)
-            return Response(result, status=status.HTTP_201_CREATED)
-        return Response(bulkjob_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
-
-class BulkHostCreateView(GenericAPIView):
-    permission_classes = [IsAuthenticated]
-    model = Host
-    serializer_class = serializers.BulkHostCreateSerializer
-    allowed_methods = ['GET', 'POST', 'OPTIONS']
-
-    def get(self, request):
-        return Response({"detail": "Bulk create hosts with this endpoint"}, status=status.HTTP_200_OK)
-
-    def post(self, request):
-        serializer = serializers.BulkHostCreateSerializer(data=request.data, context={'request': request})
-        if serializer.is_valid():
-            result = serializer.create(serializer.validated_data)
-            return Response(result, status=status.HTTP_201_CREATED)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
-
-class BulkHostDeleteView(GenericAPIView):
-    permission_classes = [IsAuthenticated]
-    model = Host
-    serializer_class = serializers.BulkHostDeleteSerializer
-    allowed_methods = ['GET', 'POST', 'OPTIONS']
-
-    def get(self, request):
-        return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
-
-    def post(self, request):
-        serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
-        if serializer.is_valid():
-            result = serializer.delete(serializer.validated_data)
-            return Response(result, status=status.HTTP_201_CREATED)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -6,8 +6,6 @@ import io
 import ipaddress
 import os
 import tarfile
-import time
-import re

 import asn1
 from awx.api import serializers
@@ -27,7 +25,6 @@ from rest_framework import status
 # Red Hat has an OID namespace (RHANANA). Receptor has its own designation under that.
 RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"

-
 # generate install bundle for the instance
 # install bundle directory structure
 # ├── install_receptor.yml (playbook)
@@ -42,9 +39,8 @@ RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"
 # │   │   └── receptor.key
 # │   └── work-public-key.pem
 # └── requirements.yml


 class InstanceInstallBundle(GenericAPIView):
     name = _('Install Bundle')
     model = models.Instance
     serializer_class = serializers.InstanceSerializer
@@ -53,54 +49,56 @@ class InstanceInstallBundle(GenericAPIView):
     def get(self, request, *args, **kwargs):
         instance_obj = self.get_object()

-        if instance_obj.node_type not in ('execution', 'hop'):
+        if instance_obj.node_type not in ('execution',):
             return Response(
-                data=dict(msg=_('Install bundle can only be generated for execution or hop nodes.')),
+                data=dict(msg=_('Install bundle can only be generated for execution nodes.')),
                 status=status.HTTP_400_BAD_REQUEST,
             )

         with io.BytesIO() as f:
             with tarfile.open(fileobj=f, mode='w:gz') as tar:
-                # copy /etc/receptor/tls/ca/mesh-CA.crt to receptor/tls/ca in the tar file
-                tar.add(os.path.realpath('/etc/receptor/tls/ca/mesh-CA.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/mesh-CA.crt")
+                # copy /etc/receptor/tls/ca/receptor-ca.crt to receptor/tls/ca in the tar file
+                tar.add(
+                    os.path.realpath('/etc/receptor/tls/ca/receptor-ca.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/receptor-ca.crt"
+                )

-                # copy /etc/receptor/work_public_key.pem to receptor/work_public_key.pem
-                tar.add('/etc/receptor/work_public_key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work_public_key.pem")
+                # copy /etc/receptor/signing/work-public-key.pem to receptor/work-public-key.pem
+                tar.add('/etc/receptor/signing/work-public-key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work-public-key.pem")

                 # generate and write the receptor key to receptor/tls/receptor.key in the tar file
                 key, cert = generate_receptor_tls(instance_obj)

-                def tar_addfile(tarinfo, filecontent):
-                    tarinfo.mtime = time.time()
-                    tarinfo.size = len(filecontent)
-                    tar.addfile(tarinfo, io.BytesIO(filecontent))
-
                 key_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.key")
-                tar_addfile(key_tarinfo, key)
+                key_tarinfo.size = len(key)
+                tar.addfile(key_tarinfo, io.BytesIO(key))

                 cert_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/receptor/tls/receptor.crt")
                 cert_tarinfo.size = len(cert)
-                tar_addfile(cert_tarinfo, cert)
+                tar.addfile(cert_tarinfo, io.BytesIO(cert))

                 # generate and write install_receptor.yml to the tar file
-                playbook = generate_playbook(instance_obj).encode('utf-8')
+                playbook = generate_playbook().encode('utf-8')
                 playbook_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/install_receptor.yml")
-                tar_addfile(playbook_tarinfo, playbook)
+                playbook_tarinfo.size = len(playbook)
+                tar.addfile(playbook_tarinfo, io.BytesIO(playbook))

                 # generate and write inventory.yml to the tar file
                 inventory_yml = generate_inventory_yml(instance_obj).encode('utf-8')
                 inventory_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/inventory.yml")
-                tar_addfile(inventory_yml_tarinfo, inventory_yml)
+                inventory_yml_tarinfo.size = len(inventory_yml)
+                tar.addfile(inventory_yml_tarinfo, io.BytesIO(inventory_yml))

                 # generate and write group_vars/all.yml to the tar file
                 group_vars = generate_group_vars_all_yml(instance_obj).encode('utf-8')
                 group_vars_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/group_vars/all.yml")
-                tar_addfile(group_vars_tarinfo, group_vars)
+                group_vars_tarinfo.size = len(group_vars)
+                tar.addfile(group_vars_tarinfo, io.BytesIO(group_vars))

                 # generate and write requirements.yml to the tar file
                 requirements_yml = generate_requirements_yml().encode('utf-8')
|
||||||
requirements_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/requirements.yml")
|
requirements_yml_tarinfo = tarfile.TarInfo(f"{instance_obj.hostname}_install_bundle/requirements.yml")
|
||||||
tar_addfile(requirements_yml_tarinfo, requirements_yml)
|
requirements_yml_tarinfo.size = len(requirements_yml)
|
||||||
|
tar.addfile(requirements_yml_tarinfo, io.BytesIO(requirements_yml))
|
||||||
|
|
||||||
# respond with the tarfile
|
# respond with the tarfile
|
||||||
f.seek(0)
|
f.seek(0)
|
||||||
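A note on the mechanics both sides of this hunk rely on: `tarfile.addfile()` reads exactly `TarInfo.size` bytes from the supplied file object, so the size must be set before the entry is written, and the removed `tar_addfile` helper also stamped `mtime` so entries are not dated to the epoch. A minimal, self-contained sketch of the pattern (archive paths are made up for the demo):

```python
import io
import tarfile
import time


def build_bundle(files):
    """Pack a {archive_path: bytes} mapping into a gzipped tar held in memory."""
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w:gz') as tar:
        for name, content in files.items():
            info = tarfile.TarInfo(name)
            info.size = len(content)  # required: addfile() reads exactly `size` bytes
            info.mtime = time.time()  # otherwise entries are dated 1970-01-01
            tar.addfile(info, io.BytesIO(content))
    return buf.getvalue()


print(len(build_bundle({'node1_install_bundle/inventory.yml': b'all: {}\n'})))
```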
@@ -109,10 +107,8 @@ class InstanceInstallBundle(GenericAPIView):
         return response
 
 
-def generate_playbook(instance_obj):
-    playbook_yaml = render_to_string("instance_install_bundle/install_receptor.yml", context=dict(instance=instance_obj))
-    # convert consecutive newlines with a single newline
-    return re.sub(r'\n+', '\n', playbook_yaml)
+def generate_playbook():
+    return render_to_string("instance_install_bundle/install_receptor.yml")
 
 
 def generate_requirements_yml():
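The `re.sub(r'\n+', '\n', ...)` step dropped here is worth a second look: it collapses every run of newlines into one, which tidies the blank lines that conditional template blocks leave behind, but it also strips intentional blank lines. A quick illustration (the rendered YAML is made up):

```python
import re

rendered = "all:\n\n\n  vars:\n\n    listener_port: 27199\n"
# every run of newlines becomes a single newline, so no blank lines survive
print(re.sub(r'\n+', '\n', rendered))
```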
@@ -124,21 +120,7 @@ def generate_inventory_yml(instance_obj):
 
 
 def generate_group_vars_all_yml(instance_obj):
-    # get peers
-    peers = []
-    for addr in instance_obj.peers.select_related('instance'):
-        peers.append(dict(address=addr.get_full_address(), protocol=addr.protocol))
-    context = dict(instance=instance_obj, peers=peers)
-
-    canonical_addr = instance_obj.canonical_address
-    if canonical_addr:
-        context['listener_port'] = canonical_addr.port
-        protocol = canonical_addr.protocol if canonical_addr.protocol != 'wss' else 'ws'
-        context['listener_protocol'] = protocol
-
-    all_yaml = render_to_string("instance_install_bundle/group_vars/all.yml", context=context)
-    # convert consecutive newlines with a single newline
-    return re.sub(r'\n+', '\n', all_yaml)
+    return render_to_string("instance_install_bundle/group_vars/all.yml", context=dict(instance=instance_obj))
 
 
 def generate_receptor_tls(instance_obj):
@@ -179,14 +161,14 @@ def generate_receptor_tls(instance_obj):
         .sign(key, hashes.SHA256())
     )
 
-    # sign csr with the receptor ca key from /etc/receptor/ca/mesh-CA.key
-    with open('/etc/receptor/tls/ca/mesh-CA.key', 'rb') as f:
+    # sign csr with the receptor ca key from /etc/receptor/ca/receptor-ca.key
+    with open('/etc/receptor/tls/ca/receptor-ca.key', 'rb') as f:
         ca_key = serialization.load_pem_private_key(
             f.read(),
             password=None,
        )
 
-    with open('/etc/receptor/tls/ca/mesh-CA.crt', 'rb') as f:
+    with open('/etc/receptor/tls/ca/receptor-ca.crt', 'rb') as f:
        ca_cert = x509.load_pem_x509_certificate(f.read())
 
     cert = (
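For reference, the CA step this last hunk touches — load a PEM private key and certificate, then countersign a leaf certificate — looks roughly like the following with the `cryptography` library. This is a hedged sketch, not AWX's code: the CA is generated in memory instead of read from `/etc/receptor`, and all names are illustrative.

```python
import datetime

from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

now = datetime.datetime.now(datetime.timezone.utc)

# stand-in CA; the view above loads these from /etc/receptor/tls/ca instead
ca_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
ca_name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, 'demo-ca')])
ca_cert = (
    x509.CertificateBuilder()
    .subject_name(ca_name)
    .issuer_name(ca_name)  # self-signed root
    .public_key(ca_key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=3650))
    .add_extension(x509.BasicConstraints(ca=True, path_length=None), critical=True)
    .sign(ca_key, hashes.SHA256())
)

# leaf certificate for a node, countersigned by the CA key
node_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
node_cert = (
    x509.CertificateBuilder()
    .subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, 'node1.example.org')]))
    .issuer_name(ca_cert.subject)  # issuer must match the CA subject exactly
    .public_key(node_key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=365))
    .sign(ca_key, hashes.SHA256())  # signed with the CA key, not the node key
)
print(node_cert.issuer.rfc4514_string())
```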
@@ -14,7 +14,6 @@ from django.utils.translation import gettext_lazy as _
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.response import Response
 from rest_framework import status
-from rest_framework import serializers
 
 # AWX
 from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
@@ -32,7 +31,6 @@ from awx.api.views.labels import LabelSubListCreateAttachDetachView
 
 from awx.api.serializers import (
     InventorySerializer,
-    ConstructedInventorySerializer,
     ActivityStreamSerializer,
     RoleSerializer,
     InstanceGroupSerializer,
@@ -48,6 +46,7 @@ logger = logging.getLogger('awx.api.views.organization')
 
 
 class InventoryUpdateEventsList(SubListAPIView):
+
     model = InventoryUpdateEvent
     serializer_class = InventoryUpdateEventSerializer
     parent_model = InventoryUpdate
@@ -67,11 +66,13 @@ class InventoryUpdateEventsList(SubListAPIView):
 
 
 class InventoryList(ListCreateAPIView):
+
     model = Inventory
     serializer_class = InventorySerializer
 
 
 class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
+
     model = Inventory
     serializer_class = InventorySerializer
 
@@ -81,9 +82,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
 
         # Do not allow changes to an Inventory kind.
         if kind is not None and obj.kind != kind:
-            return Response(
-                dict(error=_('You cannot turn a regular inventory into a "smart" or "constructed" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED
-            )
+            return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
         return super(InventoryDetail, self).update(request, *args, **kwargs)
 
     def destroy(self, request, *args, **kwargs):
@@ -98,30 +97,8 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
             return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)
 
 
-class ConstructedInventoryDetail(InventoryDetail):
-    serializer_class = ConstructedInventorySerializer
-
-
-class ConstructedInventoryList(InventoryList):
-    serializer_class = ConstructedInventorySerializer
-
-    def get_queryset(self):
-        r = super().get_queryset()
-        return r.filter(kind='constructed')
-
-
-class InventoryInputInventoriesList(SubListAttachDetachAPIView):
-    model = Inventory
-    serializer_class = InventorySerializer
-    parent_model = Inventory
-    relationship = 'input_inventories'
-
-    def is_valid_relation(self, parent, sub, created=False):
-        if sub.kind == 'constructed':
-            raise serializers.ValidationError({'error': 'You cannot add a constructed inventory to another constructed inventory.'})
-
-
 class InventoryActivityStreamList(SubListAPIView):
+
     model = ActivityStream
     serializer_class = ActivityStreamSerializer
     parent_model = Inventory
@@ -136,6 +113,7 @@ class InventoryActivityStreamList(SubListAPIView):
 
 
 class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
+
     model = InstanceGroup
     serializer_class = InstanceGroupSerializer
     parent_model = Inventory
@@ -143,16 +121,17 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
 
 
 class InventoryAccessList(ResourceAccessList):
+
     model = User  # needs to be User for AccessLists's
     parent_model = Inventory
 
 
 class InventoryObjectRolesList(SubListAPIView):
+
     model = Role
     serializer_class = RoleSerializer
     parent_model = Inventory
     search_fields = ('role_field', 'content_type__model')
-    deprecated = True
 
     def get_queryset(self):
         po = self.get_parent_object()
@@ -161,6 +140,7 @@ class InventoryObjectRolesList(SubListAPIView):
 
 
 class InventoryJobTemplateList(SubListAPIView):
+
     model = JobTemplate
     serializer_class = JobTemplateSerializer
     parent_model = Inventory
@@ -174,9 +154,11 @@ class InventoryJobTemplateList(SubListAPIView):
 
 
 class InventoryLabelList(LabelSubListCreateAttachDetachView):
+
     parent_model = Inventory
 
 
 class InventoryCopy(CopyAPIView):
+
     model = Inventory
     copy_return_serializer_class = InventorySerializer
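The removed `ConstructedInventoryList` above illustrates a common DRF move: a subclass that reuses the parent view's queryset and narrows it. A sketch of the shape with stand-in model and serializer names — this assumes a surrounding Django project and is not runnable on its own:

```python
from rest_framework.generics import ListCreateAPIView


class InventoryList(ListCreateAPIView):
    model = Inventory                      # stand-ins supplied by the project
    serializer_class = InventorySerializer


class ConstructedInventoryList(InventoryList):
    serializer_class = ConstructedInventorySerializer

    def get_queryset(self):
        # reuse the parent's (access-controlled) queryset, then narrow it,
        # so permission logic stays in exactly one place
        return super().get_queryset().filter(kind='constructed')
```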
@@ -59,11 +59,13 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
 
 
 class LabelDetail(RetrieveUpdateAPIView):
+
     model = Label
     serializer_class = LabelSerializer
 
 
 class LabelList(ListCreateAPIView):
+
     name = _("Labels")
     model = Label
     serializer_class = LabelSerializer
@@ -10,14 +10,16 @@ from awx.main.models import InstanceLink, Instance
 
 
 class MeshVisualizer(APIView):
+
     name = _("Mesh Visualizer")
     permission_classes = (IsSystemAdminOrAuditor,)
     swagger_topic = "System Configuration"
 
     def get(self, request, format=None):
+
         data = {
             'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
-            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target__instance', 'source'), many=True).data,
+            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
         }
 
         return Response(data)
@@ -5,11 +5,9 @@
 import logging
 
 # Django
-from django.conf import settings
 from django.utils.translation import gettext_lazy as _
 
 # Django REST Framework
-from rest_framework.permissions import AllowAny
 from rest_framework.response import Response
 from rest_framework.exceptions import PermissionDenied
 
@@ -27,19 +25,15 @@ logger = logging.getLogger('awx.analytics')
 
 
 class MetricsView(APIView):
+
     name = _('Metrics')
     swagger_topic = 'Metrics'
 
     renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
 
-    def initialize_request(self, request, *args, **kwargs):
-        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS:
-            self.permission_classes = (AllowAny,)
-        return super(APIView, self).initialize_request(request, *args, **kwargs)
-
     def get(self, request):
         '''Show Metrics Details'''
-        if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
+        if request.user.is_superuser or request.user.is_system_auditor:
             metrics_to_show = ''
             if not request.query_params.get('subsystemonly', "0") == "1":
                 metrics_to_show += metrics().decode('UTF-8')
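The removed `initialize_request` override swaps the permission set per request, before DRF's permission checks run, based on a settings flag. The idea, reduced to plain Python — class names mimic DRF's but nothing here is DRF itself:

```python
ALLOW_METRICS_FOR_ANONYMOUS_USERS = True  # stand-in for the Django setting


class AllowAny:
    def has_permission(self, user):
        return True


class IsSuperuserOrAuditor:
    def has_permission(self, user):
        return user in ('superuser', 'auditor')


def permission_classes_for_request():
    # decided per request, so flipping the setting needs no restart
    if ALLOW_METRICS_FOR_ANONYMOUS_USERS:
        return (AllowAny,)
    return (IsSuperuserOrAuditor,)


print([cls.__name__ for cls in permission_classes_for_request()])
```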
@@ -16,7 +16,7 @@ from rest_framework import status
 
 from awx.main.constants import ACTIVE_STATES
 from awx.main.utils import get_object_or_400
-from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
+from awx.main.models.ha import Instance, InstanceGroup
 from awx.main.models.organization import Team
 from awx.main.models.projects import Project
 from awx.main.models.inventory import Inventory
@@ -50,7 +50,7 @@ class UnifiedJobDeletionMixin(object):
             return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
         else:
             # if it has been > 1 minute, events are probably lost
-            logger.warning('Allowing deletion of {} through the API without all events processed.'.format(obj.log_format))
+            logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
 
         # Manually cascade delete events if unpartitioned job
         if obj.has_unpartitioned_events:
@@ -107,11 +107,6 @@ class InstanceGroupMembershipMixin(object):
         if inst_name in ig_obj.policy_instance_list:
             ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
             ig_obj.save(update_fields=['policy_instance_list'])
-
-            # sometimes removing an instance has a non-obvious consequence
-            # this is almost always true if policy_instance_percentage or _minimum is non-zero
-            # after removing a single instance, the other memberships need to be re-balanced
-            schedule_policy_task()
         return response
 
 
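The only textual difference in the `logger.warning` row above is implicit string-literal concatenation: `'... events ' 'processed.'` is joined by the parser at compile time, so both sides log the same message. A two-line demonstration:

```python
# adjacent string literals are concatenated by the parser, not at runtime
assert 'without all events ' 'processed.' == 'without all events processed.'
print('equal')
```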
@@ -53,19 +53,24 @@ from awx.api.serializers import (
     CredentialSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
-from awx.api.views import immutablesharedfields
 
 logger = logging.getLogger('awx.api.views.organization')
 
 
-@immutablesharedfields
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
+
     model = Organization
     serializer_class = OrganizationSerializer
+
+    def get_queryset(self):
+        qs = Organization.accessible_objects(self.request.user, 'read_role')
+        qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
+        qs = qs.prefetch_related('created_by', 'modified_by')
+        return qs
 
 
-@immutablesharedfields
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
+
     model = Organization
     serializer_class = OrganizationSerializer
 
@@ -101,14 +106,15 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
 
 
 class OrganizationInventoriesList(SubListAPIView):
+
     model = Inventory
     serializer_class = InventorySerializer
     parent_model = Organization
     relationship = 'inventories'
 
 
-@immutablesharedfields
 class OrganizationUsersList(BaseUsersList):
+
     model = User
     serializer_class = UserSerializer
     parent_model = Organization
@@ -116,8 +122,8 @@ class OrganizationUsersList(BaseUsersList):
     ordering = ('username',)
 
 
-@immutablesharedfields
 class OrganizationAdminsList(BaseUsersList):
+
     model = User
     serializer_class = UserSerializer
     parent_model = Organization
@@ -126,6 +132,7 @@ class OrganizationAdminsList(BaseUsersList):
 
 
 class OrganizationProjectsList(SubListCreateAPIView):
+
     model = Project
     serializer_class = ProjectSerializer
     parent_model = Organization
@@ -133,6 +140,7 @@ class OrganizationProjectsList(SubListCreateAPIView):
 
 
 class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
+
     model = ExecutionEnvironment
     serializer_class = ExecutionEnvironmentSerializer
     parent_model = Organization
@@ -142,6 +150,7 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
 
 
 class OrganizationJobTemplatesList(SubListCreateAPIView):
+
     model = JobTemplate
     serializer_class = JobTemplateSerializer
     parent_model = Organization
@@ -149,14 +158,15 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):
 
 
 class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
+
     model = WorkflowJobTemplate
     serializer_class = WorkflowJobTemplateSerializer
     parent_model = Organization
     parent_key = 'organization'
 
 
-@immutablesharedfields
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
+
     model = Team
     serializer_class = TeamSerializer
     parent_model = Organization
@@ -165,6 +175,7 @@ class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
 
 
 class OrganizationActivityStreamList(SubListAPIView):
+
     model = ActivityStream
     serializer_class = ActivityStreamSerializer
     parent_model = Organization
@@ -173,6 +184,7 @@ class OrganizationActivityStreamList(SubListAPIView):
 
 
 class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
+
     model = NotificationTemplate
     serializer_class = NotificationTemplateSerializer
     parent_model = Organization
@@ -181,41 +193,46 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
 
 
 class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
+
     model = NotificationTemplate
     serializer_class = NotificationTemplateSerializer
     parent_model = Organization
 
 
 class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):
+
     relationship = 'notification_templates_started'
 
 
 class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):
+
     relationship = 'notification_templates_error'
 
 
 class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):
+
     relationship = 'notification_templates_success'
 
 
 class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
+
     relationship = 'notification_templates_approvals'
 
 
 class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
+
     model = InstanceGroup
     serializer_class = InstanceGroupSerializer
    parent_model = Organization
     relationship = 'instance_groups'
-    filter_read_permission = False
 
 
 class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
+
     model = Credential
     serializer_class = CredentialSerializer
     parent_model = Organization
     relationship = 'galaxy_credentials'
-    filter_read_permission = False
 
     def is_valid_relation(self, parent, sub, created=False):
         if sub.kind != 'galaxy_api_token':
@@ -223,16 +240,17 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
 
 
 class OrganizationAccessList(ResourceAccessList):
+
     model = User  # needs to be User for AccessLists's
     parent_model = Organization
 
 
 class OrganizationObjectRolesList(SubListAPIView):
+
     model = Role
     serializer_class = RoleSerializer
     parent_model = Organization
     search_fields = ('role_field', 'content_type__model')
-    deprecated = True
 
     def get_queryset(self):
         po = self.get_parent_object()
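The `get_queryset` added on the right-hand side is the classic N+1 fix: `select_related` pulls single-valued relations in with a JOIN on the initial query, while `prefetch_related` batches the other relations into one extra query each instead of one per row. A hedged sketch of the difference against stand-in Django models (not runnable outside a project):

```python
# one SELECT with JOINs: each org row arrives with its four role rows attached
orgs = Organization.objects.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')

# two extra queries in total (one per prefetched relation), not one per org
orgs = orgs.prefetch_related('created_by', 'modified_by')

for org in orgs:
    org.admin_role   # no query: satisfied by the JOIN
    org.created_by   # no query: satisfied by the prefetch cache
```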
@@ -13,7 +13,6 @@ from django.utils.decorators import method_decorator
 from django.views.decorators.csrf import ensure_csrf_cookie
 from django.template.loader import render_to_string
 from django.utils.translation import gettext_lazy as _
-from django.urls import reverse as django_reverse
 
 from rest_framework.permissions import AllowAny, IsAuthenticated
 from rest_framework.response import Response
@@ -21,14 +20,13 @@ from rest_framework import status
 
 import requests
 
-from awx import MODE
 from awx.api.generics import APIView
 from awx.conf.registry import settings_registry
 from awx.main.analytics import all_collectors
 from awx.main.ha import is_ha_environment
 from awx.main.utils import get_awx_version, get_custom_venv_choices
 from awx.main.utils.licensing import validate_entitlement_manifest
-from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
+from awx.api.versioning import reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ
@@ -38,30 +36,30 @@ logger = logging.getLogger('awx.api.views.root')
 
 
 class ApiRootView(APIView):
+
     permission_classes = (AllowAny,)
     name = _('REST API')
-    versioning_class = URLPathVersioning
+    versioning_class = None
     swagger_topic = 'Versioning'
 
     @method_decorator(ensure_csrf_cookie)
     def get(self, request, format=None):
         '''List supported API versions'''
-        v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
+        v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
         data = OrderedDict()
         data['description'] = _('AWX REST API')
         data['current_version'] = v2
         data['available_versions'] = dict(v2=v2)
-        if not is_optional_api_urlpattern_prefix_request(request):
-            data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
+        data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
         data['custom_logo'] = settings.CUSTOM_LOGO
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
         data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
-        if MODE == 'development':
-            data['swagger'] = drf_reverse('api:schema-swagger-ui')
         return Response(data)
 
 
 class ApiOAuthAuthorizationRootView(APIView):
+
     permission_classes = (AllowAny,)
     name = _("API OAuth 2 Authorization Root")
     versioning_class = None
@@ -76,6 +74,7 @@ class ApiOAuthAuthorizationRootView(APIView):
 
 
 class ApiVersionRootView(APIView):
+
     permission_classes = (AllowAny,)
     swagger_topic = 'Versioning'
 
@@ -85,7 +84,6 @@ class ApiVersionRootView(APIView):
         data['ping'] = reverse('api:api_v2_ping_view', request=request)
         data['instances'] = reverse('api:instance_list', request=request)
         data['instance_groups'] = reverse('api:instance_group_list', request=request)
-        data['receptor_addresses'] = reverse('api:receptor_addresses_list', request=request)
         data['config'] = reverse('api:api_v2_config_view', request=request)
         data['settings'] = reverse('api:setting_category_list', request=request)
         data['me'] = reverse('api:user_me_list', request=request)
@@ -103,13 +101,10 @@ class ApiVersionRootView(APIView):
         data['tokens'] = reverse('api:o_auth2_token_list', request=request)
         data['metrics'] = reverse('api:metrics_view', request=request)
         data['inventory'] = reverse('api:inventory_list', request=request)
-        data['constructed_inventory'] = reverse('api:constructed_inventory_list', request=request)
         data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
         data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
         data['groups'] = reverse('api:group_list', request=request)
         data['hosts'] = reverse('api:host_list', request=request)
-        data['host_metrics'] = reverse('api:host_metric_list', request=request)
-        data['host_metric_summary_monthly'] = reverse('api:host_metric_summary_monthly_list', request=request)
         data['job_templates'] = reverse('api:job_template_list', request=request)
         data['jobs'] = reverse('api:job_list', request=request)
         data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)
@@ -129,12 +124,6 @@ class ApiVersionRootView(APIView):
         data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
         data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
         data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
-        data['bulk'] = reverse('api:bulk', request=request)
-        data['analytics'] = reverse('api:analytics_root_view', request=request)
-        data['service_index'] = django_reverse('service-index-root')
-        data['role_definitions'] = django_reverse('roledefinition-list')
-        data['role_user_assignments'] = django_reverse('roleuserassignment-list')
-        data['role_team_assignments'] = django_reverse('roleteamassignment-list')
         return Response(data)
 
 
@@ -183,6 +172,7 @@ class ApiV2PingView(APIView):
 
 
 class ApiV2SubscriptionView(APIView):
+
     permission_classes = (IsAuthenticated,)
     name = _('Subscriptions')
     swagger_topic = 'System Configuration'
@@ -222,6 +212,7 @@ class ApiV2SubscriptionView(APIView):
 
 
 class ApiV2AttachView(APIView):
+
     permission_classes = (IsAuthenticated,)
     name = _('Attach Subscription')
     swagger_topic = 'System Configuration'
@@ -239,6 +230,7 @@ class ApiV2AttachView(APIView):
         user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
         pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
         if pool_id and user and pw:
+
             data = request.data.copy()
             try:
                 with set_environ(**settings.AWX_TASK_ENV):
@@ -266,6 +258,7 @@ class ApiV2AttachView(APIView):
 
 
 class ApiV2ConfigView(APIView):
+
     permission_classes = (IsAuthenticated,)
     name = _('Configuration')
     swagger_topic = 'System Configuration'
@@ -285,9 +278,6 @@ class ApiV2ConfigView(APIView):
 
         pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
 
-        # Guarding against settings.UI_NEXT being set to a non-boolean value
-        ui_next_state = settings.UI_NEXT if settings.UI_NEXT in (True, False) else False
-
         data = dict(
             time_zone=settings.TIME_ZONE,
             license_info=license_data,
@@ -296,7 +286,6 @@ class ApiV2ConfigView(APIView):
             analytics_status=pendo_state,
             analytics_collectors=all_collectors(),
             become_methods=PRIVILEGE_ESCALATION_METHODS,
-            ui_next=ui_next_state,
         )
 
         # If LDAP is enabled, user_ldap_fields will return a list of field
@@ -1,4 +1,4 @@
-from hashlib import sha1, sha256
+from hashlib import sha1
 import hmac
 import logging
 import urllib.parse
@@ -99,31 +99,14 @@ class WebhookReceiverBase(APIView):
     def get_signature(self):
         raise NotImplementedError
 
-    def must_check_signature(self):
-        return True
-
-    def is_ignored_request(self):
-        return False
-
     def check_signature(self, obj):
         if not obj.webhook_key:
             raise PermissionDenied
-        if not self.must_check_signature():
-            logger.debug("skipping signature validation")
-            return
 
-        hash_alg, expected_digest = self.get_signature()
-        if hash_alg == 'sha1':
-            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
-        elif hash_alg == 'sha256':
-            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha256)
-        else:
-            logger.debug("Unsupported signature type, supported: sha1, sha256, received: {}".format(hash_alg))
-            raise PermissionDenied
-
-        logger.debug("header signature: %s", expected_digest)
+        mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
+        logger.debug("header signature: %s", self.get_signature())
         logger.debug("calculated signature: %s", force_bytes(mac.hexdigest()))
-        if not hmac.compare_digest(force_bytes(mac.hexdigest()), expected_digest):
+        if not hmac.compare_digest(force_bytes(mac.hexdigest()), self.get_signature()):
             raise PermissionDenied
 
     @csrf_exempt
@@ -131,14 +114,10 @@ class WebhookReceiverBase(APIView):
         # Ensure that the full contents of the request are captured for multiple uses.
         request.body
 
-        logger.debug("headers: {}\ndata: {}\n".format(request.headers, request.data))
+        logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
         obj = self.get_object()
         self.check_signature(obj)
 
-        if self.is_ignored_request():
-            # This was an ignored request type (e.g. ping), don't act on it
-            return Response({'message': _("Webhook ignored")}, status=status.HTTP_200_OK)
-
         event_type = self.get_event_type()
         event_guid = self.get_event_guid()
         event_ref = self.get_event_ref()
@@ -207,7 +186,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
         if hash_alg != 'sha1':
             logger.debug("Unsupported signature type, expected: sha1, received: {}".format(hash_alg))
             raise PermissionDenied
-        return hash_alg, force_bytes(signature)
+        return force_bytes(signature)
 
 
 class GitlabWebhookReceiver(WebhookReceiverBase):
@@ -235,73 +214,15 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
 
         return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
 
+    def get_signature(self):
+        return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
+
     def check_signature(self, obj):
         if not obj.webhook_key:
             raise PermissionDenied
 
-        token_from_request = force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
-
         # GitLab only returns the secret token, not an hmac hash. Use
         # the hmac `compare_digest` helper function to prevent timing
         # analysis by attackers.
-        if not hmac.compare_digest(force_bytes(obj.webhook_key), token_from_request):
+        if not hmac.compare_digest(force_bytes(obj.webhook_key), self.get_signature()):
             raise PermissionDenied
-
-
-class BitbucketDcWebhookReceiver(WebhookReceiverBase):
-    service = 'bitbucket_dc'
-
-    ref_keys = {
-        'repo:refs_changed': 'changes.0.toHash',
-        'mirror:repo_synchronized': 'changes.0.toHash',
-        'pr:opened': 'pullRequest.toRef.latestCommit',
-        'pr:from_ref_updated': 'pullRequest.toRef.latestCommit',
-        'pr:modified': 'pullRequest.toRef.latestCommit',
-    }
-
-    def get_event_type(self):
-        return self.request.META.get('HTTP_X_EVENT_KEY')
-
-    def get_event_guid(self):
-        return self.request.META.get('HTTP_X_REQUEST_ID')
-
-    def get_event_status_api(self):
-        # https://<bitbucket-base-url>/rest/build-status/1.0/commits/<commit-hash>
-        if self.get_event_type() not in self.ref_keys.keys():
-            return
-        if self.get_event_ref() is None:
-            return
-        any_url = None
-        if 'actor' in self.request.data:
-            any_url = self.request.data['actor'].get('links', {}).get('self')
-        if any_url is None and 'repository' in self.request.data:
-            any_url = self.request.data['repository'].get('links', {}).get('self')
-        if any_url is None:
-            return
-        any_url = any_url[0].get('href')
-        if any_url is None:
-            return
-        parsed = urllib.parse.urlparse(any_url)
-
-        return "{}://{}/rest/build-status/1.0/commits/{}".format(parsed.scheme, parsed.netloc, self.get_event_ref())
-
-    def is_ignored_request(self):
-        return self.get_event_type() not in [
-            'repo:refs_changed',
-            'mirror:repo_synchronized',
-            'pr:opened',
-            'pr:from_ref_updated',
-            'pr:modified',
-        ]
-
-    def must_check_signature(self):
-        # Bitbucket does not sign ping requests...
-        return self.get_event_type() != 'diagnostics:ping'
-
-    def get_signature(self):
-        header_sig = self.request.META.get('HTTP_X_HUB_SIGNATURE')
-        if not header_sig:
-            logger.debug("Expected signature missing from header key HTTP_X_HUB_SIGNATURE")
-            raise PermissionDenied
-        hash_alg, signature = header_sig.split('=')
-        return hash_alg, force_bytes(signature)
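A standalone sketch of the dual-algorithm check on the left side of this hunk: pick the digest algorithm from the `<alg>=<hex>` header value, recompute the HMAC over the raw request body with the shared webhook key, and compare in constant time. Header and key values here are made up for the demo.

```python
import hashlib
import hmac


def verify(webhook_key: bytes, body: bytes, header_sig: str) -> bool:
    hash_alg, expected_digest = header_sig.split('=', 1)
    if hash_alg not in ('sha1', 'sha256'):
        return False  # unsupported algorithm: reject rather than guess
    mac = hmac.new(webhook_key, msg=body, digestmod=getattr(hashlib, hash_alg))
    # compare_digest avoids leaking how many leading characters matched
    return hmac.compare_digest(mac.hexdigest(), expected_digest)


key, body = b'secret', b'{"ref": "main"}'
sig = 'sha256=' + hmac.new(key, body, hashlib.sha256).hexdigest()
print(verify(key, body, sig))  # True
```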
@@ -8,13 +8,15 @@ from django.utils.translation import gettext_lazy as _
 
 
 class ConfConfig(AppConfig):
+
     name = 'awx.conf'
     verbose_name = _('Configuration')
 
     def ready(self):
         self.module.autodiscover()
 
-        if not set(sys.argv) & {'migrate', 'check_migrations', 'showmigrations'}:
+        if not set(sys.argv) & {'migrate', 'check_migrations'}:
+
             from .settings import SettingsWrapper
 
             SettingsWrapper.initialize()
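The guard in `ready()` keys off `sys.argv` so that database-backed settings initialization is skipped while migration commands run (the left side also skips `showmigrations`). The check itself is ordinary set intersection:

```python
import sys

# Sketch of the guard above; command names mirror Django's manage.py commands.
MIGRATION_COMMANDS = {'migrate', 'check_migrations', 'showmigrations'}

if not set(sys.argv) & MIGRATION_COMMANDS:
    print('would initialize SettingsWrapper')
else:
    print('skipping settings initialization during migrations')
```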
@@ -55,7 +55,6 @@ register(
     # Optional; category_slug will be slugified version of category if not
     # explicitly provided.
     category_slug='cows',
-    hidden=True,
 )
 
 
@@ -21,7 +21,7 @@ logger = logging.getLogger('awx.conf.fields')
 # Use DRF fields to convert/validate settings:
 # - to_representation(obj) should convert a native Python object to a primitive
 #   serializable type. This primitive type will be what is presented in the API
-#   and stored in the JSON field in the database.
+#   and stored in the JSON field in the datbase.
 # - to_internal_value(data) should convert the primitive type back into the
 #   appropriate Python type to be used in settings.
 
@@ -47,6 +47,7 @@ class IntegerField(IntegerField):
 
 
 class StringListField(ListField):
+
     child = CharField()
 
     def to_representation(self, value):
@@ -56,15 +57,12 @@ class StringListField(ListField):
 
 
 class StringListBooleanField(ListField):
+
     default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
     child = CharField()
 
     def to_representation(self, value):
         try:
-            if isinstance(value, str):
-                # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
-                # DRF changed truthy and falsy lists to be capitalized
-                value = value.lower()
             if isinstance(value, (list, tuple)):
                 return super(StringListBooleanField, self).to_representation(value)
             elif value in BooleanField.TRUE_VALUES:
@@ -82,8 +80,6 @@ class StringListBooleanField(ListField):
 
     def to_internal_value(self, data):
         try:
-            if isinstance(data, str):
-                data = data.lower()
             if isinstance(data, (list, tuple)):
                 return super(StringListBooleanField, self).to_internal_value(data)
             elif data in BooleanField.TRUE_VALUES:
@@ -100,6 +96,7 @@ class StringListBooleanField(ListField):
 
 
 class StringListPathField(StringListField):
+
     default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}
 
     def to_internal_value(self, paths):
@@ -129,6 +126,7 @@ class StringListIsolatedPathField(StringListField):
     }
 
     def to_internal_value(self, paths):
+
         if isinstance(paths, (list, tuple)):
             for p in paths:
                 if not isinstance(p, str):
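The lowercasing removed from `StringListBooleanField` compensates for DRF capitalizing its truthy/falsy value lists (see the linked commit): normalizing first lets 'True', 'TRUE', and 'true' all parse the same way. A stand-in version of the idea — these value sets are illustrative, not DRF's exact lists:

```python
TRUE_VALUES = {'t', 'true', 'on', 'y', 'yes', '1'}
FALSE_VALUES = {'f', 'false', 'off', 'n', 'no', '0'}


def to_bool(data):
    if isinstance(data, str):
        data = data.lower()  # normalize before the membership tests
    if data in TRUE_VALUES:
        return True
    if data in FALSE_VALUES:
        return False
    raise ValueError(f'not a boolean: {data!r}')


print(to_bool('TRUE'), to_bool('Off'))  # True False
```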
@@ -8,6 +8,7 @@ import awx.main.fields
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
|
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
|
|||||||
@@ -48,6 +48,7 @@ def revert_tower_settings(apps, schema_editor):
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]
|
dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]
|
||||||
|
|
||||||
run_before = [('main', '0005_squashed_v310_v313_updates')]
|
run_before = [('main', '0005_squashed_v310_v313_updates')]
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import awx.main.fields
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0002_v310_copy_tower_settings')]
|
dependencies = [('conf', '0002_v310_copy_tower_settings')]
|
||||||
|
|
||||||
operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONBlob(null=True))]
|
operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONBlob(null=True))]
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0003_v310_JSONField_changes')]
|
dependencies = [('conf', '0003_v310_JSONField_changes')]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ def reverse_copy_session_settings(apps, schema_editor):
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0004_v320_reencrypt')]
|
dependencies = [('conf', '0004_v320_reencrypt')]
|
||||||
|
|
||||||
operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
|
operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0005_v330_rename_two_session_settings')]
|
dependencies = [('conf', '0005_v330_rename_two_session_settings')]
|
||||||
|
|
||||||
operations = [migrations.RunPython(fill_ldap_group_type_params)]
|
operations = [migrations.RunPython(fill_ldap_group_type_params)]
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ def copy_allowed_ips(apps, schema_editor):
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0006_v331_ldap_group_type')]
|
dependencies = [('conf', '0006_v331_ldap_group_type')]
|
||||||
|
|
||||||
operations = [migrations.RunPython(copy_allowed_ips)]
|
operations = [migrations.RunPython(copy_allowed_ips)]
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ def _noop(apps, schema_editor):
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0007_v380_rename_more_settings')]
|
dependencies = [('conf', '0007_v380_rename_more_settings')]
|
||||||
|
|
||||||
operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
|
operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ def rename_proot_settings(apps, schema_editor):
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [('conf', '0008_subscriptions')]
|
dependencies = [('conf', '0008_subscriptions')]
|
||||||
|
|
||||||
operations = [migrations.RunPython(rename_proot_settings)]
|
operations = [migrations.RunPython(rename_proot_settings)]
|
||||||
|
|||||||
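Every hunk above touches a Django data migration built on the same `migrations.RunPython` pattern: a forward function that edits rows through the historical model, optionally paired with a reverse function. A hedged sketch of that shape (the keys and the `_noop` reverse below are placeholders, not code from this diff):

```python
# Illustrative sketch only: the RunPython shape shared by the
# migrations above. OLD_KEY/NEW_KEY are hypothetical.
from django.db import migrations


def copy_setting_forward(apps, schema_editor):
    # Always go through the historical model registry, not a live import.
    Setting = apps.get_model('conf', 'Setting')
    for s in Setting.objects.filter(key='OLD_KEY'):
        Setting.objects.get_or_create(key='NEW_KEY', defaults={'value': s.value})


def _noop(apps, schema_editor):
    # Supplying a reverse callable keeps the migration reversible.
    pass


class Migration(migrations.Migration):
    dependencies = [('conf', '0009_rename_proot_settings')]

    operations = [migrations.RunPython(copy_setting_forward, _noop)]
```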
@@ -1,17 +0,0 @@
-# Generated by Django 4.2 on 2023-06-09 19:51
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ('conf', '0009_rename_proot_settings'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='setting',
-            name='value',
-            field=models.JSONField(null=True),
-        ),
-    ]
@@ -1,11 +1,7 @@
 import inspect
 
 from django.conf import settings
-
-import logging
-
-
-logger = logging.getLogger('awx.conf.migrations')
+from django.utils.timezone import now
 
 
 def fill_ldap_group_type_params(apps, schema_editor):
@@ -19,7 +15,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
         entry = qs[0]
         group_type_params = entry.value
     else:
-        return  # for new installs we prefer to use the default value
+        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
 
     init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
     for k in list(group_type_params.keys()):
@@ -27,5 +23,4 @@ def fill_ldap_group_type_params(apps, schema_editor):
             del group_type_params[k]
 
     entry.value = group_type_params
-    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
     entry.save()
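The surviving logic filters the stored group-type parameters down to the keyword arguments the configured group type's `__init__` actually accepts. The introspection trick generalizes; here is a runnable sketch with a stand-in class (the class body is invented for illustration):

```python
# Illustrative sketch only: keep only the kwargs a class's __init__
# accepts. The stand-in class below is hypothetical.
import inspect


class MemberDNGroupType:
    def __init__(self, member_attr='member', name_attr='cn'):
        self.member_attr = member_attr
        self.name_attr = name_attr


params = {'member_attr': 'member', 'name_attr': 'cn', 'stale_option': True}

# args[1:] drops 'self'; anything not in the signature is removed.
init_attrs = set(inspect.getfullargspec(MemberDNGroupType.__init__).args[1:])
for k in list(params.keys()):
    if k not in init_attrs:
        del params[k]

print(params)  # {'member_attr': 'member', 'name_attr': 'cn'}
```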
@@ -10,6 +10,7 @@ __all__ = ['rename_setting']
 
+
 def rename_setting(apps, schema_editor, old_key, new_key):
 
     old_setting = None
     Setting = apps.get_model('conf', 'Setting')
    if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
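`rename_setting` migrates a settings row from an old key to a new one, skipping the copy when the new key already exists in the database or is pinned in a settings file. A simplified approximation of that flow (details of the real helper may differ):

```python
# Simplified approximation of a rename_setting migration helper; not
# the actual AWX code, and edge-case handling may differ.
from django.conf import settings


def rename_setting(apps, schema_editor, old_key, new_key):
    Setting = apps.get_model('conf', 'Setting')
    # If the new key already exists in the DB, or is pinned in a settings
    # file, keep it and just discard any rows left under the old key.
    if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
        Setting.objects.filter(key=old_key).delete()
        return
    old_setting = Setting.objects.filter(key=old_key).order_by('pk').first()
    if old_setting is not None:
        Setting.objects.create(key=new_key, value=old_setting.value)
        Setting.objects.filter(key=old_key).delete()
```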
@@ -7,10 +7,9 @@ import json
 # Django
 from django.db import models
 
-from ansible_base.lib.utils.models import prevent_search
-
 # AWX
-from awx.main.models.base import CreatedModifiedModel
+from awx.main.fields import JSONBlob
+from awx.main.models.base import CreatedModifiedModel, prevent_search
 from awx.main.utils import encrypt_field
 from awx.conf import settings_registry
 
@@ -18,8 +17,9 @@ __all__ = ['Setting']
 
+
 class Setting(CreatedModifiedModel):
 
     key = models.CharField(max_length=255)
-    value = models.JSONField(null=True)
+    value = JSONBlob(null=True)
     user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))
 
     def __str__(self):
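The model change swaps `Setting.value` back from `models.JSONField` to AWX's `JSONBlob`, and the deleted migration above removed the `AlterField` that had moved it the other way. Assuming the intent of a `JSONBlob`-style field is to keep JSON serialized in a plain text column rather than a native jsonb column, such a field could look roughly like this (an assumption about the design, not AWX's actual field code):

```python
# Illustrative sketch only: one way a JSONBlob-style field can keep
# JSON in a TEXT column. This is an assumption, not AWX's exact code.
import json

from django.db import models


class JSONBlob(models.JSONField):
    """JSONField variant stored as TEXT for backward compatibility."""

    def get_internal_type(self):
        # Keeps the underlying column a text type, so rows written as
        # serialized strings remain readable.
        return 'TextField'

    def from_db_value(self, value, expression, connection):
        if value is None:
            return value
        return json.loads(value)

    def get_prep_value(self, value):
        if value is None:
            return value
        return json.dumps(value)
```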
@@ -127,8 +127,6 @@ class SettingsRegistry(object):
         encrypted = bool(field_kwargs.pop('encrypted', False))
         defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
         unit = field_kwargs.pop('unit', None)
-        hidden = field_kwargs.pop('hidden', False)
-        warning_text = field_kwargs.pop('warning_text', None)
         if getattr(field_kwargs.get('child', None), 'source', None) is not None:
             field_kwargs['child'].source = None
         field_instance = field_class(**field_kwargs)
@@ -136,14 +134,12 @@ class SettingsRegistry(object):
         field_instance.category = category
         field_instance.depends_on = depends_on
         field_instance.unit = unit
-        field_instance.hidden = hidden
         if placeholder is not empty:
             field_instance.placeholder = placeholder
         field_instance.defined_in_file = defined_in_file
         if field_instance.defined_in_file:
             field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
         field_instance.encrypted = encrypted
-        field_instance.warning_text = warning_text
         original_field_instance = field_instance
         if field_class != original_field_class:
             original_field_instance = original_field_class(**field_kwargs)
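These registry hunks drop the `hidden` and `warning_text` metadata. The underlying pattern, popping registry-only kwargs before constructing the DRF field and re-attaching them as plain attributes, looks like this in miniature (the function and kwargs below are hypothetical):

```python
# Illustrative sketch only: pop metadata kwargs that DRF fields don't
# understand, build the field, then attach the metadata as attributes.
from rest_framework import fields


def build_registered_field(field_class=fields.CharField, **field_kwargs):
    # Registry-only metadata must be removed before calling the DRF
    # field constructor, which rejects unknown keyword arguments.
    encrypted = bool(field_kwargs.pop('encrypted', False))
    unit = field_kwargs.pop('unit', None)

    field_instance = field_class(**field_kwargs)

    # Attach the metadata for later introspection by the settings API.
    field_instance.encrypted = encrypted
    field_instance.unit = unit
    return field_instance


field = build_registered_field(encrypted=True, unit='seconds', help_text='Timeout.')
print(field.encrypted, field.unit)  # True seconds
```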
Some files were not shown because too many files have changed in this diff.