Merge remote-tracking branch 'tower/test_stable-2.6' into merge_26_2

Commit 8fb6a3a633
.github/actions/awx_devel_image/action.yml
vendored
22
.github/actions/awx_devel_image/action.yml
vendored
@ -24,9 +24,31 @@ runs:
|
|||||||
run: |
|
run: |
|
||||||
echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
||||||
|
|
||||||
|
<<<<<<< HEAD
|
||||||
- uses: ./.github/actions/setup-ssh-agent
|
- uses: ./.github/actions/setup-ssh-agent
|
||||||
with:
|
with:
|
||||||
ssh-private-key: ${{ inputs.private-github-key }}
|
ssh-private-key: ${{ inputs.private-github-key }}
|
||||||
|
=======
|
||||||
|
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
|
||||||
|
id: generate_key
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if [[ -z "${{ inputs.private-github-key }}" ]]; then
|
||||||
|
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
|
||||||
|
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
|
||||||
|
echo "${{ inputs.private-github-key }}" >> $GITHUB_OUTPUT
|
||||||
|
echo "EOF" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Add private GitHub key to SSH agent
|
||||||
|
uses: webfactory/ssh-agent@v0.9.0
|
||||||
|
with:
|
||||||
|
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
|
||||||
|
>>>>>>> tower/test_stable-2.6
|
||||||
|
|
||||||
- name: Pre-pull latest devel image to warm cache
|
- name: Pre-pull latest devel image to warm cache
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|||||||
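The placeholder-key step on the tower/test_stable-2.6 side relies on GitHub Actions' delimiter syntax for multi-line step outputs: a NAME<<EOF line, the value, then a closing EOF line, all appended to the file named by $GITHUB_OUTPUT. A minimal Python sketch of that format, assuming an Actions-style environment (the key material below is a fake placeholder):

import os

def write_multiline_output(name: str, value: str) -> None:
    # GITHUB_OUTPUT names a file the runner parses after the step finishes;
    # the NAME<<DELIM ... DELIM form is how multi-line values are conveyed.
    with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
        fh.write(f"{name}<<EOF\n{value}\nEOF\n")

# A later step can then read ${{ steps.<id>.outputs.SSH_PRIVATE_KEY }}.
write_multiline_output("SSH_PRIVATE_KEY", "-----BEGIN OPENSSH PRIVATE KEY-----\n(fake)\n-----END OPENSSH PRIVATE KEY-----")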
.github/actions/run_awx_devel/action.yml (2 changes, vendored)

@@ -36,7 +36,7 @@ runs:
    - name: Upgrade ansible-core
      shell: bash
-     run: python3 -m pip install --upgrade ansible-core
+     run: python3 -m pip install --upgrade 'ansible-core<2.18.0'

    - name: Install system deps
      shell: bash
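This action and the Makefile hunks further down both cap the ansible-core version ('<2.18.0' here, '<2.19' elsewhere). A quick way to check what a pip-style specifier admits, sketched with the third-party 'packaging' library (the candidate versions are arbitrary examples):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet("<2.18.0")  # the pin added in this hunk
for candidate in ("2.17.5", "2.18.0", "2.19.0"):
    verdict = "allowed" if Version(candidate) in spec else "excluded"
    print(candidate, verdict)
# prints: 2.17.5 allowed / 2.18.0 excluded / 2.19.0 excluded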
.github/workflows/ci.yml (47 changes, vendored)

@@ -39,12 +39,16 @@ jobs:
        command: /start_tests.sh test_collection_all
        coverage-upload-name: "awx-collection"
      - name: api-schema
<<<<<<< HEAD
        command: >-
          /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
          github.event.pull_request.base.ref || github.ref_name
          }}
        coverage-upload-name: ""
=======
        command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
>>>>>>> tower/test_stable-2.6
    steps:
      - uses: actions/checkout@v4
        with:

@@ -130,9 +134,15 @@ jobs:
        with:
          show-progress: false

<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.x'
=======
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
>>>>>>> tower/test_stable-2.6

      - uses: ./.github/actions/run_awx_devel
        id: awx

@@ -143,11 +153,14 @@ jobs:
      - name: Run live dev env tests
        run: docker exec tools_awx_1 /bin/bash -c "make live_test"
<<<<<<< HEAD

      - uses: ./.github/actions/upload_awx_devel_logs
        if: always()
        with:
          log-filename: live-tests.log
=======
>>>>>>> tower/test_stable-2.6

  awx-operator:
    runs-on: ubuntu-latest

@@ -180,6 +193,26 @@ jobs:
        run: |
          python3 -m pip install docker

+     - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+       id: generate_key
+       shell: bash
+       run: |
+         if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
+           ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+           echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+           cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+           echo "EOF" >> $GITHUB_OUTPUT
+         else
+           echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+           echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
+           echo "EOF" >> $GITHUB_OUTPUT
+         fi
+
+     - name: Add private GitHub key to SSH agent
+       uses: webfactory/ssh-agent@v0.9.0
+       with:
+         ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
+
      - name: Build AWX image
        working-directory: awx
        run: |

@@ -278,9 +311,15 @@ jobs:
        with:
          show-progress: false

<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.x'
=======
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
>>>>>>> tower/test_stable-2.6

      - uses: ./.github/actions/run_awx_devel
        id: awx

@@ -356,12 +395,18 @@ jobs:
          persist-credentials: false
          show-progress: false

<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
        with:
          python-version: '3.x'
=======
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
>>>>>>> tower/test_stable-2.6

      - name: Upgrade ansible-core
-       run: python3 -m pip install --upgrade ansible-core
+       run: python3 -m pip install --upgrade "ansible-core<2.19"

      - name: Download coverage artifacts
        uses: actions/download-artifact@v4
.github/workflows/devel_images.yml (23 changes, vendored)

@@ -10,6 +10,7 @@ on:
      - devel
      - release_*
      - feature_*
+     - stable-*
jobs:
  push-development-images:
    runs-on: ubuntu-latest

@@ -69,9 +70,31 @@ jobs:
          make ui
        if: matrix.build-targets.image-name == 'awx'

<<<<<<< HEAD
      - uses: ./.github/actions/setup-ssh-agent
        with:
          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
=======
      - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
        id: generate_key
        shell: bash
        run: |
          if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
            ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
            cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
            echo "EOF" >> $GITHUB_OUTPUT
          else
            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
            echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
            echo "EOF" >> $GITHUB_OUTPUT
          fi

      - name: Add private GitHub key to SSH agent
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
>>>>>>> tower/test_stable-2.6

      - name: Build and push AWX devel images
        run: |
.github/workflows/docs.yml (4 changes, vendored)

@@ -12,7 +12,11 @@ jobs:
        with:
          show-progress: false

<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
=======
      - uses: actions/setup-python@v5
>>>>>>> tower/test_stable-2.6
        with:
          python-version: '3.x'
.github/workflows/feature_branch_sync.yml (35 changes, vendored, new file)

@@ -0,0 +1,35 @@
name: Rebase release_4.6-next and stable-2.6

on:
  push:
    branches:
      - release_4.6
  workflow_dispatch:
    # Allows manual triggering of the workflow from the GitHub UI

jobs:
  rebase:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout stable-2.6 branch
        uses: actions/checkout@v4
        with:
          ref: stable-2.6
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Fetch release_4.6 branch for rebase
        run: git fetch origin release_4.6:release_4.6

      - name: Attempt Rebase release_4.6 into stable-2.6
        id: rebase_attempt
        run: |
          git config user.name "GitHub Actions"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git checkout stable-2.6
          git rebase release_4.6

      - name: Force Push Rebased stable-2.6 Branch
        run: |
          git push --force origin stable-2.6
.github/workflows/label_pr.yml (4 changes, vendored)

@@ -33,7 +33,11 @@ jobs:
        with:
          show-progress: false

<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
=======
      - uses: actions/setup-python@v5
>>>>>>> tower/test_stable-2.6
        with:
          python-version: '3.x'
.github/workflows/upload_schema.yml (55 changes, vendored)

@@ -11,6 +11,7 @@ on:
      - devel
      - release_**
      - feature_**
+     - stable-**
jobs:
  push:
    runs-on: ubuntu-latest

@@ -23,28 +24,76 @@ jobs:
        with:
          show-progress: false

<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
=======
      - name: Set lower case owner name
        shell: bash
        run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
        env:
          OWNER: '${{ github.repository_owner }}'

      - name: Get python version from Makefile
        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

      - name: Install python ${{ env.py_version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.py_version }}
>>>>>>> tower/test_stable-2.6

      - name: Log in to registry
        run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

<<<<<<< HEAD
      - uses: ./.github/actions/setup-ssh-agent
        with:
          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

      - name: Pre-pull image to warm build cache
=======
      - name: Pre-pull latest devel image to warm cache
        shell: bash
>>>>>>> tower/test_stable-2.6
        run: |
-         docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+         DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+         COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
+         docker pull -q `make print-DEVEL_IMAGE_NAME`
+       continue-on-error: true

+     - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+       id: generate_key
+       shell: bash
+       run: |
+         if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
+           ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+           echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+           cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+           echo "EOF" >> $GITHUB_OUTPUT
+         else
+           echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+           echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
+           echo "EOF" >> $GITHUB_OUTPUT
+         fi
+
+     - name: Add private GitHub key to SSH agent
+       uses: webfactory/ssh-agent@v0.9.0
+       with:
+         ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
+
      - name: Build image
        run: |
-         DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+         DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+         COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
+         make docker-compose-build

      - name: Generate API Schema
        run: |
+         DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+         COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
          docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
-           --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
+           --workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema

      - name: Upload API Schema
        env:
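Two details of the stable-2.6 side above are easy to misread. ${OWNER,,} is bash's lowercase parameter expansion, needed because ghcr.io repository paths must be lowercase while github.repository_owner may contain capitals; and values appended to the GITHUB_ENV file become environment variables for later steps. A Python sketch of that same step, assuming an Actions-style environment:

import os

owner = os.environ["OWNER"]  # e.g. "Ansible" via github.repository_owner
with open(os.environ["GITHUB_ENV"], "a", encoding="utf-8") as fh:
    fh.write(f"OWNER_LC={owner.lower()}\n")  # equivalent of ${OWNER,,}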
.gitignore (1 change, vendored)

@@ -122,6 +122,7 @@ reports
 local/
 *.mo
 requirements/vendor
+requirements/requirements_git.credentials.txt
 .i18n_built
 .idea/*
 *credentials*.y*ml*
MANIFEST.in (1 change)

@@ -28,3 +28,4 @@ include COPYING
 include Makefile
 prune awx/public
 prune awx/projects
+prune requirements/requirements_git.credentials.txt
Makefile (6 changes)

@@ -77,7 +77,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==70.3.0 setuptools_scm[toml]==8.1.0 wheel==0.45.1 cython==3.0.11
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==80.9.0 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==3.1.3

 NAME ?= awx

@@ -378,7 +378,7 @@ test_collection:
	if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi && \
-	if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
+	if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install "ansible-core<2.19"; fi
	ansible --version
	py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
	@if [ "${GITHUB_ACTIONS}" = "true" ]; \

@@ -417,7 +417,7 @@ install_collection: build_collection
 test_collection_sanity:
	rm -rf awx_collection_build/
	rm -rf $(COLLECTION_INSTALL)
-	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
+	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install "ansible-core<2.19"; fi
	ansible --version
	COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
	cd $(COLLECTION_INSTALL) && \
@@ -162,9 +162,9 @@ def get_view_description(view, html=False):

 def get_default_schema():
     if settings.DYNACONF.is_development_mode:
-        from awx.api.swagger import schema_view
+        from awx.api.swagger import AutoSchema

-        return schema_view
+        return AutoSchema()
     else:
         return views.APIView.schema
@@ -844,7 +844,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
     if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
         ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
         qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
-        auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first()
+        auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first()
         if auditor_role:
             qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
         return qs.distinct()
@@ -234,6 +234,13 @@ class UserPermission(ModelAccessPermission):
         raise PermissionDenied()


+class IsSystemAdmin(permissions.BasePermission):
+    def has_permission(self, request, view):
+        if not (request.user and request.user.is_authenticated):
+            return False
+        return request.user.is_superuser
+
+
 class IsSystemAdminOrAuditor(permissions.BasePermission):
     """
     Allows write access only to system admin users.
@@ -2839,7 +2839,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
     {
         "role": {
             "id": None,
-            "name": _("Controller System Auditor"),
+            "name": _("Platform Auditor"),
             "description": _("Can view all aspects of the system"),
             "user_capabilities": {"unattach": False},
         },
@@ -5998,7 +5998,7 @@ class InstanceGroupSerializer(BaseSerializer):
     if self.instance and not self.instance.is_container_group:
         raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))

-    pod_spec_override_json = None
+    pod_spec_override_json = {}
     # detect if the value is yaml or json; if yaml, convert to json
     try:
         # convert yaml to json
@@ -55,7 +55,7 @@ from wsgiref.util import FileWrapper

 # django-ansible-base
 from ansible_base.lib.utils.requests import get_remote_hosts
-from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.rbac.models import RoleEvaluation
 from ansible_base.rbac import permission_registry

 # AWX

@@ -85,7 +85,6 @@ from awx.api.generics import (
 from awx.api.views.labels import LabelSubListCreateAttachDetachView
 from awx.api.versioning import reverse
 from awx.main import models
-from awx.main.models.rbac import get_role_definition
 from awx.main.utils import (
     camelcase_to_underscore,
     extract_ansible_vars,
@@ -751,17 +750,9 @@ class TeamProjectsList(SubListAPIView):
     def get_queryset(self):
         team = self.get_parent_object()
         self.check_parent_access(team)
-        model_ct = permission_registry.content_type_model.objects.get_for_model(self.model)
-        parent_ct = permission_registry.content_type_model.objects.get_for_model(self.parent_model)
-        rd = get_role_definition(team.member_role)
-        role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
-        if role is None:
-            # Team has no permissions, therefore team has no projects
-            return self.model.objects.none()
-        else:
-            project_qs = self.model.accessible_objects(self.request.user, 'read_role')
-            return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
+        my_qs = self.model.accessible_objects(self.request.user, 'read_role')
+        team_qs = models.Project.accessible_objects(team, 'read_role')
+        return my_qs & team_qs


 class TeamActivityStreamList(SubListAPIView):
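The rewritten get_queryset replaces the manual ObjectRole/RoleEvaluation walk with an intersection of two accessible_objects querysets: in Django, qs_a & qs_b combines both filter trees with AND, so the result is exactly the projects readable by both the requesting user and the team. A set-based analogy of those semantics (the IDs are made up):

user_readable = {1, 2, 3, 5}   # ids from Project.accessible_objects(request.user, 'read_role')
team_readable = {2, 5, 8}      # ids from Project.accessible_objects(team, 'read_role')
print(sorted(user_readable & team_readable))  # [2, 5] -- what my_qs & team_qs returns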
@@ -876,13 +867,23 @@ class ProjectTeamsList(ListAPIView):
     serializer_class = serializers.TeamSerializer

     def get_queryset(self):
-        p = get_object_or_404(models.Project, pk=self.kwargs['pk'])
-        if not self.request.user.can_access(models.Project, 'read', p):
+        parent = get_object_or_404(models.Project, pk=self.kwargs['pk'])
+        if not self.request.user.can_access(models.Project, 'read', parent):
             raise PermissionDenied()
-        project_ct = ContentType.objects.get_for_model(models.Project)
+        project_ct = ContentType.objects.get_for_model(parent)
         team_ct = ContentType.objects.get_for_model(self.model)
-        all_roles = models.Role.objects.filter(Q(descendents__content_type=project_ct) & Q(descendents__object_id=p.pk), content_type=team_ct)
-        return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in all_roles])
+        roles_on_project = models.Role.objects.filter(
+            content_type=project_ct,
+            object_id=parent.pk,
+        )
+
+        team_member_parent_roles = models.Role.objects.filter(children__in=roles_on_project, role_field='member_role', content_type=team_ct).distinct()
+
+        team_ids = team_member_parent_roles.values_list('object_id', flat=True)
+        my_qs = self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=team_ids)
+        return my_qs


 class ProjectSchedulesList(SubListCreateAPIView):
@@ -12,7 +12,7 @@ import re
 import asn1
 from awx.api import serializers
 from awx.api.generics import GenericAPIView, Response
-from awx.api.permissions import IsSystemAdminOrAuditor
+from awx.api.permissions import IsSystemAdmin
 from awx.main import models
 from cryptography import x509
 from cryptography.hazmat.primitives import hashes, serialization

@@ -48,7 +48,7 @@ class InstanceInstallBundle(GenericAPIView):
     name = _('Install Bundle')
     model = models.Instance
     serializer_class = serializers.InstanceSerializer
-    permission_classes = (IsSystemAdminOrAuditor,)
+    permission_classes = (IsSystemAdmin,)

     def get(self, request, *args, **kwargs):
         instance_obj = self.get_object()
@@ -1094,3 +1094,13 @@ register(
     category=('PolicyAsCode'),
     category_slug='policyascode',
 )
+
+
+def policy_as_code_validate(serializer, attrs):
+    opa_host = attrs.get('OPA_HOST', '')
+    if opa_host and (opa_host.startswith('http://') or opa_host.startswith('https://')):
+        raise serializers.ValidationError({'OPA_HOST': _("OPA_HOST should not include 'http://' or 'https://' prefixes. Please enter only the hostname.")})
+    return attrs
+
+
+register_validate('policyascode', policy_as_code_validate)
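The new validator only checks that OPA_HOST carries no URL scheme; everything else passes through unchanged. A standalone sketch of the same check outside Django, with ValueError standing in for the DRF ValidationError:

def validate_opa_host(opa_host: str) -> str:
    # Reject scheme-prefixed values; only a bare hostname is accepted.
    if opa_host and (opa_host.startswith('http://') or opa_host.startswith('https://')):
        raise ValueError("OPA_HOST should not include 'http://' or 'https://' prefixes.")
    return opa_host

print(validate_opa_host('opa.example.com'))     # accepted, returned unchanged
# validate_opa_host('https://opa.example.com')  # would raise ValueError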
awx/main/credential_plugins/aim.py (140 changes, new file)

@@ -0,0 +1,140 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status

from urllib.parse import quote, urlencode, urljoin

from django.utils.translation import gettext_lazy as _
import requests as requests

aim_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('CyberArk CCP URL'),
            'type': 'string',
            'format': 'url',
        },
        {
            'id': 'webservice_id',
            'label': _('Web Service ID'),
            'type': 'string',
            'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
        },
        {
            'id': 'app_id',
            'label': _('Application ID'),
            'type': 'string',
            'secret': True,
        },
        {
            'id': 'client_key',
            'label': _('Client Key'),
            'type': 'string',
            'secret': True,
            'multiline': True,
        },
        {
            'id': 'client_cert',
            'label': _('Client Certificate'),
            'type': 'string',
            'secret': True,
            'multiline': True,
        },
        {
            'id': 'verify',
            'label': _('Verify SSL Certificates'),
            'type': 'boolean',
            'default': True,
        },
    ],
    'metadata': [
        {
            'id': 'object_query',
            'label': _('Object Query'),
            'type': 'string',
            'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
        },
        {'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
        {
            'id': 'object_property',
            'label': _('Object Property'),
            'type': 'string',
            'help_text': _('The property of the object to return. Available properties: Username, Password and Address.'),
        },
        {
            'id': 'reason',
            'label': _('Reason'),
            'type': 'string',
            'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'),
        },
    ],
    'required': ['url', 'app_id', 'object_query'],
}


def aim_backend(**kwargs):
    url = kwargs['url']
    client_cert = kwargs.get('client_cert', None)
    client_key = kwargs.get('client_key', None)
    verify = kwargs['verify']
    webservice_id = kwargs.get('webservice_id', '')
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
    object_property = kwargs.get('object_property', '')
    reason = kwargs.get('reason', None)
    if webservice_id == '':
        webservice_id = 'AIMWebService'

    query_params = {
        'AppId': app_id,
        'Query': object_query,
        'QueryFormat': object_query_format,
    }
    if reason:
        query_params['reason'] = reason

    request_qs = '?' + urlencode(query_params, quote_via=quote)
    request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))

    with CertFiles(client_cert, client_key) as cert:
        res = requests.get(
            request_url + request_qs,
            timeout=30,
            cert=cert,
            verify=verify,
            allow_redirects=False,
        )
    sensitive_query_params = {
        'AppId': '****',
        'Query': '****',
        'QueryFormat': object_query_format,
    }
    if reason:
        sensitive_query_params['reason'] = '****'
    sensitive_request_qs = urlencode(
        sensitive_query_params,
        safe='*',
        quote_via=quote,
    )
    res.url = f'{request_url}?{sensitive_request_qs}'

    raise_for_status(res)
    # CCP returns the property name capitalized, username is camel case
    # so we need to handle that case
    if object_property == '':
        object_property = 'Content'
    elif object_property.lower() == 'username':
        object_property = 'UserName'
    elif object_property.lower() == 'password':
        object_property = 'Content'
    elif object_property.lower() == 'address':
        object_property = 'Address'
    elif object_property not in res:
        raise KeyError('Property {} not found in object, available properties: Username, Password and Address'.format(object_property))
    else:
        object_property = object_property.capitalize()

    return res.json()[object_property]


aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
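One detail worth noting in aim_backend: before calling raise_for_status, it overwrites res.url with a re-encoded query string whose AppId and Query values are masked, so a failed lookup cannot leak those secrets into logs or tracebacks. The masking itself is plain urlencode usage; safe='*' keeps the mask literal instead of percent-encoding the asterisks (the values below are examples):

from urllib.parse import quote, urlencode

sensitive = {'AppId': '****', 'Query': '****', 'QueryFormat': 'Exact'}
masked_qs = urlencode(sensitive, safe='*', quote_via=quote)
print(masked_qs)  # AppId=****&Query=****&QueryFormat=Exact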
awx/main/credential_plugins/azure_kv.py (114 changes, new file)

@@ -0,0 +1,114 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import (
    ClientSecretCredential,
    CredentialUnavailableError,
    ManagedIdentityCredential,
)
from azure.core.credentials import TokenCredential
from msrestazure import azure_cloud

from .plugin import CredentialPlugin

from django.utils.translation import gettext_lazy as _


# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]


azure_keyvault_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('Vault URL (DNS Name)'),
            'type': 'string',
            'format': 'url',
        },
        {'id': 'client', 'label': _('Client ID'), 'type': 'string'},
        {
            'id': 'secret',
            'label': _('Client Secret'),
            'type': 'string',
            'secret': True,
        },
        {'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'},
        {
            'id': 'cloud_name',
            'label': _('Cloud Environment'),
            'help_text': _('Specify which azure cloud environment to use.'),
            'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
            'default': default_cloud.name,
        },
    ],
    'metadata': [
        {
            'id': 'secret_field',
            'label': _('Secret Name'),
            'type': 'string',
            'help_text': _('The name of the secret to look up.'),
        },
        {
            'id': 'secret_version',
            'label': _('Secret Version'),
            'type': 'string',
            'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
        },
    ],
    'required': ['url', 'secret_field'],
}


def _initialize_credential(
    tenant: str = '',
    client: str = '',
    secret: str = '',
) -> TokenCredential:
    explicit_credentials_provided = all((tenant, client, secret))

    if explicit_credentials_provided:
        return ClientSecretCredential(
            tenant_id=tenant,
            client_id=client,
            client_secret=secret,
        )

    return ManagedIdentityCredential()


def azure_keyvault_backend(
    *, url: str, client: str = '', secret: str = '', tenant: str = '', secret_field: str, secret_version: str = '', **kwargs
) -> str | None:
    """Get a credential and retrieve a secret from an Azure Key Vault.

    An empty string for an optional parameter counts as not provided.

    :param url: An Azure Key Vault URI.
    :param client: The Client ID (optional).
    :param secret: The Client Secret (optional).
    :param tenant: The Tenant ID (optional).
    :param secret_field: The name of the secret to retrieve from the
        vault.
    :param secret_version: The version of the secret to retrieve
        (optional).
    :returns: The secret from the Key Vault.
    :raises RuntimeError: If the software is not being run on an Azure
        VM.
    """
    chosen_credential = _initialize_credential(tenant, client, secret)
    keyvault = SecretClient(credential=chosen_credential, vault_url=url)
    try:
        keyvault_secret = keyvault.get_secret(
            name=secret_field,
            version=secret_version,
        )
    except CredentialUnavailableError as secret_lookup_err:
        raise RuntimeError(
            'You are not operating on an Azure VM, so the Managed Identity '
            'feature is unavailable. Please provide the full Client ID, '
            'Client Secret, and Tenant ID or run the software on an Azure VM.',
        ) from secret_lookup_err
    return keyvault_secret.value


azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
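_initialize_credential falls back to the VM's managed identity whenever any of tenant/client/secret is missing; all() over the three strings is what makes partial credentials fall back too. A minimal standalone sketch of just that decision, with no Azure SDK required (the return strings merely name the credential class that would be built):

def pick_credential_kind(tenant: str = '', client: str = '', secret: str = '') -> str:
    # all() is False if any field is an empty string, so partial
    # credentials also fall back to the managed identity path.
    if all((tenant, client, secret)):
        return 'ClientSecretCredential'
    return 'ManagedIdentityCredential'

print(pick_credential_kind('t-id', 'c-id', 's3cret'))  # ClientSecretCredential
print(pick_credential_kind(client='c-id'))             # ManagedIdentityCredential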
awx/main/credential_plugins/github_app.py (176 changes, new file)

@@ -0,0 +1,176 @@
"""GitHub App Installation Access Token Credential Plugin.

This module defines a credential plugin for making use of the
GitHub Apps mechanism, allowing authentication via GitHub App
installation-scoped access tokens.

Functions:

- :func:`extract_github_app_install_token`: Generates a GitHub App
  Installation token.
- ``github_app_lookup``: Defines the credential plugin interface.
"""

from github import Auth as Auth, Github
from github.Consts import DEFAULT_BASE_URL as PUBLIC_GH_API_URL
from github.GithubException import (
    BadAttributeException,
    GithubException,
    UnknownObjectException,
)

from django.utils.translation import gettext_lazy as _

from .plugin import CredentialPlugin

github_app_inputs = {
    'fields': [
        {
            'id': 'github_api_url',
            'label': _('GitHub API endpoint URL'),
            'type': 'string',
            'help_text': _(
                'Specify the GitHub API URL here. In the case of an Enterprise: '
                'https://gh.your.org/api/v3 (self-hosted) '
                'or https://api.SUBDOMAIN.ghe.com (cloud)',
            ),
            'default': 'https://api.github.com',
        },
        {
            'id': 'app_or_client_id',
            'label': _('GitHub App ID'),
            'type': 'string',
            'help_text': _(
                'The GitHub App ID created by the GitHub Admin. '
                'Example App ID: 1121547 '
                'found on https://github.com/settings/apps/ '
                'required for creating a JWT token for authentication.',
            ),
        },
        {
            'id': 'install_id',
            'label': _('GitHub App Installation ID'),
            'type': 'string',
            'help_text': _(
                'The Installation ID from the GitHub App installation '
                'generated by the GitHub Admin. '
                'Example: 59980338 extracted from the installation link '
                'https://github.com/settings/installations/59980338 '
                'required for creating a limited GitHub app token.',
            ),
        },
        {
            'id': 'private_rsa_key',
            'label': _('RSA Private Key'),
            'type': 'string',
            'format': 'ssh_private_key',
            'secret': True,
            'multiline': True,
            'help_text': _(
                'Paste the contents of the PEM file that the GitHub Admin provided to you with the app and installation IDs.',
            ),
        },
    ],
    'metadata': [
        {
            'id': 'description',
            'label': _('Description (Optional)'),
            'type': 'string',
            'help_text': _('To be removed after UI is updated'),
        },
    ],
    'required': ['app_or_client_id', 'install_id', 'private_rsa_key'],
}

GH_CLIENT_ID_TRAILER_LENGTH = 16
HEXADECIMAL_BASE = 16


def _is_intish(app_id_candidate):
    return isinstance(app_id_candidate, int) or app_id_candidate.isdigit()


def _is_client_id(client_id_candidate):
    client_id_prefix = 'Iv1.'
    if not client_id_candidate.startswith(client_id_prefix):
        return False

    client_id_trailer = client_id_candidate[len(client_id_prefix) :]

    if len(client_id_trailer) != GH_CLIENT_ID_TRAILER_LENGTH:
        return False

    try:
        int(client_id_trailer, base=HEXADECIMAL_BASE)
    except ValueError:
        return False

    return True


def _is_app_or_client_id(app_or_client_id_candidate):
    if _is_intish(app_or_client_id_candidate):
        return True
    return _is_client_id(app_or_client_id_candidate)


def _assert_ids_look_acceptable(app_or_client_id, install_id):
    if not _is_app_or_client_id(app_or_client_id):
        raise ValueError(
            'Expected GitHub App or Client ID to be an integer or a string '
            f'starting with `Iv1.` followed by 16 hexadecimal digits, '
            f'but got {app_or_client_id !r}',
        )
    if isinstance(app_or_client_id, str) and _is_client_id(app_or_client_id):
        raise ValueError(
            'Expected GitHub App ID must be an integer or a string '
            f'with an all-digit value, but got {app_or_client_id !r}. '
            'Client IDs are currently unsupported.',
        )
    if not _is_intish(install_id):
        raise ValueError(
            'Expected GitHub App Installation ID to be an integer' f' but got {install_id !r}',
        )


def extract_github_app_install_token(github_api_url, app_or_client_id, private_rsa_key, install_id, **_discarded_kwargs):
    """Generate a GH App Installation access token."""
    _assert_ids_look_acceptable(app_or_client_id, install_id)

    auth = Auth.AppAuth(
        app_id=str(app_or_client_id),
        private_key=private_rsa_key,
    ).get_installation_auth(installation_id=int(install_id))

    Github(
        auth=auth,
        base_url=github_api_url if github_api_url else PUBLIC_GH_API_URL,
    )

    doc_url = 'See https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app'
    app_install_context = f'app_or_client_id: {app_or_client_id}, install_id: {install_id}'

    try:
        return auth.token
    except UnknownObjectException as github_install_not_found_exc:
        raise ValueError(
            f'Failed to retrieve a GitHub installation token from {github_api_url} using {app_install_context}. Is the app installed? {doc_url}.'
            f'\n\n{github_install_not_found_exc}',
        ) from github_install_not_found_exc
    except GithubException as pygithub_catchall_exc:
        raise RuntimeError(
            f'An unexpected error happened while talking to GitHub API @ {github_api_url} ({app_install_context}). '
            f'Is the app or client ID correct? And the private RSA key? {doc_url}.'
            f'\n\n{pygithub_catchall_exc}',
        ) from pygithub_catchall_exc
    except BadAttributeException as github_broken_exc:
        raise RuntimeError(
            f'Broken GitHub @ {github_api_url} with {app_install_context}. It is a bug, please report it to the developers.\n\n{github_broken_exc}',
        ) from github_broken_exc


github_app_lookup_plugin = CredentialPlugin(
    'GitHub App Installation Access Token Lookup',
    inputs=github_app_inputs,
    backend=extract_github_app_install_token,
)
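The plugin tells numeric App IDs apart from 'Iv1.'-style Client IDs purely by shape: the 'Iv1.' prefix followed by sixteen hexadecimal digits. A standalone sketch of that check (the sample IDs are made up):

def looks_like_client_id(candidate: str) -> bool:
    # GitHub App Client IDs look like 'Iv1.' plus 16 hex digits.
    prefix = 'Iv1.'
    trailer = candidate[len(prefix):]
    if not candidate.startswith(prefix) or len(trailer) != 16:
        return False
    try:
        int(trailer, base=16)
    except ValueError:
        return False
    return True

print(looks_like_client_id('Iv1.0123456789abcdef'))  # True
print(looks_like_client_id('1121547'))               # False -- a plain App ID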
@@ -4,6 +4,7 @@
 from django.core.management.base import BaseCommand
 from django.db import transaction
 from crum import impersonate
+from ansible_base.resource_registry.signals.handlers import no_reverse_sync
 from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
 from awx.main.signals import disable_computed_fields

@@ -16,8 +17,9 @@ class Command(BaseCommand):
     def handle(self, *args, **kwargs):
         # Wrap the operation in an atomic block, so we do not on accident
         # create the organization but not create the project, etc.
-        with transaction.atomic():
-            self._handle()
+        with no_reverse_sync():
+            with transaction.atomic():
+                self._handle()

     def _handle(self):
         changed = False
247
awx/main/management/commands/import_auth_config_to_gateway.py
Normal file
247
awx/main/management/commands/import_auth_config_to_gateway.py
Normal file
@ -0,0 +1,247 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
from awx.sso.utils.azure_ad_migrator import AzureADMigrator
|
||||||
|
from awx.sso.utils.github_migrator import GitHubMigrator
|
||||||
|
from awx.sso.utils.ldap_migrator import LDAPMigrator
|
||||||
|
from awx.sso.utils.oidc_migrator import OIDCMigrator
|
||||||
|
from awx.sso.utils.saml_migrator import SAMLMigrator
|
||||||
|
from awx.sso.utils.radius_migrator import RADIUSMigrator
|
||||||
|
from awx.sso.utils.settings_migrator import SettingsMigrator
|
||||||
|
from awx.sso.utils.tacacs_migrator import TACACSMigrator
|
||||||
|
from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator
|
||||||
|
from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError
|
||||||
|
from awx.main.utils.gateway_client_svc_token import GatewayClientSVCToken
|
||||||
|
from ansible_base.resource_registry.tasks.sync import create_api_client
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = 'Import existing auth provider configurations to AAP Gateway via API requests'
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument('--basic-auth', action='store_true', help='Use HTTP Basic Authentication between Controller and Gateway')
|
||||||
|
parser.add_argument(
|
||||||
|
'--skip-all-authenticators',
|
||||||
|
action='store_true',
|
||||||
|
help='Skip importing all authenticators [GitHub, OIDC, SAML, Azure AD, LDAP, RADIUS, TACACS+, Google OAuth2]',
|
||||||
|
)
|
||||||
|
parser.add_argument('--skip-oidc', action='store_true', help='Skip importing generic OIDC authenticators')
|
||||||
|
parser.add_argument('--skip-github', action='store_true', help='Skip importing GitHub authenticator')
|
||||||
|
parser.add_argument('--skip-ldap', action='store_true', help='Skip importing LDAP authenticators')
|
||||||
|
parser.add_argument('--skip-ad', action='store_true', help='Skip importing Azure AD authenticator')
|
||||||
|
parser.add_argument('--skip-saml', action='store_true', help='Skip importing SAML authenticator')
|
||||||
|
parser.add_argument('--skip-radius', action='store_true', help='Skip importing RADIUS authenticator')
|
||||||
|
parser.add_argument('--skip-tacacs', action='store_true', help='Skip importing TACACS+ authenticator')
|
||||||
|
parser.add_argument('--skip-google', action='store_true', help='Skip importing Google OAuth2 authenticator')
|
||||||
|
parser.add_argument('--skip-settings', action='store_true', help='Skip importing settings')
|
||||||
|
parser.add_argument(
|
||||||
|
'--force',
|
||||||
|
action='store_true',
|
||||||
|
help='Force migration even if configurations already exist. Does not apply to skipped authenticators nor skipped settings.',
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
# Read Gateway connection parameters from environment variables
|
||||||
|
gateway_base_url = os.getenv('GATEWAY_BASE_URL')
|
||||||
|
gateway_user = os.getenv('GATEWAY_USER')
|
||||||
|
gateway_password = os.getenv('GATEWAY_PASSWORD')
|
||||||
|
gateway_skip_verify = os.getenv('GATEWAY_SKIP_VERIFY', '').lower() in ('true', '1', 'yes', 'on')
|
||||||
|
|
||||||
|
skip_all_authenticators = options['skip_all_authenticators']
|
||||||
|
skip_oidc = options['skip_oidc']
|
||||||
|
skip_github = options['skip_github']
|
||||||
|
skip_ldap = options['skip_ldap']
|
||||||
|
skip_ad = options['skip_ad']
|
||||||
|
skip_saml = options['skip_saml']
|
||||||
|
skip_radius = options['skip_radius']
|
||||||
|
skip_tacacs = options['skip_tacacs']
|
||||||
|
skip_google = options['skip_google']
|
||||||
|
skip_settings = options['skip_settings']
|
||||||
|
force = options['force']
|
||||||
|
basic_auth = options['basic_auth']
|
||||||
|
|
||||||
|
management_command_validation_errors = []
|
||||||
|
|
||||||
|
# If the management command isn't called with all parameters needed to talk to Gateway, consider
|
||||||
|
# it a dry-run and exit cleanly
|
||||||
|
if not gateway_base_url and basic_auth:
|
||||||
|
management_command_validation_errors.append('- GATEWAY_BASE_URL: Base URL of the AAP Gateway instance')
|
||||||
|
if (not gateway_user or not gateway_password) and basic_auth:
|
||||||
|
management_command_validation_errors.append('- GATEWAY_USER: Username for AAP Gateway authentication')
|
||||||
|
management_command_validation_errors.append('- GATEWAY_PASSWORD: Password for AAP Gateway authentication')
|
||||||
|
|
||||||
|
if len(management_command_validation_errors) > 0:
|
||||||
|
self.stdout.write(self.style.WARNING('Missing required environment variables:'))
|
||||||
|
for validation_error in management_command_validation_errors:
|
||||||
|
self.stdout.write(self.style.WARNING(f"{validation_error}"))
|
||||||
|
self.stdout.write(self.style.WARNING('- GATEWAY_SKIP_VERIFY: Skip SSL certificate verification (optional)'))
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
resource_api_client = None
|
||||||
|
response = None
|
||||||
|
|
||||||
|
if basic_auth:
|
||||||
|
self.stdout.write(self.style.SUCCESS('HTTP Basic Auth: true'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Gateway Base URL: {gateway_base_url}'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Gateway User: {gateway_user}'))
|
||||||
|
self.stdout.write(self.style.SUCCESS('Gateway Password: *******************'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Skip SSL Verification: {gateway_skip_verify}'))
|
||||||
|
|
||||||
|
else:
|
||||||
|
resource_api_client = create_api_client()
|
||||||
|
resource_api_client.verify_https = not gateway_skip_verify
|
||||||
|
response = resource_api_client.get_service_metadata()
|
||||||
|
parsed_url = urlparse(resource_api_client.base_url)
|
||||||
|
resource_api_client.base_url = urlunparse((parsed_url.scheme, parsed_url.netloc, '/', '', '', ''))
|
||||||
|
|
||||||
|
self.stdout.write(self.style.SUCCESS('Gateway Service Token: true'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Gateway Base URL: {resource_api_client.base_url}'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Gateway JWT User: {resource_api_client.jwt_user_id}'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Gateway JWT Expiration: {resource_api_client.jwt_expiration}'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Skip SSL Verification: {not resource_api_client.verify_https}'))
|
||||||
|
self.stdout.write(self.style.SUCCESS(f'Connection Validated: {response.status_code == 200}'))
|
||||||
|
|
||||||
|
if response.status_code != 200:
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.ERROR(
|
||||||
|
f'Gateway Service Token is unable to connect to Gateway via the base URL {resource_api_client.base_url}. Recieved HTTP response code {response.status_code}'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
+
+        # Create Gateway client and run migrations
+        try:
+            self.stdout.write(self.style.SUCCESS('\n=== Connecting to Gateway ==='))
+            pre_gateway_client = None
+            if basic_auth:
+                self.stdout.write(self.style.SUCCESS('\n=== With Basic HTTP Auth ==='))
+                pre_gateway_client = GatewayClient(
+                    base_url=gateway_base_url, username=gateway_user, password=gateway_password, skip_verify=gateway_skip_verify, command=self
+                )
+            else:
+                self.stdout.write(self.style.SUCCESS('\n=== With Service Token ==='))
+                pre_gateway_client = GatewayClientSVCToken(resource_api_client=resource_api_client, command=self)
+
+            with pre_gateway_client as gateway_client:
+                self.stdout.write(self.style.SUCCESS('Successfully connected to Gateway'))
+
+                # Initialize migrators
+                migrators = []
+                if not skip_all_authenticators:
+                    if not skip_oidc:
+                        migrators.append(OIDCMigrator(gateway_client, self, force=force))
+
+                    if not skip_github:
+                        migrators.append(GitHubMigrator(gateway_client, self, force=force))
+
+                    if not skip_saml:
+                        migrators.append(SAMLMigrator(gateway_client, self, force=force))
+
+                    if not skip_ad:
+                        migrators.append(AzureADMigrator(gateway_client, self, force=force))
+
+                    if not skip_ldap:
+                        migrators.append(LDAPMigrator(gateway_client, self, force=force))
+
+                    if not skip_radius:
+                        migrators.append(RADIUSMigrator(gateway_client, self, force=force))
+
+                    if not skip_tacacs:
+                        migrators.append(TACACSMigrator(gateway_client, self, force=force))
+
+                    if not skip_google:
+                        migrators.append(GoogleOAuth2Migrator(gateway_client, self, force=force))
+
+                    if not migrators:
+                        self.stdout.write(self.style.WARNING('No authentication configurations found to migrate.'))
+
+                if not skip_settings:
+                    migrators.append(SettingsMigrator(gateway_client, self, force=force))
+                else:
+                    self.stdout.write(self.style.WARNING('Settings migration will not execute.'))
+
+                # Run migrations
+                total_results = {
+                    'created': 0,
+                    'updated': 0,
+                    'unchanged': 0,
+                    'failed': 0,
+                    'mappers_created': 0,
+                    'mappers_updated': 0,
+                    'mappers_failed': 0,
+                    'settings_created': 0,
+                    'settings_updated': 0,
+                    'settings_unchanged': 0,
+                    'settings_failed': 0,
+                }
+
+                if not migrators:
+                    self.stdout.write(self.style.WARNING('NO MIGRATIONS WILL EXECUTE.'))
+                    # Exit with success code since this is not an error condition
+                    sys.exit(0)
+                else:
+                    for migrator in migrators:
+                        self.stdout.write(self.style.SUCCESS(f'\n=== Migrating {migrator.get_authenticator_type()} Configurations ==='))
+                        result = migrator.migrate()
+                        self._print_export_summary(migrator.get_authenticator_type(), result)
+
+                        # Accumulate results - handle missing keys gracefully
+                        for key in total_results:
+                            total_results[key] += result.get(key, 0)
+
+                    # Overall summary
+                    self.stdout.write(self.style.SUCCESS('\n=== Migration Summary ==='))
+                    self.stdout.write(f'Total authenticators created: {total_results["created"]}')
+                    self.stdout.write(f'Total authenticators updated: {total_results["updated"]}')
+                    self.stdout.write(f'Total authenticators unchanged: {total_results["unchanged"]}')
+                    self.stdout.write(f'Total authenticators failed: {total_results["failed"]}')
+                    self.stdout.write(f'Total mappers created: {total_results["mappers_created"]}')
+                    self.stdout.write(f'Total mappers updated: {total_results["mappers_updated"]}')
+                    self.stdout.write(f'Total mappers failed: {total_results["mappers_failed"]}')
+                    self.stdout.write(f'Total settings created: {total_results["settings_created"]}')
+                    self.stdout.write(f'Total settings updated: {total_results["settings_updated"]}')
+                    self.stdout.write(f'Total settings unchanged: {total_results["settings_unchanged"]}')
+                    self.stdout.write(f'Total settings failed: {total_results["settings_failed"]}')
+
+                    # Check for any failures and return appropriate status code
+                    has_failures = total_results["failed"] > 0 or total_results["mappers_failed"] > 0 or total_results["settings_failed"] > 0
+
+                    if has_failures:
+                        self.stdout.write(self.style.ERROR('\nMigration completed with failures.'))
+                        sys.exit(1)
+                    else:
+                        self.stdout.write(self.style.SUCCESS('\nMigration completed successfully.'))
+                        sys.exit(0)
+
+        except GatewayAPIError as e:
+            self.stdout.write(self.style.ERROR(f'Gateway API Error: {e.message}'))
+            if e.status_code:
+                self.stdout.write(self.style.ERROR(f'Status Code: {e.status_code}'))
+            if e.response_data:
+                self.stdout.write(self.style.ERROR(f'Response: {e.response_data}'))
+            sys.exit(1)
+        except Exception as e:
+            self.stdout.write(self.style.ERROR(f'Unexpected error during migration: {str(e)}'))
+            sys.exit(1)
+
+    def _print_export_summary(self, config_type, result):
+        """Print a summary of the export results."""
+        self.stdout.write(f'\n--- {config_type} Export Summary ---')
+
+        if config_type in ['GitHub', 'OIDC', 'SAML', 'Azure AD', 'LDAP', 'RADIUS', 'TACACS+', 'Google OAuth2']:
+            self.stdout.write(f'Authenticators created: {result.get("created", 0)}')
+            self.stdout.write(f'Authenticators updated: {result.get("updated", 0)}')
+            self.stdout.write(f'Authenticators unchanged: {result.get("unchanged", 0)}')
+            self.stdout.write(f'Authenticators failed: {result.get("failed", 0)}')
+            self.stdout.write(f'Mappers created: {result.get("mappers_created", 0)}')
+            self.stdout.write(f'Mappers updated: {result.get("mappers_updated", 0)}')
+            self.stdout.write(f'Mappers failed: {result.get("mappers_failed", 0)}')
+
+        if config_type == 'Settings':
+            self.stdout.write(f'Settings created: {result.get("settings_created", 0)}')
+            self.stdout.write(f'Settings updated: {result.get("settings_updated", 0)}')
+            self.stdout.write(f'Settings unchanged: {result.get("settings_unchanged", 0)}')
+            self.stdout.write(f'Settings failed: {result.get("settings_failed", 0)}')
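For orientation, everything above runs inside the command's handle() method, so it can be driven like any Django management command. A minimal sketch of invoking it from Python; the command name 'migrate_to_gateway' and the exact option spellings are illustrative assumptions, not confirmed by this diff:

from django.core.management import call_command

# Hypothetical invocation; only skip_*/force options visible in handle() above
# are assumed to exist as command options.
call_command('migrate_to_gateway', skip_ldap=True, skip_radius=True, force=True)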
@@ -8,7 +8,7 @@ from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permission
 class Migration(migrations.Migration):
     dependencies = [
         ('main', '0191_add_django_permissions'),
-        ('dab_rbac', '__first__'),
+        ('dab_rbac', '0003_alter_dabpermission_codename_and_more'),
     ]

     operations = [
@@ -26,6 +26,11 @@ def change_inventory_source_org_unique(apps, schema_editor):
     logger.info(f'Set database constraint rule for {r} inventory source objects')


+def rename_wfjt(apps, schema_editor):
+    cls = apps.get_model('main', 'WorkflowJobTemplate')
+    _rename_duplicates(cls)
+
+
 class Migration(migrations.Migration):

     dependencies = [
@@ -40,6 +45,7 @@ class Migration(migrations.Migration):
             name='org_unique',
             field=models.BooleanField(blank=True, default=True, editable=False, help_text='Used internally to selectively enforce database constraint on name'),
         ),
+        migrations.RunPython(rename_wfjt, migrations.RunPython.noop),
         migrations.RunPython(change_inventory_source_org_unique, migrations.RunPython.noop),
         migrations.AddConstraint(
             model_name='unifiedjobtemplate',
@@ -1,9 +1,20 @@
 from django.db import migrations
+
+# AWX
+from awx.main.models import CredentialType
+from awx.main.utils.common import set_current_apps
+
+
+def setup_tower_managed_defaults(apps, schema_editor):
+    set_current_apps(apps)
+    CredentialType.setup_tower_managed_defaults(apps)


 class Migration(migrations.Migration):
     dependencies = [
         ('main', '0200_template_name_constraint'),
     ]

-    operations = []
+    operations = [
+        migrations.RunPython(setup_tower_managed_defaults),
+    ]
102  awx/main/migrations/0202_convert_controller_role_definitions.py  Normal file
@@ -0,0 +1,102 @@
+# Generated by Django migration for converting Controller role definitions
+
+from ansible_base.rbac.migrations._utils import give_permissions
+from django.db import migrations
+
+
+def convert_controller_role_definitions(apps, schema_editor):
+    """
+    Convert Controller role definitions to regular role definitions:
+    - Controller Organization Admin -> Organization Admin
+    - Controller Organization Member -> Organization Member
+    - Controller Team Admin -> Team Admin
+    - Controller Team Member -> Team Member
+    - Controller System Auditor -> Platform Auditor
+
+    Then delete the old Controller role definitions.
+    """
+    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
+    RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
+    RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment')
+    Permission = apps.get_model('dab_rbac', 'DABPermission')
+
+    # Mapping of old Controller role names to new role names
+    role_mappings = {
+        'Controller Organization Admin': 'Organization Admin',
+        'Controller Organization Member': 'Organization Member',
+        'Controller Team Admin': 'Team Admin',
+        'Controller Team Member': 'Team Member',
+    }
+
+    for old_name, new_name in role_mappings.items():
+        # Find the old Controller role definition
+        old_role = RoleDefinition.objects.filter(name=old_name).first()
+        if not old_role:
+            continue  # Skip if the old role doesn't exist
+
+        # Find the new role definition
+        new_role = RoleDefinition.objects.get(name=new_name)
+
+        # Collect all the assignments that need to be migrated
+        # Group by object (content_type + object_id) to batch the give_permissions calls
+        assignments_by_object = {}
+
+        # Get user assignments
+        user_assignments = RoleUserAssignment.objects.filter(role_definition=old_role).select_related('object_role')
+        for assignment in user_assignments:
+            key = (assignment.object_role.content_type_id, assignment.object_role.object_id)
+            if key not in assignments_by_object:
+                assignments_by_object[key] = {'users': [], 'teams': []}
+            assignments_by_object[key]['users'].append(assignment.user)
+
+        # Get team assignments
+        team_assignments = RoleTeamAssignment.objects.filter(role_definition=old_role).select_related('object_role')
+        for assignment in team_assignments:
+            key = (assignment.object_role.content_type_id, assignment.object_role.object_id)
+            if key not in assignments_by_object:
+                assignments_by_object[key] = {'users': [], 'teams': []}
+            assignments_by_object[key]['teams'].append(assignment.team.id)
+
+        # Use give_permissions to create new assignments with the new role definition
+        for (content_type_id, object_id), data in assignments_by_object.items():
+            if data['users'] or data['teams']:
+                give_permissions(
+                    apps,
+                    new_role,
+                    users=data['users'],
+                    teams=data['teams'],
+                    object_id=object_id,
+                    content_type_id=content_type_id,
+                )
+
+        # Delete the old role definition (this will cascade to delete old assignments and ObjectRoles)
+        old_role.delete()
+
+    # Create or get Platform Auditor
+    auditor_rd, created = RoleDefinition.objects.get_or_create(
+        name='Platform Auditor',
+        defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
+    )
+    if created:
+        auditor_rd.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))
+
+    old_rd = RoleDefinition.objects.filter(name='Controller System Auditor').first()
+    if old_rd:
+        for assignment in RoleUserAssignment.objects.filter(role_definition=old_rd):
+            RoleUserAssignment.objects.create(
+                user=assignment.user,
+                role_definition=auditor_rd,
+            )
+
+    # Delete the Controller System Auditor role
+    RoleDefinition.objects.filter(name='Controller System Auditor').delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0201_create_managed_creds'),
+    ]
+
+    operations = [
+        migrations.RunPython(convert_controller_role_definitions),
+    ]
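The grouping step in convert_controller_role_definitions above batches assignments per object, so give_permissions runs once per (content_type, object_id) pair rather than once per assignment row. A standalone sketch of that bucketing with toy data (plain tuples stand in for the real assignment models):

from collections import defaultdict

# (content_type_id, object_id, user) triples standing in for RoleUserAssignment rows
assignments = [
    (10, 1, 'alice'),
    (10, 1, 'bob'),
    (10, 2, 'carol'),
]

assignments_by_object = defaultdict(lambda: {'users': [], 'teams': []})
for ct_id, obj_id, user in assignments:
    assignments_by_object[(ct_id, obj_id)]['users'].append(user)

# One batched call per object instead of one per assignment
assert assignments_by_object[(10, 1)]['users'] == ['alice', 'bob']
assert assignments_by_object[(10, 2)]['users'] == ['carol']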
22  awx/main/migrations/0203_remove_team_of_teams.py  Normal file
@@ -0,0 +1,22 @@
+import logging
+
+from django.db import migrations
+
+from awx.main.migrations._dab_rbac import consolidate_indirect_user_roles
+
+logger = logging.getLogger('awx.main.migrations')
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0202_convert_controller_role_definitions'),
+    ]
+    # The DAB RBAC app makes substantial model changes which, by change ordering, come after this migration.
+    # Not declaring run_before might sometimes work, but declaring it enforces a stricter, more stable order
+    # for applying migrations both forwards and backwards.
+    run_before = [("dab_rbac", "0004_remote_permissions_additions")]
+
+    operations = [
+        migrations.RunPython(consolidate_indirect_user_roles, migrations.RunPython.noop),
+    ]
@@ -1,5 +1,6 @@
 import logging
+

 logger = logging.getLogger('awx.main.migrations')

@@ -1,5 +1,6 @@
 import json
 import logging
+from collections import defaultdict

 from django.apps import apps as global_apps
 from django.db.models import ForeignKey
@@ -17,6 +18,7 @@ logger = logging.getLogger('awx.main.migrations._dab_rbac')


 def create_permissions_as_operation(apps, schema_editor):
+    logger.info('Running data migration create_permissions_as_operation')
     # NOTE: the DAB ContentType changes adjusted how they fire
     # before they would fire on every app config, like contenttypes
     create_dab_permissions(global_apps.get_app_config("main"), apps=apps)
@@ -166,11 +168,15 @@ def migrate_to_new_rbac(apps, schema_editor):
     This method moves the assigned permissions from the old rbac.py models
     to the new RoleDefinition and ObjectRole models
     """
+    logger.info('Running data migration migrate_to_new_rbac')
     Role = apps.get_model('main', 'Role')
     RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
     RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
     Permission = apps.get_model('dab_rbac', 'DABPermission')

+    if Permission.objects.count() == 0:
+        raise RuntimeError('Running migrate_to_new_rbac requires DABPermission objects created first')
+
     # remove add premissions that are not valid for migrations from old versions
     for perm_str in ('add_organization', 'add_jobtemplate'):
         perm = Permission.objects.filter(codename=perm_str).first()
@@ -250,11 +256,14 @@ def migrate_to_new_rbac(apps, schema_editor):

     # Create new replacement system auditor role
     new_system_auditor, created = RoleDefinition.objects.get_or_create(
-        name='Controller System Auditor',
+        name='Platform Auditor',
         defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
     )
     new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))

+    if created:
+        logger.info(f'Created RoleDefinition {new_system_auditor.name} pk={new_system_auditor.pk} with {new_system_auditor.permissions.count()} permissions')
+
     # migrate is_system_auditor flag, because it is no longer handled by a system role
     old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first()
     if old_system_auditor:
@@ -283,8 +292,9 @@ def get_or_create_managed(name, description, ct, permissions, RoleDefinition):

 def setup_managed_role_definitions(apps, schema_editor):
     """
-    Idepotent method to create or sync the managed role definitions
+    Idempotent method to create or sync the managed role definitions
     """
+    logger.info('Running data migration setup_managed_role_definitions')
     to_create = {
         'object_admin': '{cls.__name__} Admin',
         'org_admin': 'Organization Admin',
@@ -448,3 +458,115 @@ def setup_managed_role_definitions(apps, schema_editor):
     for role_definition in unexpected_role_definitions:
         logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
         role_definition.delete()
+
+
+def get_team_to_team_relationships(apps, team_member_role):
+    """
+    Find all team-to-team relationships where one team is a member of another.
+    Returns a dict mapping parent_team_id -> [child_team_id, ...]
+    """
+    team_to_team_relationships = defaultdict(list)
+
+    # Find all team assignments with the Team Member role
+    RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment')
+    team_assignments = RoleTeamAssignment.objects.filter(role_definition=team_member_role).select_related('team')
+
+    for assignment in team_assignments:
+        parent_team_id = int(assignment.object_id)
+        child_team_id = assignment.team.id
+        team_to_team_relationships[parent_team_id].append(child_team_id)
+
+    return team_to_team_relationships
+
+
+def get_all_user_members_of_team(apps, team_member_role, team_id, team_to_team_map, visited=None):
+    """
+    Recursively find all users who are members of a team, including through nested teams.
+    """
+    if visited is None:
+        visited = set()
+
+    if team_id in visited:
+        return set()  # Avoid infinite recursion
+
+    visited.add(team_id)
+    all_users = set()
+
+    # Get direct user assignments to this team
+    RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
+    user_assignments = RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_id).select_related('user')
+
+    for assignment in user_assignments:
+        all_users.add(assignment.user)
+
+    # Get team-to-team assignments and recursively find their users
+    child_team_ids = team_to_team_map.get(team_id, [])
+    for child_team_id in child_team_ids:
+        nested_users = get_all_user_members_of_team(apps, team_member_role, child_team_id, team_to_team_map, visited.copy())
+        all_users.update(nested_users)

+    return all_users
+
+
+def remove_team_to_team_assignment(apps, team_member_role, parent_team_id, child_team_id):
+    """
+    Remove team-to-team memberships.
+    """
+    Team = apps.get_model('main', 'Team')
+    RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment')
+
+    parent_team = Team.objects.get(id=parent_team_id)
+    child_team = Team.objects.get(id=child_team_id)
+
+    # Remove all team-to-team RoleTeamAssignments
+    RoleTeamAssignment.objects.filter(role_definition=team_member_role, object_id=parent_team_id, team=child_team).delete()
+
+    # Check mirroring Team model for children under member_role
+    parent_team.member_role.children.filter(object_id=child_team_id).delete()
+
+
+def consolidate_indirect_user_roles(apps, schema_editor):
+    """
+    A user should have a member role for every team they were indirectly
+    a member of. ex. Team A is a member of Team B. All users in Team A
+    previously were only members of Team A. They should now be members of
+    Team A and Team B.
+    """
+
+    # get models for membership on teams
+    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
+    Team = apps.get_model('main', 'Team')
+
+    team_member_role = RoleDefinition.objects.get(name='Team Member')
+
+    team_to_team_map = get_team_to_team_relationships(apps, team_member_role)
+
+    if not team_to_team_map:
+        return  # No team-to-team relationships to consolidate
+
+    # Get content type for Team - needed for give_permissions
+    try:
+        from django.contrib.contenttypes.models import ContentType
+
+        team_content_type = ContentType.objects.get_for_model(Team)
+    except ImportError:
+        # Fallback if ContentType is not available
+        ContentType = apps.get_model('contenttypes', 'ContentType')
+        team_content_type = ContentType.objects.get_for_model(Team)
+
+    # Get all users who should be direct members of a team
+    for parent_team_id, child_team_ids in team_to_team_map.items():
+        all_users = get_all_user_members_of_team(apps, team_member_role, parent_team_id, team_to_team_map)
+
+        # Create direct RoleUserAssignments for all users
+        if all_users:
+            give_permissions(apps=apps, rd=team_member_role, users=list(all_users), object_id=parent_team_id, content_type_id=team_content_type.id)
+
+            # Mirror assignments to Team model
+            parent_team = Team.objects.get(id=parent_team_id)
+            for user in all_users:
+                parent_team.member_role.members.add(user.id)
+
+        # Remove all team-to-team assignments for parent team
+        for child_team_id in child_team_ids:
+            remove_team_to_team_assignment(apps, team_member_role, parent_team_id, child_team_id)
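The traversal in get_all_user_members_of_team above is plain depth-first recursion over the parent-to-children map, with a visited set guarding against membership cycles. The same idea in a self-contained toy (dicts stand in for the ORM lookups):

# Team F is a member of E, and G is a member of F
team_map = {'E': ['F'], 'F': ['G']}            # parent team -> child teams
direct = {'E': {'b'}, 'F': {'c'}, 'G': {'d'}}  # team -> direct user members

def all_members(team, visited=frozenset()):
    if team in visited:
        return set()  # avoid infinite recursion on team cycles
    users = set(direct.get(team, ()))
    for child in team_map.get(team, ()):
        users |= all_members(child, visited | {team})
    return users

# E picks up its own direct member plus everyone reachable through F and G
assert all_members('E') == {'b', 'c', 'd'}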
@@ -200,7 +200,7 @@ User.add_to_class('created', created)

 def get_system_auditor_role():
     rd, created = RoleDefinition.objects.get_or_create(
-        name='Controller System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
+        name='Platform Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
     )
     if created:
         rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view')))
1421  awx/main/models/credential/__init__.py  Normal file
File diff suppressed because it is too large
@@ -1024,7 +1024,9 @@ class InventorySourceOptions(BaseModel):
         # If a credential was provided, it's important that it matches
         # the actual inventory source being used (Amazon requires Amazon
         # credentials; Rackspace requires Rackspace credentials; etc...)
-        if source.replace('ec2', 'aws') != cred.kind:
+        if source == 'vmware_esxi' and source.replace('vmware_esxi', 'vmware') != cred.kind:
+            return _('VMWARE inventory sources (such as %s) require credentials for the matching cloud service.') % source
+        if source == 'ec2' and source.replace('ec2', 'aws') != cred.kind:
             return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source
         # Allow an EC2 source to omit the credential. If Tower is running on
         # an EC2 instance with an IAM Role assigned, boto will use credentials
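Since source.replace('vmware_esxi', 'vmware') is simply 'vmware' whenever source == 'vmware_esxi' (and likewise 'aws' for 'ec2'), the two added branches reduce to direct kind comparisons. A condensed sketch of the check (toy function; the real method returns translated messages):

def credential_error(source, cred_kind):
    if source == 'vmware_esxi' and cred_kind != 'vmware':
        return 'VMWARE inventory sources require credentials for the matching cloud service.'
    if source == 'ec2' and cred_kind != 'aws':
        return 'Cloud-based inventory sources require credentials for the matching cloud service.'
    return None  # no mismatch detected

assert credential_error('vmware_esxi', 'vmware') is None
assert credential_error('ec2', 'gce') is not None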
@@ -27,6 +27,7 @@ from django.conf import settings

 # Ansible_base app
 from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment
+from ansible_base.rbac.sync import maybe_reverse_sync_assignment, maybe_reverse_sync_unassignment
 from ansible_base.rbac import permission_registry
 from ansible_base.lib.utils.models import get_type_for_model
@@ -560,24 +561,12 @@ def get_role_definition(role):
     f = obj._meta.get_field(role.role_field)
     action_name = f.name.rsplit("_", 1)[0]
     model_print = type(obj).__name__
+    rd_name = f'{model_print} {action_name.title()} Compat'
     perm_list = get_role_codenames(role)
     defaults = {
         'content_type': permission_registry.content_type_model.objects.get_by_natural_key(role.content_type.app_label, role.content_type.model),
         'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
     }
-    # use Controller-specific role definitions for Team/Organization and member/admin
-    # instead of platform role definitions
-    # these should exist in the system already, so just do a lookup by role definition name
-    if model_print in ['Team', 'Organization'] and action_name in ['member', 'admin']:
-        rd_name = f'Controller {model_print} {action_name.title()}'
-        rd = RoleDefinition.objects.filter(name=rd_name).first()
-        if rd:
-            return rd
-        else:
-            return RoleDefinition.objects.create_from_permissions(permissions=perm_list, name=rd_name, managed=True, **defaults)
-    else:
-        rd_name = f'{model_print} {action_name.title()} Compat'
-
     with impersonate(None):
         try:
@@ -633,12 +622,14 @@ def get_role_from_object_role(object_role):
     return getattr(object_role.content_object, role_name)


-def give_or_remove_permission(role, actor, giving=True):
+def give_or_remove_permission(role, actor, giving=True, rd=None):
     obj = role.content_object
     if obj is None:
         return
-    rd = get_role_definition(role)
-    rd.give_or_remove_permission(actor, obj, giving=giving)
+    if not rd:
+        rd = get_role_definition(role)
+    assignment = rd.give_or_remove_permission(actor, obj, giving=giving)
+    return assignment


 class SyncEnabled(threading.local):
@@ -690,7 +681,15 @@ def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
             role = Role.objects.get(pk=user_or_role_id)
         else:
             user = get_user_model().objects.get(pk=user_or_role_id)
-            give_or_remove_permission(role, user, giving=is_giving)
+            rd = get_role_definition(role)
+            assignment = give_or_remove_permission(role, user, giving=is_giving, rd=rd)
+
+            # sync to resource server
+            if rbac_sync_enabled.enabled:
+                if is_giving:
+                    maybe_reverse_sync_assignment(assignment)
+                else:
+                    maybe_reverse_sync_unassignment(rd, user, role.content_object)


 def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
@@ -733,7 +732,90 @@ def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
         from awx.main.models.organization import Team

         team = Team.objects.get(pk=parent_role.object_id)
-        give_or_remove_permission(child_role, team, giving=is_giving)
+        rd = get_role_definition(child_role)
+        assignment = give_or_remove_permission(child_role, team, giving=is_giving, rd=rd)
+
+        # sync to resource server
+        if rbac_sync_enabled.enabled:
+            if is_giving:
+                maybe_reverse_sync_assignment(assignment)
+            else:
+                maybe_reverse_sync_unassignment(rd, team, child_role.content_object)
+
+
+ROLE_DEFINITION_TO_ROLE_FIELD = {
+    'Organization Member': 'member_role',
+    'WorkflowJobTemplate Admin': 'admin_role',
+    'Organization WorkflowJobTemplate Admin': 'workflow_admin_role',
+    'WorkflowJobTemplate Execute': 'execute_role',
+    'WorkflowJobTemplate Approve': 'approval_role',
+    'InstanceGroup Admin': 'admin_role',
+    'InstanceGroup Use': 'use_role',
+    'Organization ExecutionEnvironment Admin': 'execution_environment_admin_role',
+    'Project Admin': 'admin_role',
+    'Organization Project Admin': 'project_admin_role',
+    'Project Use': 'use_role',
+    'Project Update': 'update_role',
+    'JobTemplate Admin': 'admin_role',
+    'Organization JobTemplate Admin': 'job_template_admin_role',
+    'JobTemplate Execute': 'execute_role',
+    'Inventory Admin': 'admin_role',
+    'Organization Inventory Admin': 'inventory_admin_role',
+    'Inventory Use': 'use_role',
+    'Inventory Adhoc': 'adhoc_role',
+    'Inventory Update': 'update_role',
+    'Organization NotificationTemplate Admin': 'notification_admin_role',
+    'Credential Admin': 'admin_role',
+    'Organization Credential Admin': 'credential_admin_role',
+    'Credential Use': 'use_role',
+    'Team Admin': 'admin_role',
+    'Team Member': 'member_role',
+    'Organization Admin': 'admin_role',
+    'Organization Audit': 'auditor_role',
+    'Organization Execute': 'execute_role',
+    'Organization Approval': 'approval_role',
+}
+
+
+def _sync_assignments_to_old_rbac(instance, delete=True):
+    from awx.main.signals import disable_activity_stream
+
+    with disable_activity_stream():
+        with disable_rbac_sync():
+            field_name = ROLE_DEFINITION_TO_ROLE_FIELD.get(instance.role_definition.name)
+            if not field_name:
+                return
+            try:
+                role = getattr(instance.object_role.content_object, field_name)
+            # in the case RoleUserAssignment is being cascade deleted, then
+            # object_role might not exist. In which case the object is about to be removed
+            # anyways so just return
+            except ObjectDoesNotExist:
+                return
+            if isinstance(instance.actor, get_user_model()):
+                # user
+                if delete:
+                    role.members.remove(instance.actor)
+                else:
+                    role.members.add(instance.actor)
+            else:
+                # team
+                if delete:
+                    instance.team.member_role.children.remove(role)
+                else:
+                    instance.team.member_role.children.add(role)
+
+
+@receiver(post_delete, sender=RoleUserAssignment)
+@receiver(post_delete, sender=RoleTeamAssignment)
+def sync_assignments_to_old_rbac_delete(instance, **kwargs):
+    _sync_assignments_to_old_rbac(instance, delete=True)
+
+
+@receiver(post_save, sender=RoleUserAssignment)
+@receiver(post_save, sender=RoleTeamAssignment)
+def sync_user_assignments_to_old_rbac_create(instance, **kwargs):
+    _sync_assignments_to_old_rbac(instance, delete=False)


 ROLE_DEFINITION_TO_ROLE_FIELD = {
@@ -1200,6 +1200,13 @@ class UnifiedJob(
             fd = StringIO(fd.getvalue().replace('\\r\\n', '\n'))
         return fd

+    def _fix_double_escapes(self, content):
+        """
+        Collapse double-escaped sequences into single-escaped form.
+        """
+        # Replace \\ followed by one of ' " \ n r t
+        return re.sub(r'\\([\'"\\nrt])', r'\1', content)
+
     def _escape_ascii(self, content):
         # Remove ANSI escape sequences used to embed event data.
         content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content)
@@ -1207,12 +1214,14 @@ class UnifiedJob(
         content = re.sub(r'\x1b[^m]*m', '', content)
         return content

-    def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False):
+    def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False, fix_escapes=False):
         content = self.result_stdout_raw_handle().read()
         if redact_sensitive:
             content = UriCleaner.remove_sensitive(content)
         if escape_ascii:
             content = self._escape_ascii(content)
+        if fix_escapes:
+            content = self._fix_double_escapes(content)
         return content

     @property
@@ -1221,9 +1230,10 @@ class UnifiedJob(

     @property
     def result_stdout(self):
-        return self._result_stdout_raw(escape_ascii=True)
+        # Human-facing output should fix escapes
+        return self._result_stdout_raw(escape_ascii=True, fix_escapes=True)

-    def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False):
+    def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False, fix_escapes=False):
         return_buffer = StringIO()
         if end_line is not None:
             end_line = int(end_line)
@@ -1246,14 +1256,18 @@ class UnifiedJob(
             return_buffer = UriCleaner.remove_sensitive(return_buffer)
         if escape_ascii:
             return_buffer = self._escape_ascii(return_buffer)
+        if fix_escapes:
+            return_buffer = self._fix_double_escapes(return_buffer)

         return return_buffer, start_actual, end_actual, absolute_end

     def result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=False):
+        # Raw should NOT fix escapes
         return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive)

     def result_stdout_limited(self, start_line=0, end_line=None, redact_sensitive=False):
-        return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True)
+        # Human-facing should fix escapes
+        return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True, fix_escapes=True)

     @property
     def workflow_job_id(self):
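To see what the new fix_escapes path does, here is the regex from _fix_double_escapes above in isolation; the input/output below is inferred from the pattern itself, so treat it as a sketch rather than documented behavior:

import re

def fix_double_escapes(content):
    # Same substitution as UnifiedJob._fix_double_escapes: a literal backslash
    # followed by one of ' " \ n r t is collapsed to the trailing character.
    return re.sub(r'\\([\'"\\nrt])', r'\1', content)

# Two literal backslashes before 'n' collapse to one (shown with explicit escapes)
assert fix_double_escapes('line\\\\nbreak') == 'line\\nbreak'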
@@ -5,8 +5,6 @@ import time
 import ssl
 import logging

-import irc.client
-
 from django.utils.encoding import smart_str
 from django.utils.translation import gettext_lazy as _
@@ -16,6 +14,19 @@ from awx.main.notifications.custom_notification_base import CustomNotificationBa
 logger = logging.getLogger('awx.main.notifications.irc_backend')


+def _irc():
+    """
+    Prime the real jaraco namespace before importing irc.* so that
+    setuptools' vendored 'setuptools._vendor.jaraco' doesn't shadow
+    external 'jaraco.*' packages (e.g., jaraco.stream).
+    """
+    import jaraco.stream  # ensure the namespace package is established  # noqa: F401
+    import irc.client as irc_client
+    import irc.connection as irc_connection
+
+    return irc_client, irc_connection
+
+
 class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
     init_parameters = {
         "server": {"label": "IRC Server Address", "type": "string"},
@@ -40,12 +51,15 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
     def open(self):
         if self.connection is not None:
             return False
+
+        irc_client, irc_connection = _irc()
+
         if self.use_ssl:
-            connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
+            connection_factory = irc_connection.Factory(wrapper=ssl.wrap_socket)
         else:
-            connection_factory = irc.connection.Factory()
+            connection_factory = irc_connection.Factory()
         try:
-            self.reactor = irc.client.Reactor()
+            self.reactor = irc_client.Reactor()
             self.connection = self.reactor.server().connect(
                 self.server,
                 self.port,
@@ -53,7 +67,7 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
                 password=self.password,
                 connect_factory=connection_factory,
             )
-        except irc.client.ServerConnectionError as e:
+        except irc_client.ServerConnectionError as e:
             logger.error(smart_str(_("Exception connecting to irc server: {}").format(e)))
             if not self.fail_silently:
                 raise
@@ -65,8 +79,9 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
         self.connection = None

     def on_connect(self, connection, event):
+        irc_client, _ = _irc()
         for c in self.channels:
-            if irc.client.is_channel(c):
+            if irc_client.is_channel(c):
                 connection.join(c)
             else:
                 for m in self.channels[c]:
@@ -12,7 +12,7 @@ from django.db import transaction
 # Django flags
 from flags.state import flag_enabled

-from awx.main.dispatch.publish import task as task_awx
+from awx.main.dispatch.publish import task
 from awx.main.dispatch import get_task_queuename
 from awx.main.models.indirect_managed_node_audit import IndirectManagedNodeAudit
 from awx.main.models.event_query import EventQuery
@@ -159,7 +159,7 @@ def cleanup_old_indirect_host_entries() -> None:
     IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete()


-@task_awx(queue=get_task_queuename)
+@task(queue=get_task_queuename)
 def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
     try:
         job = Job.objects.get(id=job_id)
@@ -201,7 +201,7 @@ def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
         logger.exception(f'Error processing indirect host data for job_id={job_id}')


-@task_awx(queue=get_task_queuename)
+@task(queue=get_task_queuename)
 def cleanup_and_save_indirect_host_entries_fallback() -> None:
     if not flag_enabled("FEATURE_INDIRECT_NODE_COUNTING_ENABLED"):
         return
@@ -21,6 +21,8 @@ from django.db import transaction

 # Shared code for the AWX platform
 from awx_plugins.interfaces._temporary_private_container_api import CONTAINER_ROOT, get_incontainer_path
+from django.utils.translation import gettext_lazy as _
+from rest_framework.exceptions import PermissionDenied

 # Runner
 import ansible_runner
@@ -87,8 +89,9 @@ from awx.main.utils.common import (
 from awx.conf.license import get_license
 from awx.main.utils.handlers import SpecialInventoryHandler
 from awx.main.utils.update_model import update_model
-from rest_framework.exceptions import PermissionDenied
-from django.utils.translation import gettext_lazy as _
+# Django flags
+from flags.state import flag_enabled
+
 # Django flags
 from flags.state import flag_enabled
@@ -13,6 +13,25 @@ from datetime import datetime
 from distutils.version import LooseVersion as Version
 from io import StringIO

+# Django
+from django.conf import settings
+from django.db import connection, transaction, DatabaseError, IntegrityError
+from django.db.models.fields.related import ForeignKey
+from django.utils.timezone import now, timedelta
+from django.utils.encoding import smart_str
+from django.contrib.auth.models import User
+from django.utils.translation import gettext_lazy as _
+from django.utils.translation import gettext_noop
+from django.core.cache import cache
+from django.core.exceptions import ObjectDoesNotExist
+from django.db.models.query import QuerySet
+
+# Django-CRUM
+from crum import impersonate
+
+# Django flags
+from flags.state import flag_enabled
+
 # Runner
 import ansible_runner.cleanup
 import psycopg
@@ -72,6 +91,13 @@ from awx.main.tasks.receptor import administrative_workunit_reaper, get_receptor
 from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
 from awx.main.utils.reload import stop_local_services
 from dispatcherd.publish import task
+from awx.main.tasks.receptor import get_receptor_ctl, worker_info, worker_cleanup, administrative_workunit_reaper, write_receptor_config
+from awx.main.consumers import emit_channel_notification
+from awx.main import analytics
+from awx.conf import settings_registry
+from awx.main.analytics.subsystem_metrics import DispatcherMetrics
+
+from rest_framework.exceptions import PermissionDenied

 logger = logging.getLogger('awx.main.tasks.system')
@@ -0,0 +1,6 @@
+{
+    "VMWARE_HOST": "https://foo.invalid",
+    "VMWARE_PASSWORD": "fooo",
+    "VMWARE_USER": "fooo",
+    "VMWARE_VALIDATE_CERTS": "False"
+}
@@ -0,0 +1,4 @@
+---
+{
+    "demo.query.example": ""
+}
@@ -1,57 +1,17 @@
 import time
 import logging

-from dispatcherd.publish import task
-
-from django.db import connection
-
 from awx.main.dispatch import get_task_queuename
-from awx.main.dispatch.publish import task as old_task
+from awx.main.dispatch.publish import task
-
-from ansible_base.lib.utils.db import advisory_lock
-

 logger = logging.getLogger(__name__)


-@old_task(queue=get_task_queuename)
+@task(queue=get_task_queuename)
 def sleep_task(seconds=10, log=False):
     if log:
         logger.info('starting sleep_task')
     time.sleep(seconds)
     if log:
         logger.info('finished sleep_task')
-
-
-@task()
-def sleep_break_connection(seconds=0.2):
-    """
-    Interact with the database in an intentionally breaking way.
-    After this finishes, queries made by this connection are expected to error
-    with "the connection is closed"
-    This is obviously a problem for any task that comes afterwards.
-    So this is used to break things so that the fixes may be demonstrated.
-    """
-    with connection.cursor() as cursor:
-        cursor.execute(f"SET idle_session_timeout = '{seconds / 2}s';")
-
-    logger.info(f'sleeping for {seconds}s > {seconds / 2}s session timeout')
-    time.sleep(seconds)
-
-    for i in range(1, 3):
-        logger.info(f'\nRunning query number {i}')
-        try:
-            with connection.cursor() as cursor:
-                cursor.execute("SELECT 1;")
-            logger.info(' query worked, not expected')
-        except Exception as exc:
-            logger.info(f' query errored as expected\ntype: {type(exc)}\nstr: {str(exc)}')
-
-    logger.info(f'Connection present: {bool(connection.connection)}, reports closed: {getattr(connection.connection, "closed", "not_found")}')
-
-
-@task()
-def advisory_lock_exception():
-    time.sleep(0.2)  # so it can fill up all the workers... hacky for now
-    with advisory_lock('advisory_lock_exception', lock_session_timeout_milliseconds=20):
-        raise RuntimeError('this is an intentional error')
@@ -1224,6 +1224,30 @@ def test_custom_credential_type_create(get, post, organization, admin):
     assert decrypt_field(cred, 'api_token') == 'secret'


+@pytest.mark.django_db
+def test_galaxy_create_ok(post, organization, admin):
+    params = {
+        'credential_type': 1,
+        'name': 'Galaxy credential',
+        'inputs': {
+            'url': 'https://galaxy.ansible.com',
+            'token': 'some_galaxy_token',
+        },
+    }
+    galaxy = CredentialType.defaults['galaxy_api_token']()
+    galaxy.save()
+    params['user'] = admin.id
+    params['credential_type'] = galaxy.pk
+    response = post(reverse('api:credential_list'), params, admin)
+    assert response.status_code == 201
+
+    assert Credential.objects.count() == 1
+    cred = Credential.objects.all()[:1].get()
+    assert cred.credential_type == galaxy
+    assert cred.inputs['url'] == 'https://galaxy.ansible.com'
+    assert decrypt_field(cred, 'token') == 'some_galaxy_token'
+
+
 #
 # misc xfail conditions
 #
@@ -1,3 +1,5 @@
+from unittest import mock
+
 import pytest

 from awx.api.versioning import reverse
@@ -5,6 +7,9 @@ from awx.main.models.activity_stream import ActivityStream
 from awx.main.models.ha import Instance

 from django.test.utils import override_settings
+from django.http import HttpResponse
+
+from rest_framework import status


 INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42)
@@ -87,3 +92,11 @@ def test_custom_hostname_regex(post, admin_user):
         "peers": [],
     }
     post(url=url, user=admin_user, data=data, expect=value[1])
+
+
+def test_instance_install_bundle(get, admin_user, system_auditor):
+    instance = Instance.objects.create(**INSTANCE_KWARGS)
+    url = reverse('api:instance_install_bundle', kwargs={'pk': instance.pk})
+    with mock.patch('awx.api.views.instance_install_bundle.InstanceInstallBundle.get', return_value=HttpResponse({'test': 'data'}, status=status.HTTP_200_OK)):
+        get(url=url, user=admin_user, expect=200)
+        get(url=url, user=system_auditor, expect=403)
@@ -521,6 +521,19 @@ class TestInventorySourceCredential:
         patch(url=inv_src.get_absolute_url(), data={'credential': aws_cred.pk}, expect=200, user=admin_user)
         assert list(inv_src.credentials.values_list('id', flat=True)) == [aws_cred.pk]

+    def test_vmware_cred_create_esxi_source(self, inventory, admin_user, organization, post, get):
+        """Test that a vmware esxi source can be added with a vmware credential"""
+        from awx.main.models.credential import Credential, CredentialType
+
+        vmware = CredentialType.defaults['vmware']()
+        vmware.save()
+        vmware_cred = Credential.objects.create(credential_type=vmware, name="bar", organization=organization)
+        inv_src = InventorySource.objects.create(inventory=inventory, name='foobar', source='vmware_esxi')
+        r = post(url=reverse('api:inventory_source_credentials_list', kwargs={'pk': inv_src.pk}), data={'id': vmware_cred.pk}, expect=204, user=admin_user)
+        g = get(inv_src.get_absolute_url(), admin_user)
+        assert r.status_code == 204
+        assert g.data['credential'] == vmware_cred.pk
+

 @pytest.mark.django_db
 class TestControlledBySCM:
@@ -5,6 +5,7 @@ import pytest

 from django.contrib.sessions.middleware import SessionMiddleware
 from django.test.utils import override_settings
+
 from awx.main.models import User
 from awx.api.versioning import reverse
@@ -1,3 +1,5 @@
+import logging
+
 # Python
 import pytest
 from unittest import mock
@@ -8,7 +10,7 @@ import importlib
 # Django
 from django.urls import resolve
 from django.http import Http404
-from django.apps import apps
+from django.apps import apps as global_apps
 from django.core.handlers.exception import response_for_exception
 from django.contrib.auth.models import User
 from django.core.serializers.json import DjangoJSONEncoder
@@ -47,6 +49,8 @@ from awx.main.models.ad_hoc_commands import AdHocCommand
 from awx.main.models.execution_environments import ExecutionEnvironment
 from awx.main.utils import is_testing

+logger = logging.getLogger(__name__)
+
 __SWAGGER_REQUESTS__ = {}

@ -54,8 +58,17 @@ __SWAGGER_REQUESTS__ = {}
|
|||||||
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')
|
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')
|
||||||
|
|
||||||
|
|
||||||
|
def create_service_id(app_config, apps=global_apps, **kwargs):
|
||||||
|
try:
|
||||||
|
apps.get_model("dab_resource_registry", "ServiceID")
|
||||||
|
except LookupError:
|
||||||
|
logger.info('Looks like reverse migration, not creating resource registry ServiceID')
|
||||||
|
return
|
||||||
|
dab_rr_initial.create_service_id(apps, None)
|
||||||
|
|
||||||
|
|
||||||
if is_testing():
|
if is_testing():
|
||||||
post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))
|
post_migrate.connect(create_service_id)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
@ -126,7 +139,7 @@ def execution_environment():
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def setup_managed_roles():
|
def setup_managed_roles():
|
||||||
"Run the migration script to pre-create managed role definitions"
|
"Run the migration script to pre-create managed role definitions"
|
||||||
setup_managed_role_definitions(apps, None)
|
setup_managed_role_definitions(global_apps, None)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
|
|||||||
147
awx/main/tests/functional/dab_rbac/test_consolidate_teams.py
Normal file
@ -0,0 +1,147 @@
+import pytest
+
+from django.contrib.contenttypes.models import ContentType
+from django.test import override_settings
+from django.apps import apps
+
+from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment
+from ansible_base.rbac.migrations._utils import give_permissions
+
+from awx.main.models import User, Team
+from awx.main.migrations._dab_rbac import consolidate_indirect_user_roles
+
+
+@pytest.mark.django_db
+@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True)
+def test_consolidate_indirect_user_roles_with_nested_teams(setup_managed_roles, organization):
+    """
+    Test the consolidate_indirect_user_roles function with a nested team hierarchy.
+    Setup:
+    - Users: A, B, C, D
+    - Teams: E, F, G
+    - Direct assignments: A→(E,F,G), B→E, C→F, D→G
+    - Team hierarchy: F→E (F is member of E), G→F (G is member of F)
+    Expected result after consolidation:
+    - Team E should have users: A, B, C, D (A directly, B directly, C through F, D through G→F)
+    - Team F should have users: A, C, D (A directly, C directly, D through G)
+    - Team G should have users: A, D (A directly, D directly)
+    """
+    user_a = User.objects.create_user(username='user_a')
+    user_b = User.objects.create_user(username='user_b')
+    user_c = User.objects.create_user(username='user_c')
+    user_d = User.objects.create_user(username='user_d')
+
+    team_e = Team.objects.create(name='Team E', organization=organization)
+    team_f = Team.objects.create(name='Team F', organization=organization)
+    team_g = Team.objects.create(name='Team G', organization=organization)
+
+    # Get role definition and content type for give_permissions
+    team_member_role = RoleDefinition.objects.get(name='Team Member')
+    team_content_type = ContentType.objects.get_for_model(Team)
+
+    # Assign users to teams
+    give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_e.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_f.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_g.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, users=[user_b], object_id=team_e.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, users=[user_c], object_id=team_f.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, users=[user_d], object_id=team_g.id, content_type_id=team_content_type.id)
+
+    # Mirror user assignments in the old RBAC system because signals don't run in tests
+    team_e.member_role.members.add(user_a.id, user_b.id)
+    team_f.member_role.members.add(user_a.id, user_c.id)
+    team_g.member_role.members.add(user_a.id, user_d.id)
+
+    # Setup team-to-team relationships
+    give_permissions(apps=apps, rd=team_member_role, teams=[team_f], object_id=team_e.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, teams=[team_g], object_id=team_f.id, content_type_id=team_content_type.id)
+
+    # Verify initial direct assignments
+    team_e_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_e.id).values_list('user_id', flat=True))
+    assert team_e_users_before == {user_a.id, user_b.id}
+    team_f_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_f.id).values_list('user_id', flat=True))
+    assert team_f_users_before == {user_a.id, user_c.id}
+    team_g_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_g.id).values_list('user_id', flat=True))
+    assert team_g_users_before == {user_a.id, user_d.id}
+
+    # Verify team-to-team relationships exist
+    assert RoleTeamAssignment.objects.filter(role_definition=team_member_role, team=team_f, object_id=team_e.id).exists()
+    assert RoleTeamAssignment.objects.filter(role_definition=team_member_role, team=team_g, object_id=team_f.id).exists()
+
+    # Run the consolidation function
+    consolidate_indirect_user_roles(apps, None)
+
+    # Verify consolidation
+    team_e_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_e.id).values_list('user_id', flat=True))
+    assert team_e_users_after == {user_a.id, user_b.id, user_c.id, user_d.id}, f"Team E should have users A, B, C, D but has {team_e_users_after}"
+    team_f_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_f.id).values_list('user_id', flat=True))
+    assert team_f_users_after == {user_a.id, user_c.id, user_d.id}, f"Team F should have users A, C, D but has {team_f_users_after}"
+    team_g_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_g.id).values_list('user_id', flat=True))
+    assert team_g_users_after == {user_a.id, user_d.id}, f"Team G should have users A, D but has {team_g_users_after}"
+
+    # Verify team member changes are mirrored to the old RBAC system
+    assert team_e_users_after == set(team_e.member_role.members.all().values_list('id', flat=True))
+    assert team_f_users_after == set(team_f.member_role.members.all().values_list('id', flat=True))
+    assert team_g_users_after == set(team_g.member_role.members.all().values_list('id', flat=True))
+
+    # Verify team-to-team relationships are removed after consolidation
+    assert not RoleTeamAssignment.objects.filter(
+        role_definition=team_member_role, team=team_f, object_id=team_e.id
+    ).exists(), "Team-to-team relationship F→E should be removed"
+    assert not RoleTeamAssignment.objects.filter(
+        role_definition=team_member_role, team=team_g, object_id=team_f.id
+    ).exists(), "Team-to-team relationship G→F should be removed"
+
+
+@pytest.mark.django_db
+@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True)
+def test_consolidate_indirect_user_roles_no_team_relationships(setup_managed_roles, organization):
+    """
+    Test that the function handles the case where there are no team-to-team relationships.
+    It should return early without making any changes.
+    """
+    # Create a user and team with direct assignment
+    user = User.objects.create_user(username='test_user')
+    team = Team.objects.create(name='Test Team', organization=organization)
+
+    team_member_role = RoleDefinition.objects.get(name='Team Member')
+    team_content_type = ContentType.objects.get_for_model(Team)
+    give_permissions(apps=apps, rd=team_member_role, users=[user], object_id=team.id, content_type_id=team_content_type.id)
+
+    # Compare count of assignments before and after consolidation
+    assignments_before = RoleUserAssignment.objects.filter(role_definition=team_member_role).count()
+    consolidate_indirect_user_roles(apps, None)
+    assignments_after = RoleUserAssignment.objects.filter(role_definition=team_member_role).count()
+
+    assert assignments_before == assignments_after, "Number of assignments should not change when there are no team-to-team relationships"
+
+
+@pytest.mark.django_db
+@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True)
+def test_consolidate_indirect_user_roles_circular_reference(setup_managed_roles, organization):
+    """
+    Test that the function handles circular team references without infinite recursion.
+    """
+    team_a = Team.objects.create(name='Team A', organization=organization)
+    team_b = Team.objects.create(name='Team B', organization=organization)
+
+    # Create a user assigned to team A
+    user = User.objects.create_user(username='test_user')
+
+    team_member_role = RoleDefinition.objects.get(name='Team Member')
+    team_content_type = ContentType.objects.get_for_model(Team)
+    give_permissions(apps=apps, rd=team_member_role, users=[user], object_id=team_a.id, content_type_id=team_content_type.id)
+
+    # Create circular team relationships: A → B → A
+    give_permissions(apps=apps, rd=team_member_role, teams=[team_b], object_id=team_a.id, content_type_id=team_content_type.id)
+    give_permissions(apps=apps, rd=team_member_role, teams=[team_a], object_id=team_b.id, content_type_id=team_content_type.id)
+
+    # Run the consolidation function - should not raise an exception
+    consolidate_indirect_user_roles(apps, None)
+
+    # Both teams should have the user assigned
+    team_a_users = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_a.id).values_list('user_id', flat=True))
+    team_b_users = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_b.id).values_list('user_id', flat=True))
+
+    assert user.id in team_a_users, "User should be assigned to team A"
+    assert user.id in team_b_users, "User should be assigned to team B"
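For orientation, the consolidation these tests pin down is a transitive closure over team membership. A minimal standalone sketch of that walk (plain dicts and a hypothetical helper name, not the actual consolidate_indirect_user_roles, which operates on the Django models and also deletes the team-to-team assignments afterwards):

def effective_members(team, direct, member_teams, _seen=None):
    # All users that should become direct members of `team`: its own
    # members plus, recursively, the members of any team holding a
    # member role on it. `_seen` guards against circular references.
    if _seen is None:
        _seen = set()
    if team in _seen:
        return set()
    _seen.add(team)
    users = set(direct.get(team, ()))
    for child in member_teams.get(team, ()):
        users |= effective_members(child, direct, member_teams, _seen)
    return users

# Mirrors the nested-team scenario of the first test above:
direct = {'E': {'A', 'B'}, 'F': {'A', 'C'}, 'G': {'A', 'D'}}
member_teams = {'E': {'F'}, 'F': {'G'}}  # F is a member of E, G of F
assert effective_members('E', direct, member_teams) == {'A', 'B', 'C', 'D'}
assert effective_members('F', direct, member_teams) == {'A', 'C', 'D'}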
@ -151,14 +151,6 @@ def test_assign_credential_to_user_of_another_org(setup_managed_roles, credentia
     post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
 
 
-@pytest.mark.django_db
-def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles):
-    member_rd = RoleDefinition.objects.get(name='Organization Member')
-    url = django_reverse('roleuserassignment-list')
-    r = post(url, data={'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}, user=admin_user, expect=400)
-    assert 'Not managed locally' in str(r.data)
-
-
 @pytest.mark.django_db
 def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin, bob, post, get):
     '''
@ -178,10 +170,17 @@ def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin
 @pytest.mark.django_db
 @pytest.mark.parametrize('actor', ['user', 'team'])
 @pytest.mark.parametrize('role_name', ['Organization Admin', 'Organization Member', 'Team Admin', 'Team Member'])
-def test_prevent_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post):
+def test_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post):
     '''
-    Prevent user or team from being added to platform-level roles
+    Allow user to be added to platform-level roles
+    Exceptions:
+    - Team cannot be added to Organization Member or Admin role
+    - Team cannot be added to Team Admin or Team Member role
     '''
+    if actor == 'team':
+        expect = 400
+    else:
+        expect = 201
     rd = RoleDefinition.objects.get(name=role_name)
     endpoint = 'roleuserassignment-list' if actor == 'user' else 'roleteamassignment-list'
     url = django_reverse(endpoint)
@ -189,37 +188,9 @@ def test_prevent_adding_actor_to_platform_roles(setup_managed_roles, role_name,
     data = {'object_id': object_id, 'role_definition': rd.id}
     actor_id = bob.id if actor == 'user' else team.id
     data[actor] = actor_id
-    r = post(url, data=data, user=admin, expect=400)
-    assert 'Not managed locally' in str(r.data)
-
-
-@pytest.mark.django_db
-@pytest.mark.parametrize('role_name', ['Controller Team Admin', 'Controller Team Member'])
-def test_adding_user_to_controller_team_roles(setup_managed_roles, role_name, team, admin, bob, post, get):
-    '''
-    Allow user to be added to Controller Team Admin or Controller Team Member
-    '''
-    url_detail = reverse('api:team_detail', kwargs={'pk': team.id})
-    get(url_detail, user=bob, expect=403)
-
-    rd = RoleDefinition.objects.get(name=role_name)
-    url = django_reverse('roleuserassignment-list')
-    post(url, data={'object_id': team.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201)
-
-    get(url_detail, user=bob, expect=200)
-
-
-@pytest.mark.django_db
-@pytest.mark.parametrize('role_name', ['Controller Organization Admin', 'Controller Organization Member'])
-def test_adding_user_to_controller_organization_roles(setup_managed_roles, role_name, organization, admin, bob, post, get):
-    '''
-    Allow user to be added to Controller Organization Admin or Controller Organization Member
-    '''
-    url_detail = reverse('api:organization_detail', kwargs={'pk': organization.id})
-    get(url_detail, user=bob, expect=403)
-
-    rd = RoleDefinition.objects.get(name=role_name)
-    url = django_reverse('roleuserassignment-list')
-    post(url, data={'object_id': organization.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201)
-
-    get(url, user=bob, expect=200)
+    r = post(url, data=data, user=admin, expect=expect)
+    if expect == 400:
+        if 'Organization' in role_name:
+            assert 'Assigning organization member permission to teams is not allowed' in str(r.data)
+        if 'Team' in role_name:
+            assert 'Assigning team permissions to other teams is not allowed' in str(r.data)
@ -15,6 +15,14 @@ def test_roles_to_not_create(setup_managed_roles):
         raise Exception(f'Found RoleDefinitions that should not exist: {bad_names}')
 
 
+@pytest.mark.django_db
+def test_org_admin_role(setup_managed_roles):
+    rd = RoleDefinition.objects.get(name='Organization Admin')
+    codenames = list(rd.permissions.values_list('codename', flat=True))
+    assert 'view_inventory' in codenames
+    assert 'change_inventory' in codenames
+
+
 @pytest.mark.django_db
 def test_project_update_role(setup_managed_roles):
     """Role to allow updating a project on the object-level should exist"""
@ -31,32 +39,18 @@ def test_org_child_add_permission(setup_managed_roles):
     assert not DABPermission.objects.filter(codename='add_jobtemplate').exists()
 
 
-@pytest.mark.django_db
-def test_controller_specific_roles_have_correct_permissions(setup_managed_roles):
-    '''
-    Controller specific roles should have the same permissions as the platform roles
-    e.g. Controller Team Admin should have same permission set as Team Admin
-    '''
-    for rd_name in ['Controller Team Admin', 'Controller Team Member', 'Controller Organization Member', 'Controller Organization Admin']:
-        rd = RoleDefinition.objects.get(name=rd_name)
-        rd_platform = RoleDefinition.objects.get(name=rd_name.split('Controller ')[1])
-        assert set(rd.permissions.all()) == set(rd_platform.permissions.all())
-
-
 @pytest.mark.django_db
 @pytest.mark.parametrize('resource_name', ['Team', 'Organization'])
 @pytest.mark.parametrize('action', ['Member', 'Admin'])
-def test_legacy_RBAC_uses_controller_specific_roles(setup_managed_roles, resource_name, action, team, bob, organization):
+def test_legacy_RBAC_uses_platform_roles(setup_managed_roles, resource_name, action, team, bob, organization):
     '''
-    Assignment to legacy RBAC roles should use controller specific role definitions
-    e.g. Controller Team Admin, Controller Team Member, Controller Organization Member, Controller Organization Admin
+    Assignment to legacy RBAC roles should use platform role definitions
+    e.g. Team Admin, Team Member, Organization Member, Organization Admin
     '''
     resource = team if resource_name == 'Team' else organization
     if action == 'Member':
         resource.member_role.members.add(bob)
     else:
         resource.admin_role.members.add(bob)
-    rd = RoleDefinition.objects.get(name=f'Controller {resource_name} {action}')
-    rd_platform = RoleDefinition.objects.get(name=f'{resource_name} {action}')
+    rd = RoleDefinition.objects.get(name=f'{resource_name} {action}')
     assert RoleUserAssignment.objects.filter(role_definition=rd, user=bob, object_id=resource.id).exists()
-    assert not RoleUserAssignment.objects.filter(role_definition=rd_platform, user=bob, object_id=resource.id).exists()
@ -173,20 +173,6 @@ def test_creator_permission(rando, admin_user, inventory, setup_managed_roles):
     assert rando in inventory.admin_role.members.all()
 
 
-@pytest.mark.django_db
-def test_team_team_read_role(rando, team, admin_user, post, setup_managed_roles):
-    orgs = [Organization.objects.create(name=f'foo-{i}') for i in range(2)]
-    teams = [Team.objects.create(name=f'foo-{i}', organization=orgs[i]) for i in range(2)]
-    teams[1].member_role.members.add(rando)
-
-    # give second team read permission to first team through the API for regression testing
-    url = reverse('api:role_teams_list', kwargs={'pk': teams[0].read_role.pk, 'version': 'v2'})
-    post(url, {'id': teams[1].id}, user=admin_user)
-
-    # user should be able to view the first team
-    assert rando in teams[0].read_role
-
-
 @pytest.mark.django_db
 def test_implicit_parents_no_assignments(organization):
     """Through the normal course of creating models, we should not be changing DAB RBAC permissions"""
@ -206,19 +192,19 @@ def test_user_auditor_rel(organization, rando, setup_managed_roles):
 @pytest.mark.django_db
 @pytest.mark.parametrize('resource_name', ['Organization', 'Team'])
 @pytest.mark.parametrize('role_name', ['Member', 'Admin'])
-def test_mapping_from_controller_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles):
+def test_mapping_from_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles):
     """
-    ensure mappings for controller roles are correct
+    ensure mappings for platform roles are correct
     e.g.
-    Controller Organization Member > organization.member_role
-    Controller Organization Admin > organization.admin_role
-    Controller Team Member > team.member_role
-    Controller Team Admin > team.admin_role
+    Organization Member > organization.member_role
+    Organization Admin > organization.admin_role
+    Team Member > team.member_role
+    Team Admin > team.admin_role
     """
     resource = organization if resource_name == 'Organization' else team
     old_role_name = f"{role_name.lower()}_role"
     getattr(resource, old_role_name).members.add(rando)
     assignment = RoleUserAssignment.objects.get(user=rando)
-    assert assignment.role_definition.name == f'Controller {resource_name} {role_name}'
+    assert assignment.role_definition.name == f'{resource_name} {role_name}'
     old_role = get_role_from_object_role(assignment.object_role)
     assert old_role.id == getattr(resource, old_role_name).id
@ -35,21 +35,21 @@ class TestNewToOld:
 
     def test_new_to_old_rbac_team_member_addition(self, admin, post, team, bob, setup_managed_roles):
         '''
-        Assign user to Controller Team Member role definition, should be added to team.member_role.members
+        Assign user to Team Member role definition, should be added to team.member_role.members
         '''
-        rd = RoleDefinition.objects.get(name='Controller Team Member')
+        rd = RoleDefinition.objects.get(name='Team Member')
 
         url = get_relative_url('roleuserassignment-list')
         post(url, user=admin, data={'role_definition': rd.id, 'user': bob.id, 'object_id': team.id}, expect=201)
         assert bob in team.member_role.members.all()
 
-    def test_new_to_old_rbac_team_member_removal(self, admin, delete, team, bob):
+    def test_new_to_old_rbac_team_member_removal(self, admin, delete, team, bob, setup_managed_roles):
         '''
-        Remove user from Controller Team Member role definition, should be deleted from team.member_role.members
+        Remove user from Team Member role definition, should be deleted from team.member_role.members
         '''
         team.member_role.members.add(bob)
 
-        rd = RoleDefinition.objects.get(name='Controller Team Member')
+        rd = RoleDefinition.objects.get(name='Team Member')
         user_assignment = RoleUserAssignment.objects.get(user=bob, role_definition=rd, object_id=team.id)
 
         url = get_relative_url('roleuserassignment-detail', kwargs={'pk': user_assignment.id})
344
awx/main/tests/functional/github_app_test.py
Normal file
@ -0,0 +1,344 @@
+"""Tests for GitHub App Installation access token extraction plugin."""
+
+from typing import TypedDict
+
+import pytest
+from pytest_mock import MockerFixture
+
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.asymmetric.rsa import (
+    RSAPrivateKey,
+    RSAPublicKey,
+    generate_private_key,
+)
+from cryptography.hazmat.primitives.serialization import (
+    Encoding,
+    NoEncryption,
+    PrivateFormat,
+    PublicFormat,
+)
+from github.Auth import AppInstallationAuth
+from github.Consts import DEFAULT_JWT_ALGORITHM
+from github.GithubException import (
+    BadAttributeException,
+    GithubException,
+    UnknownObjectException,
+)
+from jwt import decode as decode_jwt
+
+from awx.main.credential_plugins import github_app
+
+
+github_app_jwt_client_id_unsupported = pytest.mark.xfail(
+    raises=(AssertionError, ValueError),
+    reason='Client ID in JWT is not currently supported by ' 'PyGitHub and is disabled.\n\n' 'Ref: https://github.com/PyGithub/PyGithub/issues/3213',
+)
+
+
+RSA_PUBLIC_EXPONENT = 65_537  # noqa: WPS303
+MINIMUM_RSA_KEY_SIZE = 1024  # the lowest value chosen for performance in tests
+
+
+@pytest.fixture(scope='module')
+def rsa_private_key() -> RSAPrivateKey:
+    """Generate an RSA private key."""
+    return generate_private_key(
+        public_exponent=RSA_PUBLIC_EXPONENT,
+        key_size=MINIMUM_RSA_KEY_SIZE,  # would be 4096 or higher in production
+        backend=default_backend(),
+    )
+
+
+@pytest.fixture(scope='module')
+def rsa_public_key(rsa_private_key: RSAPrivateKey) -> RSAPublicKey:
+    """Extract a public key out of the private one."""
+    return rsa_private_key.public_key()
+
+
+@pytest.fixture(scope='module')
+def rsa_private_key_bytes(rsa_private_key: RSAPrivateKey) -> bytes:
+    r"""Generate an unencrypted PKCS#1 formatted RSA private key.
+
+    Encoded as PEM-bytes.
+
+    This is what the GitHub-downloaded PEM files contain.
+
+    Ref: https://developer.github.com/apps/building-github-apps/\
+         authenticating-with-github-apps/
+    """
+    return rsa_private_key.private_bytes(
+        encoding=Encoding.PEM,
+        format=PrivateFormat.TraditionalOpenSSL,  # A.K.A. PKCS#1
+        encryption_algorithm=NoEncryption(),
+    )
+
+
+@pytest.fixture(scope='module')
+def rsa_private_key_str(rsa_private_key_bytes: bytes) -> str:
+    """Return private key as an instance of string."""
+    return rsa_private_key_bytes.decode('utf-8')
+
+
+@pytest.fixture(scope='module')
+def rsa_public_key_bytes(rsa_public_key: RSAPublicKey) -> bytes:
+    """Return a PKCS#1 formatted RSA public key encoded as PEM."""
+    return rsa_public_key.public_bytes(
+        encoding=Encoding.PEM,
+        format=PublicFormat.PKCS1,
+    )
+
+
+class AppInstallIds(TypedDict):
+    """Schema for augmented extractor function keyword args."""
+
+    app_or_client_id: str
+    install_id: str
+
+
+@pytest.mark.parametrize(
+    ('extract_github_app_install_token_args', 'expected_error_msg'),
+    (
+        pytest.param(
+            {
+                'app_or_client_id': 'invalid',
+                'install_id': '666',
+            },
+            '^Expected GitHub App or Client ID to be an integer or a string ' r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got' " 'invalid'$",
+            id='gh-app-id-broken-text',
+        ),
+        pytest.param(
+            {
+                'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbb',
+                'install_id': '666',
+            },
+            '^Expected GitHub App or Client ID to be an integer or a string '
+            r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got'
+            " 'Iv1.bbbbbbbbbbbbbbb'$",
+            id='gh-app-id-client-id-not-enough-chars',
+        ),
+        pytest.param(
+            {
+                'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbbx',
+                'install_id': '666',
+            },
+            '^Expected GitHub App or Client ID to be an integer or a string '
+            r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got'
+            " 'Iv1.bbbbbbbbbbbbbbbx'$",
+            id='gh-app-id-client-id-broken-hex',
+        ),
+        pytest.param(
+            {
+                'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbbbb',
+                'install_id': '666',
+            },
+            '^Expected GitHub App or Client ID to be an integer or a string '
+            r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got'
+            " 'Iv1.bbbbbbbbbbbbbbbbb'$",
+            id='gh-app-id-client-id-too-many-chars',
+        ),
+        pytest.param(
+            {
+                'app_or_client_id': 999,
+                'install_id': 'invalid',
+            },
+            '^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$",
+            id='gh-app-invalid-install-id-with-int-app-id',
+        ),
+        pytest.param(
+            {
+                'app_or_client_id': '999',
+                'install_id': 'invalid',
+            },
+            '^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$",
+            id='gh-app-invalid-install-id-with-str-digit-app-id',
+        ),
+        pytest.param(
+            {
+                'app_or_client_id': 'Iv1.cccccccccccccccc',
+                'install_id': 'invalid',
+            },
+            '^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$",
+            id='gh-app-invalid-install-id-with-client-id',
+            marks=github_app_jwt_client_id_unsupported,
+        ),
+    ),
+)
+def test_github_app_invalid_args(
+    extract_github_app_install_token_args: AppInstallIds,
+    expected_error_msg: str,
+) -> None:
+    """Test that invalid arguments make token extractor bail early."""
+    with pytest.raises(ValueError, match=expected_error_msg):
+        github_app.extract_github_app_install_token(
+            github_api_url='https://github.com',
+            private_rsa_key='key',
+            **extract_github_app_install_token_args,
+        )
+
+
+@pytest.mark.parametrize(
+    (
+        'github_exception',
+        'transformed_exception',
+        'error_msg',
+    ),
+    (
+        (
+            BadAttributeException(
+                '',
+                {},
+                Exception(),
+            ),
+            RuntimeError,
+            (
+                r'^Broken GitHub @ https://github\.com with '
+                r'app_or_client_id: 123, install_id: 456\. It is a bug, '
+                'please report it to the '
+                r"developers\.\n\n\('', \{\}, Exception\(\)\)$"
+            ),
+        ),
+        (
+            GithubException(-1),
+            RuntimeError,
+            (
+                '^An unexpected error happened while talking to GitHub API '
+                r'@ https://github\.com '
+                r'\(app_or_client_id: 123, install_id: 456\)\. '
+                r'Is the app or client ID correct\? '
+                r'And the private RSA key\? '
+                r'See https://docs\.github\.com/rest/reference/apps'
+                r'#create-an-installation-access-token-for-an-app\.'
+                r'\n\n-1$'
+            ),
+        ),
+        (
+            UnknownObjectException(-1),
+            ValueError,
+            (
+                '^Failed to retrieve a GitHub installation token from '
+                r'https://github\.com using '
+                r'app_or_client_id: 123, install_id: 456\. '
+                r'Is the app installed\? See '
+                r'https://docs\.github\.com/rest/reference/apps'
+                r'#create-an-installation-access-token-for-an-app\.'
+                r'\n\n-1$'
+            ),
+        ),
+    ),
+    ids=(
+        'github-broken',
+        'unexpected-error',
+        'no-install',
+    ),
+)
+def test_github_app_api_errors(
+    mocker: MockerFixture,
+    github_exception: Exception,
+    transformed_exception: type[Exception],
+    error_msg: str,
+) -> None:
+    """Test that GitHub API errors are mapped to friendly exceptions."""
+    application_id = 123
+    installation_id = 456
+
+    mocker.patch.object(
+        github_app.Auth.AppInstallationAuth,
+        'token',
+        new_callable=mocker.PropertyMock,
+        side_effect=github_exception,
+    )
+
+    with pytest.raises(transformed_exception, match=error_msg):
+        github_app.extract_github_app_install_token(
+            github_api_url='https://github.com',
+            app_or_client_id=application_id,
+            install_id=installation_id,
+            private_rsa_key='key',
+        )
+
+
+class _FakeAppInstallationAuth(AppInstallationAuth):
+    @property
+    def token(self: '_FakeAppInstallationAuth') -> str:
+        return 'token-sentinel'
+
+
+@pytest.mark.parametrize(
+    'application_id',
+    (
+        123,
+        '123',
+        pytest.param(
+            'Iv1.aaaaaaaaaaaaaaaa',
+            marks=github_app_jwt_client_id_unsupported,
+        ),
+    ),
+    ids=('app-id-int', 'app-id-str', 'client-id'),
+)
+@pytest.mark.parametrize(
+    'installation_id',
+    (456, '456'),
+    ids=('install-id-int', 'install-id-str'),
+)
+# pylint: disable-next=too-many-arguments,too-many-positional-arguments
+def test_github_app_github_authentication(  # noqa: WPS211
+    application_id: int | str,
+    installation_id: int | str,
+    mocker: MockerFixture,
+    monkeypatch: pytest.MonkeyPatch,
+    rsa_private_key_str: str,
+    rsa_public_key_bytes: bytes,
+) -> None:
+    """Test successful GitHub authentication."""
+    monkeypatch.setattr(
+        github_app.Auth,
+        'AppInstallationAuth',
+        _FakeAppInstallationAuth,
+    )
+
+    get_installation_auth_spy = mocker.spy(
+        github_app.Auth,
+        'AppInstallationAuth',
+    )
+    github_initializer_spy = mocker.spy(github_app, 'Github')
+
+    token = github_app.extract_github_app_install_token(
+        github_api_url='https://github.com',
+        app_or_client_id=application_id,
+        install_id=installation_id,
+        private_rsa_key=rsa_private_key_str,
+    )
+
+    observed_pygithub_obj = github_initializer_spy.spy_return
+    observed_gh_install_auth_obj = get_installation_auth_spy.spy_return
+    # pylint: disable-next=protected-access
+    signed_jwt = observed_gh_install_auth_obj._app_auth.token  # noqa: WPS437
+
+    assert token == 'token-sentinel'
+
+    assert observed_pygithub_obj.requester.base_url == 'https://github.com'
+
+    assert observed_gh_install_auth_obj.installation_id == int(installation_id)
+    assert isinstance(observed_gh_install_auth_obj, _FakeAppInstallationAuth)
+
+    # NOTE: The `decode_jwt()` call asserts that no
+    # NOTE: `jwt.exceptions.InvalidSignatureError()` exception gets raised
+    # NOTE: which would indicate incorrect RSA key or corrupted payload if
+    # NOTE: that was to happen. This verifies that JWT is signed with the
+    # NOTE: private RSA key we passed by using its public counterpart.
+    decode_jwt(
+        signed_jwt,
+        key=rsa_public_key_bytes,
+        algorithms=[DEFAULT_JWT_ALGORITHM],
+        options={
+            'require': ['exp', 'iat', 'iss'],
+            'strict_aud': False,
+            'verify_aud': True,
+            'verify_exp': True,
+            'verify_signature': True,
+            'verify_nbf': True,
+        },
+        audience=None,  # GH App JWT don't set the audience claim
+        issuer=str(application_id),
+        leeway=0.001,  # noqa: WPS432
+    )
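The parametrized failure cases above fully determine the ID validation contract: an App ID that is an integer (or digit string), or a Client ID of `Iv1.` followed by exactly 16 hexadecimal digits. A hypothetical helper that satisfies those cases (illustration only, not the plugin's actual code):

import re

_CLIENT_ID_RE = re.compile(r'^Iv1\.[0-9a-f]{16}$')


def validate_app_or_client_id(value):
    # Accept 123, '123', or 'Iv1.' + 16 hex digits; otherwise raise with
    # the wording the tests above match against.
    text = str(value)
    if text.isdigit() or _CLIENT_ID_RE.match(text):
        return text
    raise ValueError(
        'Expected GitHub App or Client ID to be an integer or a string '
        f'starting with `Iv1.` followed by 16 hexadecimal digits, but got {value!r}'
    )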
217
awx/main/tests/functional/test_credential_plugins.py
Normal file
@ -0,0 +1,217 @@
+import pytest
+from unittest import mock
+from awx.main.credential_plugins import hashivault, azure_kv
+
+from azure.keyvault.secrets import (
+    KeyVaultSecret,
+    SecretClient,
+    SecretProperties,
+)
+
+
+def test_imported_azure_cloud_sdk_vars():
+    from awx.main.credential_plugins import azure_kv
+
+    assert len(azure_kv.clouds) > 0
+    assert all([hasattr(c, 'name') for c in azure_kv.clouds])
+    assert all([hasattr(c, 'suffixes') for c in azure_kv.clouds])
+    assert all([hasattr(c.suffixes, 'keyvault_dns') for c in azure_kv.clouds])
+
+
+def test_hashivault_approle_auth():
+    kwargs = {
+        'role_id': 'the_role_id',
+        'secret_id': 'the_secret_id',
+    }
+    expected_res = {
+        'role_id': 'the_role_id',
+        'secret_id': 'the_secret_id',
+    }
+    res = hashivault.approle_auth(**kwargs)
+    assert res == expected_res
+
+
+def test_hashivault_kubernetes_auth():
+    kwargs = {
+        'kubernetes_role': 'the_kubernetes_role',
+    }
+    expected_res = {
+        'role': 'the_kubernetes_role',
+        'jwt': 'the_jwt',
+    }
+    with mock.patch('pathlib.Path') as path_mock:
+        mock.mock_open(path_mock.return_value.open, read_data='the_jwt')
+        res = hashivault.kubernetes_auth(**kwargs)
+        path_mock.assert_called_with('/var/run/secrets/kubernetes.io/serviceaccount/token')
+        assert res == expected_res
+
+
+def test_hashivault_client_cert_auth_explicit_role():
+    kwargs = {
+        'client_cert_role': 'test-cert-1',
+    }
+    expected_res = {
+        'name': 'test-cert-1',
+    }
+    res = hashivault.client_cert_auth(**kwargs)
+    assert res == expected_res
+
+
+def test_hashivault_client_cert_auth_no_role():
+    kwargs = {}
+    expected_res = {
+        'name': None,
+    }
+    res = hashivault.client_cert_auth(**kwargs)
+    assert res == expected_res
+
+
+def test_hashivault_userpass_auth():
+    kwargs = {'username': 'the_username', 'password': 'the_password'}
+    expected_res = {'username': 'the_username', 'password': 'the_password'}
+    res = hashivault.userpass_auth(**kwargs)
+    assert res == expected_res
+
+
+def test_hashivault_handle_auth_token():
+    kwargs = {
+        'token': 'the_token',
+    }
+    token = hashivault.handle_auth(**kwargs)
+    assert token == kwargs['token']
+
+
+def test_hashivault_handle_auth_approle():
+    kwargs = {
+        'role_id': 'the_role_id',
+        'secret_id': 'the_secret_id',
+    }
+    with mock.patch.object(hashivault, 'method_auth') as method_mock:
+        method_mock.return_value = 'the_token'
+        token = hashivault.handle_auth(**kwargs)
+        method_mock.assert_called_with(**kwargs, auth_param=kwargs)
+        assert token == 'the_token'
+
+
+def test_hashivault_handle_auth_kubernetes():
+    kwargs = {
+        'kubernetes_role': 'the_kubernetes_role',
+    }
+    with mock.patch.object(hashivault, 'method_auth') as method_mock:
+        with mock.patch('pathlib.Path') as path_mock:
+            mock.mock_open(path_mock.return_value.open, read_data='the_jwt')
+            method_mock.return_value = 'the_token'
+            token = hashivault.handle_auth(**kwargs)
+            method_mock.assert_called_with(**kwargs, auth_param={'role': 'the_kubernetes_role', 'jwt': 'the_jwt'})
+            assert token == 'the_token'
+
+
+def test_hashivault_handle_auth_client_cert():
+    kwargs = {
+        'client_cert_public': "foo",
+        'client_cert_private': "bar",
+        'client_cert_role': 'test-cert-1',
+    }
+    auth_params = {
+        'name': 'test-cert-1',
+    }
+    with mock.patch.object(hashivault, 'method_auth') as method_mock:
+        method_mock.return_value = 'the_token'
+        token = hashivault.handle_auth(**kwargs)
+        method_mock.assert_called_with(**kwargs, auth_param=auth_params)
+        assert token == 'the_token'
+
+
+def test_hashivault_handle_auth_not_enough_args():
+    with pytest.raises(Exception):
+        hashivault.handle_auth()
+
+
+class TestDelineaImports:
+    """
+    These modules have a try-except for ImportError which will allow using the older library,
+    but we do not want the awx_devel image to have the older library,
+    so these tests are designed to fail if these wind up using the fallback import
+    """
+
+    def test_dsv_import(self):
+        from awx.main.credential_plugins.dsv import SecretsVault  # noqa
+
+        # assert this module as opposed to older thycotic.secrets.vault
+        assert SecretsVault.__module__ == 'delinea.secrets.vault'
+
+    def test_tss_import(self):
+        from awx.main.credential_plugins.tss import DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret  # noqa
+
+        for cls in (DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret):
+            # assert this module as opposed to older thycotic.secrets.server
+            assert cls.__module__ == 'delinea.secrets.server'
+
+
+class _FakeSecretClient(SecretClient):
+    def get_secret(
+        self: '_FakeSecretClient',
+        name: str,
+        version: str | None = None,
+        **kwargs: str,
+    ) -> KeyVaultSecret:
+        props = SecretProperties(None, None)
+        return KeyVaultSecret(properties=props, value='test-secret')
+
+
+def test_azure_kv_invalid_env() -> None:
+    """Test running outside of Azure raises error."""
+    error_msg = (
+        'You are not operating on an Azure VM, so the Managed Identity '
+        'feature is unavailable. Please provide the full Client ID, '
+        'Client Secret, and Tenant ID or run the software on an Azure VM.'
+    )
+
+    with pytest.raises(
+        RuntimeError,
+        match=error_msg,
+    ):
+        azure_kv.azure_keyvault_backend(
+            url='https://test.vault.azure.net',
+            client='',
+            secret='client-secret',
+            tenant='tenant-id',
+            secret_field='secret',
+            secret_version='',
+        )
+
+
+@pytest.mark.parametrize(
+    ('client', 'secret', 'tenant'),
+    (
+        pytest.param('', '', '', id='managed-identity'),
+        pytest.param(
+            'client-id',
+            'client-secret',
+            'tenant-id',
+            id='client-secret-credential',
+        ),
+    ),
+)
+def test_azure_kv_valid_auth(
+    monkeypatch: pytest.MonkeyPatch,
+    client: str,
+    secret: str,
+    tenant: str,
+) -> None:
+    """Test successful Azure authentication via Managed Identity and credentials."""
+    monkeypatch.setattr(
+        azure_kv,
+        'SecretClient',
+        _FakeSecretClient,
+    )
+
+    keyvault_secret = azure_kv.azure_keyvault_backend(
+        url='https://test.vault.azure.net',
+        client=client,
+        secret=secret,
+        tenant=tenant,
+        secret_field='secret',
+        secret_version='',
+    )
+    assert keyvault_secret == 'test-secret'
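Read together, the handle_auth tests above document a dispatch on which credential inputs are present: a literal token short-circuits, then AppRole, Kubernetes, and TLS client certificate are tried in turn. A condensed sketch of that decision order (a hypothetical simplification, not the actual hashivault.handle_auth):

def choose_auth_param(**kwargs):
    if kwargs.get('token'):
        return None  # the token is returned directly; no login call is made
    if kwargs.get('role_id') and kwargs.get('secret_id'):
        return {'role_id': kwargs['role_id'], 'secret_id': kwargs['secret_id']}
    if kwargs.get('kubernetes_role'):
        # the JWT comes from the in-cluster service account token file
        with open('/var/run/secrets/kubernetes.io/serviceaccount/token') as f:
            return {'role': kwargs['kubernetes_role'], 'jwt': f.read()}
    if kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
        return {'name': kwargs.get('client_cert_role')}
    raise Exception('no usable HashiCorp Vault auth method among the given inputs')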
@ -50,13 +50,11 @@ def test_org_factory_roles(organization_factory):
         teams=['team1', 'team2'],
         users=['team1:foo', 'bar'],
         projects=['baz', 'bang'],
-        roles=['team2.member_role:foo', 'team1.admin_role:bar', 'team1.member_role:team2.admin_role', 'baz.admin_role:foo'],
+        roles=['team2.member_role:foo', 'team1.admin_role:bar', 'baz.admin_role:foo'],
     )
-    assert objects.users.bar in objects.teams.team2.admin_role
+    assert objects.users.bar in objects.teams.team1.admin_role
     assert objects.users.foo in objects.projects.baz.admin_role
     assert objects.users.foo in objects.teams.team1.member_role
-    assert objects.teams.team2.admin_role in objects.teams.team1.member_role.children.all()
-
 
 
 @pytest.mark.django_db
45
awx/main/tests/functional/test_ha.py
Normal file
@ -0,0 +1,45 @@
+import pytest
+
+# AWX
+from awx.main.ha import is_ha_environment
+from awx.main.models.ha import Instance
+from awx.main.dispatch.pool import get_auto_max_workers
+
+# Django
+from django.test.utils import override_settings
+
+
+@pytest.mark.django_db
+def test_multiple_instances():
+    for i in range(2):
+        Instance.objects.create(hostname=f'foo{i}', node_type='hybrid')
+    assert is_ha_environment()
+
+
+@pytest.mark.django_db
+def test_db_localhost():
+    Instance.objects.create(hostname='foo', node_type='hybrid')
+    Instance.objects.create(hostname='bar', node_type='execution')
+    assert is_ha_environment() is False
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize(
+    'settings',
+    [
+        dict(SYSTEM_TASK_ABS_MEM='16Gi', SYSTEM_TASK_ABS_CPU='24', SYSTEM_TASK_FORKS_MEM=400, SYSTEM_TASK_FORKS_CPU=4),
+        dict(SYSTEM_TASK_ABS_MEM='124Gi', SYSTEM_TASK_ABS_CPU='2', SYSTEM_TASK_FORKS_MEM=None, SYSTEM_TASK_FORKS_CPU=None),
+    ],
+    ids=['cpu_dominated', 'memory_dominated'],
+)
+def test_dispatcher_max_workers_reserve(settings, fake_redis):
+    """This tests that the dispatcher max_workers matches instance capacity
+
+    Assumes capacity_adjustment is 1,
+    plus reserve worker count
+    """
+    with override_settings(**settings):
+        i = Instance.objects.create(hostname='test-1', node_type='hybrid')
+        i.local_health_check()
+
+    assert get_auto_max_workers() == i.capacity + 7, (i.cpu, i.memory, i.cpu_capacity, i.mem_capacity)
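The final assertion encodes the sizing rule under test: dispatcher max workers equals the instance's computed capacity plus a fixed reserve of 7 (with capacity_adjustment assumed to be 1, per the docstring). Illustrative arithmetic only, using an assumed forks-per-CPU factor rather than the exact AWX capacity formulas:

cpus, forks_per_cpu, reserve = 24, 4, 7
cpu_capacity = cpus * forks_per_cpu   # 96 forks when CPU is the limiting resource
max_workers = cpu_capacity + reserve  # 96 + 7 = 103
assert max_workers == 103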
@ -49,7 +49,6 @@ def credential_kind(source):
     """Given the inventory source kind, return expected credential kind"""
     if source == 'openshift_virtualization':
         return 'kubernetes_bearer_token'
-
     return source.replace('ec2', 'aws')
 
 
56
awx/main/tests/functional/test_jt_rename_migration.py
Normal file
@ -0,0 +1,56 @@
+import pytest
+
+from awx.main.migrations._db_constraints import _rename_duplicates
+from awx.main.models import JobTemplate
+
+
+@pytest.mark.django_db
+def test_rename_job_template_duplicates(organization, project):
+    ids = []
+    for i in range(5):
+        jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project)
+        ids.append(jt.id)  # saved in order of creation
+
+    # Hack to first allow duplicate names of JT to test migration
+    JobTemplate.objects.filter(id__in=ids).update(org_unique=False)
+
+    # Set all JTs to the same name
+    JobTemplate.objects.filter(id__in=ids).update(name='same_name_for_test')
+
+    _rename_duplicates(JobTemplate)
+
+    first_jt = JobTemplate.objects.get(id=ids[0])
+    assert first_jt.name == 'same_name_for_test'
+
+    for i, pk in enumerate(ids):
+        if i == 0:
+            continue
+        jt = JobTemplate.objects.get(id=pk)
+        # Name should be set based on creation order
+        assert jt.name == f'same_name_for_test_dup{i}'
+
+
+@pytest.mark.django_db
+def test_rename_job_template_name_too_long(organization, project):
+    ids = []
+    for i in range(3):
+        jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project)
+        ids.append(jt.id)  # saved in order of creation
+
+    JobTemplate.objects.filter(id__in=ids).update(org_unique=False)
+
+    chars = 512
+    # Set all JTs to the same reaaaaaaly long name
+    JobTemplate.objects.filter(id__in=ids).update(name='A' * chars)
+
+    _rename_duplicates(JobTemplate)
+
+    first_jt = JobTemplate.objects.get(id=ids[0])
+    assert first_jt.name == 'A' * chars
+
+    for i, pk in enumerate(ids):
+        if i == 0:
+            continue
+        jt = JobTemplate.objects.get(id=pk)
+        assert jt.name.endswith(f'dup{i}')
+        assert len(jt.name) <= 512
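Both tests pin down the same renaming contract: the first duplicate keeps its name, later ones get a `_dup{i}` suffix in creation order, and the base name is trimmed so the result still fits the 512-character limit. A hypothetical stand-in for _rename_duplicates that satisfies exactly those assertions:

def rename_duplicates_sketch(names, max_len=512):
    seen = {}
    renamed = []
    for name in names:
        i = seen.get(name, 0)
        seen[name] = i + 1
        if i == 0:
            renamed.append(name)  # first occurrence keeps its name
            continue
        suffix = f'_dup{i}'
        renamed.append(name[: max_len - len(suffix)] + suffix)  # trim to fit
    return renamed


assert rename_duplicates_sketch(['x', 'x', 'x']) == ['x', 'x_dup1', 'x_dup2']
assert all(len(n) <= 512 for n in rename_duplicates_sketch(['A' * 512] * 3))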
@ -70,15 +70,18 @@ class TestMigrationSmoke:
|
|||||||
user = User.objects.create(username='random-user')
|
user = User.objects.create(username='random-user')
|
||||||
org.read_role.members.add(user)
|
org.read_role.members.add(user)
|
||||||
org.member_role.members.add(user)
|
org.member_role.members.add(user)
|
||||||
|
|
||||||
team = Team.objects.create(name='arbitrary-team', organization=org, created=now(), modified=now())
|
team = Team.objects.create(name='arbitrary-team', organization=org, created=now(), modified=now())
|
||||||
team.member_role.members.add(user)
|
team.member_role.members.add(user)
|
||||||
|
|
||||||
new_state = migrator.apply_tested_migration(
|
new_state = migrator.apply_tested_migration(
|
||||||
('main', '0192_custom_roles'),
|
('main', '0192_custom_roles'),
|
||||||
)
|
)
|
||||||
RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
|
RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
|
||||||
assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
|
assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
|
||||||
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Organization Member', object_id=org.id).exists()
|
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Organization Member', object_id=org.id).exists()
|
||||||
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Team Member', object_id=team.id).exists()
|
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Team Member', object_id=team.id).exists()
|
||||||
|
|
||||||
# Regression testing for bug that comes from current vs past models mismatch
|
# Regression testing for bug that comes from current vs past models mismatch
|
||||||
RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition')
|
RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition')
|
||||||
assert not RoleDefinition.objects.filter(name='Organization Organization Admin').exists()
|
assert not RoleDefinition.objects.filter(name='Organization Organization Admin').exists()
|
||||||
@ -91,22 +94,39 @@ class TestMigrationSmoke:
         )
         DABPermission = new_state.apps.get_model('dab_rbac', 'DABPermission')
         assert not DABPermission.objects.filter(codename='view_executionenvironment').exists()

         # Test create a Project with a duplicate name
         Organization = new_state.apps.get_model('main', 'Organization')
         Project = new_state.apps.get_model('main', 'Project')
+        WorkflowJobTemplate = new_state.apps.get_model('main', 'WorkflowJobTemplate')
         org = Organization.objects.create(name='duplicate-obj-organization', created=now(), modified=now())
         proj_ids = []
         for i in range(3):
             proj = Project.objects.create(name='duplicate-project-name', organization=org, created=now(), modified=now())
             proj_ids.append(proj.id)

+        # Test create WorkflowJobTemplate with duplicate names
+        wfjt_ids = []
+        for i in range(3):
+            wfjt = WorkflowJobTemplate.objects.create(name='duplicate-workflow-name', organization=org, created=now(), modified=now())
+            wfjt_ids.append(wfjt.id)
+
         # The uniqueness rules will not apply to InventorySource
         Inventory = new_state.apps.get_model('main', 'Inventory')
         InventorySource = new_state.apps.get_model('main', 'InventorySource')
         inv = Inventory.objects.create(name='migration-test-inv', organization=org, created=now(), modified=now())
         InventorySource.objects.create(name='migration-test-src', source='file', inventory=inv, organization=org, created=now(), modified=now())

+        # Apply migration 0200 which should rename duplicates
         new_state = migrator.apply_tested_migration(
             ('main', '0200_template_name_constraint'),
         )

+        # Get the models from the new state for verification
+        Project = new_state.apps.get_model('main', 'Project')
+        WorkflowJobTemplate = new_state.apps.get_model('main', 'WorkflowJobTemplate')
+        InventorySource = new_state.apps.get_model('main', 'InventorySource')
+
         for i, proj_id in enumerate(proj_ids):
             proj = Project.objects.get(id=proj_id)
             if i == 0:
@ -114,61 +134,37 @@ class TestMigrationSmoke:
             else:
                 assert proj.name != 'duplicate-project-name'
                 assert proj.name.startswith('duplicate-project-name')

+        # Verify WorkflowJobTemplate duplicates are renamed
+        for i, wfjt_id in enumerate(wfjt_ids):
+            wfjt = WorkflowJobTemplate.objects.get(id=wfjt_id)
+            if i == 0:
+                assert wfjt.name == 'duplicate-workflow-name'
+            else:
+                assert wfjt.name != 'duplicate-workflow-name'
+                assert wfjt.name.startswith('duplicate-workflow-name')
+
         # The inventory source had this field set to avoid the constraints
-        InventorySource = new_state.apps.get_model('main', 'InventorySource')
         inv_src = InventorySource.objects.get(name='migration-test-src')
         assert inv_src.org_unique is False
-        Project = new_state.apps.get_model('main', 'Project')
         for proj in Project.objects.all():
             assert proj.org_unique is True

+        # Piggyback test for the new credential types
+        validate_exists = ['GitHub App Installation Access Token Lookup', 'Terraform backend configuration']
+        CredentialType = new_state.apps.get_model('main', 'CredentialType')
+        # simulate an upgrade by deleting existing types with these names
+        for expected_name in validate_exists:
+            ct = CredentialType.objects.filter(name=expected_name).first()
+            if ct:
+                ct.delete()
+
-
-@pytest.mark.django_db
-class TestGithubAppBug:
-    """
-    Tests that `awx-manage createsuperuser` runs successfully after
-    the `github_app` CredentialType kind is updated to `github_app_lookup`
-    via the migration.
-    """
-
-    def test_after_github_app_kind_migration(self, migrator):
-        """
-        Verifies that `createsuperuser` does not raise a KeyError
-        after the 0202_squashed_deletions migration (which includes
-        the `update_github_app_kind` logic) is applied.
-        """
-        # 1. Apply migrations up to the point *before* the 0202_squashed_deletions migration.
-        # This simulates the state where the problematic CredentialType might exist.
-        # We use 0201_create_managed_creds as the direct predecessor.
-        old_state = migrator.apply_tested_migration(('main', '0201_create_managed_creds'))
-
-        # Get the CredentialType model from the historical state.
-        CredentialType = old_state.apps.get_model('main', 'CredentialType')
-
-        # Create a CredentialType with the old, problematic 'kind' value
-        CredentialType.objects.create(
-            name='Legacy GitHub App Credential',
-            kind='github_app',  # The old, problematic 'kind' value
-            namespace='github_app',  # The namespace that causes the KeyError in the registry lookup
-            managed=True,
-            created=timezone.now(),
-            modified=timezone.now(),
-        )
-
-        # Apply the migration that includes the fix (0202_squashed_deletions).
-        new_state = migrator.apply_tested_migration(('main', '0202_squashed_deletions'))
-
-        # Verify that the CredentialType with the old 'kind' no longer exists
-        # and the 'kind' has been updated to the new value.
-        CredentialType = new_state.apps.get_model('main', 'CredentialType')  # Get CredentialType model from the new state
-
-        # Assertion 1: The CredentialType with the old 'github_app' kind should no longer exist.
-        assert not CredentialType.objects.filter(
-            kind='github_app'
-        ).exists(), "CredentialType with old 'github_app' kind should no longer exist after migration."
-
-        # Assertion 2: The CredentialType should now exist with the new 'github_app_lookup' kind
-        # and retain its original name.
-        assert CredentialType.objects.filter(
-            kind='github_app_lookup', name='Legacy GitHub App Credential'
-        ).exists(), "CredentialType should be updated to 'github_app_lookup' and retain its name."
+        new_state = migrator.apply_tested_migration(
+            ('main', '0201_create_managed_creds'),
+        )
+
+        CredentialType = new_state.apps.get_model('main', 'CredentialType')
+        for expected_name in validate_exists:
+            assert CredentialType.objects.filter(
+                name=expected_name
+            ).exists(), f'Could not find {expected_name} credential type name, all names: {list(CredentialType.objects.values_list("name", flat=True))}'
@ -334,6 +334,69 @@ def test_team_project_list(get, team_project_list):
     )


+@pytest.mark.django_db
+def test_project_teams_list_multiple_roles_distinct(get, organization_factory):
+    # test projects with multiple roles on the same team
+    objects = organization_factory(
+        'org1',
+        superusers=['admin'],
+        teams=['teamA'],
+        projects=['proj1'],
+        roles=[
+            'teamA.member_role:proj1.admin_role',
+            'teamA.member_role:proj1.use_role',
+            'teamA.member_role:proj1.update_role',
+            'teamA.member_role:proj1.read_role',
+        ],
+    )
+    admin = objects.superusers.admin
+    proj1 = objects.projects.proj1
+
+    res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data
+    names = [t['name'] for t in res['results']]
+    assert names == ['teamA']
+
+
+@pytest.mark.django_db
+def test_project_teams_list_multiple_teams(get, organization_factory):
+    # test projects with multiple teams
+    objs = organization_factory(
+        'org1',
+        superusers=['admin'],
+        teams=['teamA', 'teamB', 'teamC', 'teamD'],
+        projects=['proj1'],
+        roles=[
+            'teamA.member_role:proj1.admin_role',
+            'teamB.member_role:proj1.update_role',
+            'teamC.member_role:proj1.use_role',
+            'teamD.member_role:proj1.read_role',
+        ],
+    )
+    admin = objs.superusers.admin
+    proj1 = objs.projects.proj1
+
+    res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data
+    names = sorted([t['name'] for t in res['results']])
+    assert names == ['teamA', 'teamB', 'teamC', 'teamD']
+
+
+@pytest.mark.django_db
+def test_project_teams_list_no_direct_assignments(get, organization_factory):
+    # test projects with no direct team assignments
+    objects = organization_factory(
+        'org1',
+        superusers=['admin'],
+        teams=['teamA'],
+        projects=['proj1'],
+        roles=[],
+    )
+    admin = objects.superusers.admin
+    proj1 = objects.projects.proj1
+
+    res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data
+    assert res['count'] == 0
+
+
 @pytest.mark.parametrize("u,expected_status_code", [('rando', 403), ('org_member', 403), ('org_admin', 201), ('admin', 201)])
 @pytest.mark.django_db
 def test_create_project(post, organization, org_admin, org_member, admin, rando, u, expected_status_code):
96
awx/main/tests/functional/test_tasks.py
Normal file
@ -0,0 +1,96 @@
+import pytest
+import os
+import tempfile
+import shutil
+
+from awx.main.tasks.jobs import RunJob
+from awx.main.tasks.system import CleanupImagesAndFiles, execution_node_health_check
+from awx.main.models import Instance, Job
+
+
+@pytest.fixture
+def scm_revision_file(tmpdir_factory):
+    # Returns path to temporary testing revision file
+    revision_file = tmpdir_factory.mktemp('revisions').join('revision.txt')
+    with open(str(revision_file), 'w') as f:
+        f.write('1234567890123456789012345678901234567890')
+    return os.path.join(revision_file.dirname, 'revision.txt')
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('node_type', ('control', 'hybrid'))
+def test_no_worker_info_on_AWX_nodes(node_type):
+    hostname = 'us-south-3-compute.invalid'
+    Instance.objects.create(hostname=hostname, node_type=node_type)
+    assert execution_node_health_check(hostname) is None
+
+
+@pytest.fixture
+def job_folder_factory(request):
+    def _rf(job_id='1234'):
+        pdd_path = tempfile.mkdtemp(prefix=f'awx_{job_id}_')
+
+        def test_folder_cleanup():
+            if os.path.exists(pdd_path):
+                shutil.rmtree(pdd_path)
+
+        request.addfinalizer(test_folder_cleanup)
+
+        return pdd_path
+
+    return _rf
+
+
+@pytest.fixture
+def mock_job_folder(job_folder_factory):
+    return job_folder_factory()
+
+
+@pytest.mark.django_db
+def test_folder_cleanup_stale_file(mock_job_folder, mock_me):
+    CleanupImagesAndFiles.run()
+    assert os.path.exists(mock_job_folder)  # grace period should protect folder from deletion
+
+    CleanupImagesAndFiles.run(grace_period=0)
+    assert not os.path.exists(mock_job_folder)  # should be deleted
+
+
+@pytest.mark.django_db
+def test_folder_cleanup_running_job(mock_job_folder, me_inst):
+    job = Job.objects.create(id=1234, controller_node=me_inst.hostname, status='running')
+    CleanupImagesAndFiles.run(grace_period=0)
+    assert os.path.exists(mock_job_folder)  # running job should prevent folder from getting deleted
+
+    job.status = 'failed'
+    job.save(update_fields=['status'])
+    CleanupImagesAndFiles.run(grace_period=0)
+    assert not os.path.exists(mock_job_folder)  # job is finished and no grace period, should delete
+
+
+@pytest.mark.django_db
+def test_folder_cleanup_multiple_running_jobs(job_folder_factory, me_inst):
+    jobs = []
+    dirs = []
+    num_jobs = 3
+
+    for i in range(num_jobs):
+        job = Job.objects.create(controller_node=me_inst.hostname, status='running')
+        dirs.append(job_folder_factory(job.id))
+        jobs.append(job)
+
+    CleanupImagesAndFiles.run(grace_period=0)
+
+    assert [os.path.exists(d) for d in dirs] == [True for i in range(num_jobs)]
+
+
+@pytest.mark.django_db
+def test_does_not_run_reaped_job(mocker, mock_me):
+    job = Job.objects.create(status='failed', job_explanation='This job has been reaped.')
+    mock_run = mocker.patch('awx.main.tasks.jobs.ansible_runner.interface.run')
+    try:
+        RunJob().run(job.id)
+    except Exception:
+        pass
+    job.refresh_from_db()
+    assert job.status == 'failed'
+    mock_run.assert_not_called()
@ -3,7 +3,6 @@ import time
 import os
 import shutil
 import tempfile
-import logging

 import pytest

@ -14,15 +13,11 @@ from awx.api.versioning import reverse
 # These tests are invoked from the awx/main/tests/live/ subfolder
 # so any fixtures from higher-up conftest files must be explicitly included
 from awx.main.tests.functional.conftest import *  # noqa
-from awx.main.tests.conftest import load_all_credentials  # noqa: F401; pylint: disable=unused-import
 from awx.main.tests import data

 from awx.main.models import Project, JobTemplate, Organization, Inventory

-logger = logging.getLogger(__name__)
-
 PROJ_DATA = os.path.join(os.path.dirname(data.__file__), 'projects')

@ -138,29 +133,30 @@ def podman_image_generator():


 @pytest.fixture
-def project_factory(post, default_org, admin):
-    def _rf(scm_url=None, local_path=None):
-        proj_kwargs = {}
+def run_job_from_playbook(default_org, demo_inv, post, admin):
+    def _rf(test_name, playbook, local_path=None, scm_url=None, jt_params=None):
+        project_name = f'{test_name} project'
+        jt_name = f'{test_name} JT: {playbook}'
+
+        old_proj = Project.objects.filter(name=project_name).first()
+        if old_proj:
+            old_proj.delete()
+
+        old_jt = JobTemplate.objects.filter(name=jt_name).first()
+        if old_jt:
+            old_jt.delete()
+
+        proj_kwargs = {'name': project_name, 'organization': default_org.id}
         if local_path:
             # manual path
-            project_name = f'Manual roject {local_path}'
             proj_kwargs['scm_type'] = ''
             proj_kwargs['local_path'] = local_path
         elif scm_url:
-            project_name = f'Project {scm_url}'
             proj_kwargs['scm_type'] = 'git'
             proj_kwargs['scm_url'] = scm_url
         else:
             raise RuntimeError('Need to provide scm_url or local_path')

-        proj_kwargs['name'] = project_name
-        proj_kwargs['organization'] = default_org.id
-
-        old_proj = Project.objects.filter(name=project_name).first()
-        if old_proj:
-            logger.info(f'Deleting existing project {project_name}')
-            old_proj.delete()
-
         result = post(
             reverse('api:project_list'),
             proj_kwargs,
@ -168,23 +164,6 @@ def project_factory(post, default_org, admin):
             expect=201,
         )
         proj = Project.objects.get(id=result.data['id'])
-        return proj
-
-    return _rf
-
-
-@pytest.fixture
-def run_job_from_playbook(demo_inv, post, admin, project_factory):
-    def _rf(test_name, playbook, local_path=None, scm_url=None, jt_params=None, proj=None, wait=True):
-        jt_name = f'{test_name} JT: {playbook}'
-
-        if not proj:
-            proj = project_factory(scm_url=scm_url, local_path=local_path)
-
-        old_jt = JobTemplate.objects.filter(name=jt_name).first()
-        if old_jt:
-            logger.info(f'Deleting existing JT {jt_name}')
-            old_jt.delete()
-
         if proj.current_job:
             wait_for_job(proj.current_job)
@ -206,9 +185,7 @@ def run_job_from_playbook(demo_inv, post, admin, project_factory):
         job = jt.create_unified_job()
         job.signal_start()

-        if wait:
-            wait_for_job(job)
-            assert job.status == 'successful'
-            return {'job': job, 'job_template': jt, 'project': proj}
+        wait_for_job(job)
+        assert job.status == 'successful'

     return _rf
@ -1,20 +1,14 @@
 import pytest

-from awx.main.tests.live.tests.conftest import wait_for_events, wait_for_job
+from awx.main.tests.live.tests.conftest import wait_for_events

 from awx.main.models import Job, Inventory


-@pytest.fixture
-def facts_project(live_tmp_folder, project_factory):
-    return project_factory(scm_url=f'file://{live_tmp_folder}/facts')
-
-
 def assert_facts_populated(name):
     job = Job.objects.filter(name__icontains=name).order_by('-created').first()
     assert job is not None
     wait_for_events(job)
-    wait_for_job(job)

     inventory = job.inventory
     assert inventory.hosts.count() > 0  # sanity
@ -23,24 +17,24 @@ def assert_facts_populated(name):


 @pytest.fixture
-def general_facts_test(facts_project, run_job_from_playbook):
+def general_facts_test(live_tmp_folder, run_job_from_playbook):
     def _rf(slug, jt_params):
         jt_params['use_fact_cache'] = True
-        standard_kwargs = dict(jt_params=jt_params)
+        standard_kwargs = dict(scm_url=f'file://{live_tmp_folder}/facts', jt_params=jt_params)

         # GATHER FACTS
         name = f'test_gather_ansible_facts_{slug}'
-        run_job_from_playbook(name, 'gather.yml', proj=facts_project, **standard_kwargs)
+        run_job_from_playbook(name, 'gather.yml', **standard_kwargs)
         assert_facts_populated(name)

         # KEEP FACTS
         name = f'test_clear_ansible_facts_{slug}'
-        run_job_from_playbook(name, 'no_op.yml', proj=facts_project, **standard_kwargs)
+        run_job_from_playbook(name, 'no_op.yml', **standard_kwargs)
         assert_facts_populated(name)

         # CLEAR FACTS
         name = f'test_clear_ansible_facts_{slug}'
-        run_job_from_playbook(name, 'clear.yml', proj=facts_project, **standard_kwargs)
+        run_job_from_playbook(name, 'clear.yml', **standard_kwargs)
         job = Job.objects.filter(name__icontains=name).order_by('-created').first()

         assert job is not None
@ -0,0 +1,581 @@
+import os
+import pytest
+from unittest.mock import patch, Mock, call, DEFAULT
+from io import StringIO
+from unittest import TestCase
+
+from awx.main.management.commands.import_auth_config_to_gateway import Command
+from awx.main.utils.gateway_client import GatewayAPIError
+
+
+class TestImportAuthConfigToGatewayCommand(TestCase):
+    def setUp(self):
+        self.command = Command()
+
+    def options_basic_auth_full_send(self):
+        return {
+            'basic_auth': True,
+            'skip_all_authenticators': False,
+            'skip_oidc': False,
+            'skip_github': False,
+            'skip_ldap': False,
+            'skip_ad': False,
+            'skip_saml': False,
+            'skip_radius': False,
+            'skip_tacacs': False,
+            'skip_google': False,
+            'skip_settings': False,
+            'force': False,
+        }
+
+    def options_basic_auth_skip_all_individual(self):
+        return {
+            'basic_auth': True,
+            'skip_all_authenticators': False,
+            'skip_oidc': True,
+            'skip_github': True,
+            'skip_ldap': True,
+            'skip_ad': True,
+            'skip_saml': True,
+            'skip_radius': True,
+            'skip_tacacs': True,
+            'skip_google': True,
+            'skip_settings': True,
+            'force': False,
+        }
+
+    def options_svc_token_full_send(self):
+        options = self.options_basic_auth_full_send()
+        options['basic_auth'] = False
+        return options
+
+    def options_svc_token_skip_all(self):
+        options = self.options_basic_auth_skip_all_individual()
+        options['basic_auth'] = False
+        return options
+
+    def create_mock_migrator(
+        self,
+        mock_migrator_class,
+        authenticator_type="TestAuth",
+        created=0,
+        updated=0,
+        unchanged=0,
+        failed=0,
+        mappers_created=0,
+        mappers_updated=0,
+        mappers_failed=0,
+        settings_created=0,
+        settings_updated=0,
+        settings_unchanged=0,
+        settings_failed=0,
+    ):
+        """Helper method to create a mock migrator with specified return values."""
+        mock_migrator = Mock()
+        mock_migrator.get_authenticator_type.return_value = authenticator_type
+        mock_migrator.migrate.return_value = {
+            'created': created,
+            'updated': updated,
+            'unchanged': unchanged,
+            'failed': failed,
+            'mappers_created': mappers_created,
+            'mappers_updated': mappers_updated,
+            'mappers_failed': mappers_failed,
+        }
+        mock_migrator_class.return_value = mock_migrator
+        return mock_migrator
+
+    def test_add_arguments(self):
+        """Test that all expected arguments are properly added to the parser."""
+        parser = Mock()
+        self.command.add_arguments(parser)
+
+        expected_calls = [
+            call('--basic-auth', action='store_true', help='Use HTTP Basic Authentication between Controller and Gateway'),
+            call(
+                '--skip-all-authenticators',
+                action='store_true',
+                help='Skip importing all authenticators [GitHub, OIDC, SAML, Azure AD, LDAP, RADIUS, TACACS+, Google OAuth2]',
+            ),
+            call('--skip-oidc', action='store_true', help='Skip importing generic OIDC authenticators'),
+            call('--skip-github', action='store_true', help='Skip importing GitHub authenticator'),
+            call('--skip-ldap', action='store_true', help='Skip importing LDAP authenticators'),
+            call('--skip-ad', action='store_true', help='Skip importing Azure AD authenticator'),
+            call('--skip-saml', action='store_true', help='Skip importing SAML authenticator'),
+            call('--skip-radius', action='store_true', help='Skip importing RADIUS authenticator'),
+            call('--skip-tacacs', action='store_true', help='Skip importing TACACS+ authenticator'),
+            call('--skip-google', action='store_true', help='Skip importing Google OAuth2 authenticator'),
+            call('--skip-settings', action='store_true', help='Skip importing settings'),
+            call(
+                '--force',
+                action='store_true',
+                help='Force migration even if configurations already exist. Does not apply to skipped authenticators nor skipped settings.',
+            ),
+        ]
+
+        parser.add_argument.assert_has_calls(expected_calls, any_order=True)
+
+    @patch.dict(os.environ, {}, clear=True)
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_handle_missing_env_vars_basic_auth(self, mock_stdout):
+        """Test that missing environment variables cause clean exit when using basic auth."""
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**self.options_basic_auth_full_send())
+            # Should exit with code 0 for successful early validation
+            assert exc_info.value.code == 0
+
+        output = mock_stdout.getvalue()
+        self.assertIn('Missing required environment variables:', output)
+        self.assertIn('GATEWAY_BASE_URL', output)
+        self.assertIn('GATEWAY_USER', output)
+        self.assertIn('GATEWAY_PASSWORD', output)
+
+    @patch.dict(
+        os.environ,
+        {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass', 'GATEWAY_SKIP_VERIFY': 'true'},
+    )
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator')
+    @patch.multiple(
+        'awx.main.management.commands.import_auth_config_to_gateway',
+        GitHubMigrator=DEFAULT,
+        OIDCMigrator=DEFAULT,
+        SAMLMigrator=DEFAULT,
+        AzureADMigrator=DEFAULT,
+        LDAPMigrator=DEFAULT,
+        RADIUSMigrator=DEFAULT,
+        TACACSMigrator=DEFAULT,
+        GoogleOAuth2Migrator=DEFAULT,
+    )
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_handle_basic_auth_success(self, mock_stdout, mock_gateway_client, mock_settings_migrator, **mock_migrators):
+        """Test successful execution with basic auth."""
+        # Mock gateway client context manager
+        mock_client_instance = Mock()
+        mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
+        mock_gateway_client.return_value.__exit__.return_value = None
+
+        for mock_migrator_class in mock_migrators.values():
+            self.create_mock_migrator(mock_migrator_class, created=1, mappers_created=2)
+
+        self.create_mock_migrator(mock_settings_migrator, settings_created=1, settings_updated=0, settings_unchanged=2, settings_failed=0)
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**self.options_basic_auth_full_send())
+            # Should exit with code 0 for success
+            assert exc_info.value.code == 0
+
+        # Verify gateway client was created with correct parameters
+        mock_gateway_client.assert_called_once_with(
+            base_url='https://gateway.example.com', username='testuser', password='testpass', skip_verify=True, command=self.command
+        )
+
+        # Verify all migrators were created
+        for mock_migrator in mock_migrators.values():
+            mock_migrator.assert_called_once_with(mock_client_instance, self.command, force=False)
+
+        mock_settings_migrator.assert_called_once_with(mock_client_instance, self.command, force=False)
+
+        # Verify output contains success messages
+        output = mock_stdout.getvalue()
+
+        self.assertIn('HTTP Basic Auth: true', output)
+        self.assertIn('Successfully connected to Gateway', output)
+        self.assertIn('Migration Summary', output)
+        self.assertIn('authenticators', output)
+        self.assertIn('mappers', output)
+        self.assertIn('settings', output)
+
+    @patch.dict(os.environ, {'GATEWAY_SKIP_VERIFY': 'false'}, clear=True)  # Ensure verify_https=True
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.create_api_client')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClientSVCToken')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.urlparse')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.urlunparse')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_handle_service_token_success(self, mock_stdout, mock_urlunparse, mock_urlparse, mock_gateway_client_svc, mock_create_api_client):
+        """Test successful execution with service token."""
+        # Mock resource API client
+        mock_resource_client = Mock()
+        mock_resource_client.base_url = 'https://gateway.example.com/api/v1'
+        mock_resource_client.jwt_user_id = 'test-user'
+        mock_resource_client.jwt_expiration = '2024-12-31'
+        mock_resource_client.verify_https = True
+        mock_response = Mock()
+        mock_response.status_code = 200
+        mock_resource_client.get_service_metadata.return_value = mock_response
+        mock_create_api_client.return_value = mock_resource_client
+
+        # Mock URL parsing
+        mock_parsed = Mock()
+        mock_parsed.scheme = 'https'
+        mock_parsed.netloc = 'gateway.example.com'
+        mock_urlparse.return_value = mock_parsed
+        mock_urlunparse.return_value = 'https://gateway.example.com/'
+
+        # Mock gateway client context manager
+        mock_client_instance = Mock()
+        mock_gateway_client_svc.return_value.__enter__.return_value = mock_client_instance
+        mock_gateway_client_svc.return_value.__exit__.return_value = None
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with patch('sys.exit'):
+                self.command.handle(**self.options_svc_token_skip_all())
+                # Should call sys.exit(0) for success, but may not due to test setup
+                # Just verify the command completed without raising an exception
+
+        # Verify resource API client was created and configured
+        mock_create_api_client.assert_called_once()
+        self.assertTrue(mock_resource_client.verify_https)  # Should be True when GATEWAY_SKIP_VERIFY='false'
+        mock_resource_client.get_service_metadata.assert_called_once()
+
+        # Verify service token client was created
+        mock_gateway_client_svc.assert_called_once_with(resource_api_client=mock_resource_client, command=self.command)
+
+        # Verify output contains service token messages
+        output = mock_stdout.getvalue()
+        self.assertIn('Gateway Service Token: true', output)
+        self.assertIn('Connection Validated: True', output)
+        self.assertIn('No authentication configurations found to migrate.', output)
+
+    @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
+    @patch.multiple(
+        'awx.main.management.commands.import_auth_config_to_gateway',
+        GitHubMigrator=DEFAULT,
+        OIDCMigrator=DEFAULT,
+        SAMLMigrator=DEFAULT,
+        AzureADMigrator=DEFAULT,
+        LDAPMigrator=DEFAULT,
+        RADIUSMigrator=DEFAULT,
+        TACACSMigrator=DEFAULT,
+        GoogleOAuth2Migrator=DEFAULT,
+        SettingsMigrator=DEFAULT,
+    )
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_skip_flags_prevent_authenticator_individual_and_settings_migration(self, mock_stdout, mock_gateway_client, **mock_migrators):
+        """Test that skip flags prevent corresponding migrators from being created."""
+
+        # Mock gateway client context manager
+        mock_client_instance = Mock()
+        mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
+        mock_gateway_client.return_value.__exit__.return_value = None
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with patch('sys.exit'):
+                self.command.handle(**self.options_basic_auth_skip_all_individual())
+                # Should call sys.exit(0) for success, but may not due to test setup
+                # Just verify the command completed without raising an exception
+
+        # Verify no migrators were created
+        for mock_migrator in mock_migrators.values():
+            mock_migrator.assert_not_called()
+
+        # Verify warning message about no configurations
+        output = mock_stdout.getvalue()
+        self.assertIn('No authentication configurations found to migrate.', output)
+        self.assertIn('Settings migration will not execute.', output)
+        self.assertIn('NO MIGRATIONS WILL EXECUTE.', output)
+
+    @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
+    @patch.multiple(
+        'awx.main.management.commands.import_auth_config_to_gateway',
+        GitHubMigrator=DEFAULT,
+        OIDCMigrator=DEFAULT,
+        SAMLMigrator=DEFAULT,
+        AzureADMigrator=DEFAULT,
+        LDAPMigrator=DEFAULT,
+        RADIUSMigrator=DEFAULT,
+        TACACSMigrator=DEFAULT,
+        GoogleOAuth2Migrator=DEFAULT,
+    )
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_skip_flags_prevent_authenticator_migration(self, mock_stdout, mock_gateway_client, **mock_migrators):
+        """Test that skip flags prevent corresponding migrators from being created."""
+
+        # Mock gateway client context manager
+        mock_client_instance = Mock()
+        mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
+        mock_gateway_client.return_value.__exit__.return_value = None
+
+        options = self.options_basic_auth_full_send()
+        options['skip_all_authenticators'] = True
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**options)
+            # Should exit with code 0 for success (no failures)
+            assert exc_info.value.code == 0
+
+        # Verify no migrators were created
+        for mock_migrator in mock_migrators.values():
+            mock_migrator.assert_not_called()
+
+        # Verify warning message about no configurations
+        output = mock_stdout.getvalue()
+        self.assertIn('No authentication configurations found to migrate.', output)
+        self.assertNotIn('Settings migration will not execute.', output)
+        self.assertNotIn('NO MIGRATIONS WILL EXECUTE.', output)
+
+    @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_handle_gateway_api_error(self, mock_stdout, mock_gateway_client):
+        """Test handling of GatewayAPIError exceptions."""
+        # Mock gateway client to raise GatewayAPIError
+        mock_gateway_client.side_effect = GatewayAPIError('Test error message', status_code=400, response_data={'error': 'Bad request'})
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**self.options_basic_auth_full_send())
+            # Should exit with code 1 for errors
+            assert exc_info.value.code == 1
+
+        # Verify error message output
+        output = mock_stdout.getvalue()
+        self.assertIn('Gateway API Error: Test error message', output)
+        self.assertIn('Status Code: 400', output)
+        self.assertIn("Response: {'error': 'Bad request'}", output)
+
+    @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_handle_unexpected_error(self, mock_stdout, mock_gateway_client):
+        """Test handling of unexpected exceptions."""
+        # Mock gateway client to raise unexpected error
+        mock_gateway_client.side_effect = ValueError('Unexpected error')
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**self.options_basic_auth_full_send())
+            # Should exit with code 1 for errors
+            assert exc_info.value.code == 1
+
+        # Verify error message output
+        output = mock_stdout.getvalue()
+        self.assertIn('Unexpected error during migration: Unexpected error', output)
+
+    @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GitHubMigrator')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_force_flag_passed_to_migrators(self, mock_stdout, mock_github, mock_settings_migrator, mock_gateway_client):
+        """Test that force flag is properly passed to migrators."""
+        # Mock gateway client context manager
+        mock_client_instance = Mock()
+        mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
+        mock_gateway_client.return_value.__exit__.return_value = None
+
+        # Mock migrator
+        self.create_mock_migrator(mock_github, authenticator_type="GitHub", created=0, mappers_created=2)
+        self.create_mock_migrator(
+            mock_settings_migrator, authenticator_type="Settings", settings_created=0, settings_updated=2, settings_unchanged=0, settings_failed=0
+        )
+
+        options = self.options_basic_auth_skip_all_individual()
+        options['force'] = True
+        options['skip_github'] = False
+        options['skip_settings'] = False
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**options)
+            # Should exit with code 0 for success
+            assert exc_info.value.code == 0
+
+        # Verify migrator was created with force=True
+        mock_github.assert_called_once_with(mock_client_instance, self.command, force=True)
+
+        # Verify settings migrator was created with force=True
+        mock_settings_migrator.assert_called_once_with(mock_client_instance, self.command, force=True)
+
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_print_export_summary(self, mock_stdout):
+        """Test the _print_export_summary method."""
+        result = {
+            'created': 2,
+            'updated': 1,
+            'unchanged': 3,
+            'failed': 0,
+            'mappers_created': 5,
+            'mappers_updated': 2,
+            'mappers_failed': 1,
+        }
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            self.command._print_export_summary('SAML', result)
+
+        output = mock_stdout.getvalue()
+        self.assertIn('--- SAML Export Summary ---', output)
+        self.assertIn('Authenticators created: 2', output)
+        self.assertIn('Authenticators updated: 1', output)
+        self.assertIn('Authenticators unchanged: 3', output)
+        self.assertIn('Authenticators failed: 0', output)
+        self.assertIn('Mappers created: 5', output)
+        self.assertIn('Mappers updated: 2', output)
+        self.assertIn('Mappers failed: 1', output)
+
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_print_export_summary_settings(self, mock_stdout):
+        """Test the _print_export_summary method."""
+        result = {
+            'settings_created': 2,
+            'settings_updated': 1,
+            'settings_unchanged': 3,
+            'settings_failed': 0,
+        }
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            self.command._print_export_summary('Settings', result)
+
+        output = mock_stdout.getvalue()
+        self.assertIn('--- Settings Export Summary ---', output)
+        self.assertIn('Settings created: 2', output)
+        self.assertIn('Settings updated: 1', output)
+        self.assertIn('Settings unchanged: 3', output)
+        self.assertIn('Settings failed: 0', output)
+
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_print_export_summary_missing_keys(self, mock_stdout):
+        """Test _print_export_summary handles missing keys gracefully."""
+        result = {
+            'created': 1,
+            'updated': 2,
+            # Missing other keys
+        }
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            self.command._print_export_summary('LDAP', result)
+
+        output = mock_stdout.getvalue()
+        self.assertIn('--- LDAP Export Summary ---', output)
+        self.assertIn('Authenticators created: 1', output)
+        self.assertIn('Authenticators updated: 2', output)
+        self.assertIn('Authenticators unchanged: 0', output)  # Default value
+        self.assertIn('Mappers created: 0', output)  # Default value
+
+    @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.GitHubMigrator')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.OIDCMigrator')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_total_results_accumulation(self, mock_stdout, mock_oidc, mock_github, mock_gateway_client):
+        """Test that results from multiple migrators are properly accumulated."""
+        # Mock gateway client context manager
+        mock_client_instance = Mock()
+        mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
+        mock_gateway_client.return_value.__exit__.return_value = None
+
+        # Mock migrators with different results
+        self.create_mock_migrator(mock_github, authenticator_type="GitHub", created=1, mappers_created=2)
+        self.create_mock_migrator(mock_oidc, authenticator_type="OIDC", created=0, updated=1, unchanged=1, mappers_created=1, mappers_updated=1)
+
+        options = self.options_basic_auth_skip_all_individual()
+        options['skip_oidc'] = False
+        options['skip_github'] = False
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**options)
+            # Should exit with code 0 for success
+            assert exc_info.value.code == 0
+
+        # Verify total results are accumulated correctly
+        output = mock_stdout.getvalue()
+        self.assertIn('Total authenticators created: 1', output)  # 1 + 0
+        self.assertIn('Total authenticators updated: 1', output)  # 0 + 1
+        self.assertIn('Total authenticators unchanged: 1', output)  # 0 + 1
+        self.assertIn('Total authenticators failed: 0', output)  # 0 + 0
+        self.assertIn('Total mappers created: 3', output)  # 2 + 1
+        self.assertIn('Total mappers updated: 1', output)  # 0 + 1
+        self.assertIn('Total mappers failed: 0', output)  # 0 + 0
+
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_environment_variable_parsing(self, mock_stdout):
+        """Test that environment variables are parsed correctly."""
+        test_cases = [
+            ('true', True),
+            ('1', True),
+            ('yes', True),
+            ('on', True),
+            ('TRUE', True),
+            ('false', False),
+            ('0', False),
+            ('no', False),
+            ('off', False),
+            ('', False),
+            ('random', False),
+        ]
+
+        for env_value, expected in test_cases:
+            with patch.dict(
+                os.environ,
+                {
+                    'GATEWAY_BASE_URL': 'https://gateway.example.com',
+                    'GATEWAY_USER': 'testuser',
+                    'GATEWAY_PASSWORD': 'testpass',
+                    'GATEWAY_SKIP_VERIFY': env_value,
+                },
+            ):
+                with patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') as mock_gateway_client:
+                    # Mock gateway client context manager
+                    mock_client_instance = Mock()
+                    mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
+                    mock_gateway_client.return_value.__exit__.return_value = None
+
+                    with patch.object(self.command, 'stdout', mock_stdout):
+                        with patch('sys.exit'):
+                            self.command.handle(**self.options_basic_auth_skip_all_individual())
+
+                    # Verify gateway client was called with correct skip_verify value
+                    mock_gateway_client.assert_called_once_with(
+                        base_url='https://gateway.example.com', username='testuser', password='testpass', skip_verify=expected, command=self.command
+                    )
+
+                    # Reset for next iteration
+                    mock_gateway_client.reset_mock()
+                    mock_stdout.seek(0)
+                    mock_stdout.truncate(0)
+
+    @patch.dict(os.environ, {'GATEWAY_SKIP_VERIFY': 'false'})
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.create_api_client')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.urlparse')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.urlunparse')
+    @patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator')
+    @patch('sys.stdout', new_callable=StringIO)
+    def test_service_token_connection_validation_failure(self, mock_stdout, mock_settings_migrator, mock_urlunparse, mock_urlparse, mock_create_api_client):
+        """Test that non-200 response from get_service_metadata causes error exit."""
+        # Mock resource API client with failing response
+        mock_resource_client = Mock()
+        mock_resource_client.base_url = 'https://gateway.example.com/api/v1'
+        mock_resource_client.jwt_user_id = 'test-user'
+        mock_resource_client.jwt_expiration = '2024-12-31'
+        mock_resource_client.verify_https = True
+        mock_response = Mock()
+        mock_response.status_code = 401  # Simulate unauthenticated error
+        mock_resource_client.get_service_metadata.return_value = mock_response
+        mock_create_api_client.return_value = mock_resource_client
+
+        # Mock URL parsing (needed for the service token flow)
+        mock_parsed = Mock()
+        mock_parsed.scheme = 'https'
+        mock_parsed.netloc = 'gateway.example.com'
+        mock_urlparse.return_value = mock_parsed
+        mock_urlunparse.return_value = 'https://gateway.example.com/'
+
+        with patch.object(self.command, 'stdout', mock_stdout):
+            with pytest.raises(SystemExit) as exc_info:
+                self.command.handle(**self.options_svc_token_skip_all())
+            # Should exit with code 1 for connection failure
+            assert exc_info.value.code == 1
+
+        # Verify error message is displayed
+        output = mock_stdout.getvalue()
+        self.assertIn(
+            'Gateway Service Token is unable to connect to Gateway via the base URL https://gateway.example.com/. Recieved HTTP response code 401', output
+        )
+        self.assertIn('Connection Validated: False', output)
@ -125,9 +125,6 @@ def test_finish_job_fact_cache_clear(hosts, mocker, ref_time, tmpdir):
     for host in (hosts[0], hosts[2], hosts[3]):
         assert host.ansible_facts == {"a": 1, "b": 2}
         assert host.ansible_facts_modified == ref_time

-    # Verify facts were cleared for host with deleted cache file
-    assert hosts[1].ansible_facts == {}
     assert hosts[1].ansible_facts_modified > ref_time

     # Current implementation skips the call entirely if hosts_to_update == []
@ -871,6 +871,314 @@ class TestJobCredentials(TestJobExecution):
         assert f.read() == self.EXAMPLE_PRIVATE_KEY
         assert safe_env['ANSIBLE_NET_PASSWORD'] == HIDDEN_PASSWORD

+    def test_terraform_cloud_credentials(self, job, private_data_dir, mock_me):
+        terraform = CredentialType.defaults['terraform']()
+        hcl_config = '''
+        backend "s3" {
+            bucket = "s3_sample_bucket"
+            key = "/tf_state/"
+            region = "us-east-1"
+        }
+        '''
+        credential = Credential(pk=1, credential_type=terraform, inputs={'configuration': hcl_config})
+        credential.inputs['configuration'] = encrypt_field(credential, 'configuration')
+        job.credentials.add(credential)
+
+        env = {}
+        safe_env = {}
+        credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
+
+        local_path = to_host_path(env['TF_BACKEND_CONFIG_FILE'], private_data_dir)
+        config = open(local_path, 'r').read()
+        assert config == hcl_config
+
+    def test_terraform_gcs_backend_credentials(self, job, private_data_dir, mock_me):
+        terraform = CredentialType.defaults['terraform']()
+        hcl_config = '''
+        backend "gcs" {
+            bucket = "gce_storage"
+        }
+        '''
+        gce_backend_credentials = '''
+        {
+            "type": "service_account",
+            "project_id": "sample",
+            "private_key_id": "eeeeeeeeeeeeeeeeeeeeeeeeeee",
+            "private_key": "-----BEGIN PRIVATE KEY-----\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n-----END PRIVATE KEY-----\n",
+            "client_email": "sample@sample.iam.gserviceaccount.com",
+            "client_id": "0123456789",
+            "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+            "token_uri": "https://oauth2.googleapis.com/token",
+            "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+            "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/cloud-content-robot%40sample.iam.gserviceaccount.com",
+        }
+        '''
+        credential = Credential(pk=1, credential_type=terraform, inputs={'configuration': hcl_config, 'gce_credentials': gce_backend_credentials})
+        credential.inputs['configuration'] = encrypt_field(credential, 'configuration')
+        credential.inputs['gce_credentials'] = encrypt_field(credential, 'gce_credentials')
+        job.credentials.add(credential)
+
+        env = {}
+        safe_env = {}
+        credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
+
+        local_path = to_host_path(env['TF_BACKEND_CONFIG_FILE'], private_data_dir)
+        config = open(local_path, 'r').read()
+        assert config == hcl_config
+
+        credentials_path = to_host_path(env['GOOGLE_BACKEND_CREDENTIALS'], private_data_dir)
+        credentials = open(credentials_path, 'r').read()
+        assert credentials == gce_backend_credentials
+
+    def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir, mock_me):
+        some_cloud = CredentialType(
+            kind='cloud',
+            name='SomeCloud',
+            managed=False,
+            inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+            injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}},
+        )
+        credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
+
+        with pytest.raises(jinja2.exceptions.UndefinedError):
+            credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir)
+
+    def test_custom_environment_injectors(self, private_data_dir, mock_me):
+        some_cloud = CredentialType(
+            kind='cloud',
+            name='SomeCloud',
+            managed=False,
+            inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+            injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token}}'}},
+        )
+        credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
+
+        env = {}
+        credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
+
+        assert env['MY_CLOUD_API_TOKEN'] == 'ABC123'
+
+    def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir, mock_me):
+        some_cloud = CredentialType(
+            kind='cloud',
+            name='SomeCloud',
+            managed=False,
+            inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
+            injectors={'env': {'TURBO_BUTTON': '{{turbo_button}}'}},
+        )
+        credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
+
+        env = {}
+        credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
+
+        assert env['TURBO_BUTTON'] == str(True)
+
+    def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job, mock_me):
+        task = jobs.RunJob()
+        task.instance = job
+        some_cloud = CredentialType(
+            kind='cloud',
+            name='SomeCloud',
+            managed=False,
+            inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+            injectors={'env': {'JOB_ID': 'reserved'}},
+        )
+        credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
+        job.credentials.add(credential)
+
+        env = task.build_env(job, private_data_dir)
+
+        assert env['JOB_ID'] == str(job.pk)
+
+    def test_custom_environment_injectors_with_secret_field(self, private_data_dir, mock_me):
+        some_cloud = CredentialType(
+            kind='cloud',
+            name='SomeCloud',
+            managed=False,
+            inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
+            injectors={'env': {'MY_CLOUD_PRIVATE_VAR': '{{password}}'}},
+        )
+        credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'})
+        credential.inputs['password'] = encrypt_field(credential, 'password')
+
+        env = {}
+        safe_env = {}
+        credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
+
+        assert env['MY_CLOUD_PRIVATE_VAR'] == 'SUPER-SECRET-123'
+        assert 'SUPER-SECRET-123' not in safe_env.values()
+        assert safe_env['MY_CLOUD_PRIVATE_VAR'] == HIDDEN_PASSWORD
+
+    def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job, mock_me):
+        task = jobs.RunJob()
+        some_cloud = CredentialType(
+            kind='cloud',
+            name='SomeCloud',
+            managed=False,
+            inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||||
|
injectors={'extra_vars': {'api_token': '{{api_token}}'}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
|
||||||
|
job.credentials.add(credential)
|
||||||
|
|
||||||
|
args = task.build_args(job, private_data_dir, {})
|
||||||
|
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
|
||||||
|
extra_vars = parse_extra_vars(args, private_data_dir)
|
||||||
|
|
||||||
|
assert extra_vars["api_token"] == "ABC123"
|
||||||
|
assert hasattr(extra_vars["api_token"], '__UNSAFE__')
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir, mock_me):
|
||||||
|
task = jobs.RunJob()
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
|
||||||
|
injectors={'extra_vars': {'turbo_button': '{{turbo_button}}'}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
|
||||||
|
job.credentials.add(credential)
|
||||||
|
|
||||||
|
args = task.build_args(job, private_data_dir, {})
|
||||||
|
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
|
||||||
|
extra_vars = parse_extra_vars(args, private_data_dir)
|
||||||
|
|
||||||
|
assert extra_vars["turbo_button"] == "True"
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_nested_extra_vars(self, private_data_dir, job, mock_me):
|
||||||
|
task = jobs.RunJob()
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'host', 'label': 'Host', 'type': 'string'}]},
|
||||||
|
injectors={'extra_vars': {'auth': {'host': '{{host}}'}}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'host': 'example.com'})
|
||||||
|
job.credentials.add(credential)
|
||||||
|
|
||||||
|
args = task.build_args(job, private_data_dir, {})
|
||||||
|
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
|
||||||
|
extra_vars = parse_extra_vars(args, private_data_dir)
|
||||||
|
|
||||||
|
assert extra_vars["auth"]["host"] == "example.com"
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_templated_extra_vars_key(self, private_data_dir, job, mock_me):
|
||||||
|
task = jobs.RunJob()
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'environment', 'label': 'Environment', 'type': 'string'}, {'id': 'host', 'label': 'Host', 'type': 'string'}]},
|
||||||
|
injectors={'extra_vars': {'{{environment}}_auth': {'host': '{{host}}'}}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'environment': 'test', 'host': 'example.com'})
|
||||||
|
job.credentials.add(credential)
|
||||||
|
|
||||||
|
args = task.build_args(job, private_data_dir, {})
|
||||||
|
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
|
||||||
|
extra_vars = parse_extra_vars(args, private_data_dir)
|
||||||
|
|
||||||
|
assert extra_vars["test_auth"]["host"] == "example.com"
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir, mock_me):
|
||||||
|
task = jobs.RunJob()
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
|
||||||
|
injectors={'extra_vars': {'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
|
||||||
|
job.credentials.add(credential)
|
||||||
|
|
||||||
|
args = task.build_args(job, private_data_dir, {})
|
||||||
|
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
|
||||||
|
extra_vars = parse_extra_vars(args, private_data_dir)
|
||||||
|
|
||||||
|
assert extra_vars["turbo_button"] == "FAST!"
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir, mock_me):
|
||||||
|
"""
|
||||||
|
extra_vars that contain secret field values should be censored in the DB
|
||||||
|
"""
|
||||||
|
task = jobs.RunJob()
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
|
||||||
|
injectors={'extra_vars': {'password': '{{password}}'}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'})
|
||||||
|
credential.inputs['password'] = encrypt_field(credential, 'password')
|
||||||
|
job.credentials.add(credential)
|
||||||
|
|
||||||
|
args = task.build_args(job, private_data_dir, {})
|
||||||
|
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
|
||||||
|
|
||||||
|
extra_vars = parse_extra_vars(args, private_data_dir)
|
||||||
|
assert extra_vars["password"] == "SUPER-SECRET-123"
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_file(self, private_data_dir, mock_me):
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||||
|
injectors={'file': {'template': '[mycloud]\n{{api_token}}'}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
|
||||||
|
|
||||||
|
env = {}
|
||||||
|
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||||
|
|
||||||
|
path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir)
|
||||||
|
assert open(path, 'r').read() == '[mycloud]\nABC123'
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_unicode_content(self, private_data_dir, mock_me):
|
||||||
|
value = 'Iñtërnâtiônàlizætiøn'
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': []},
|
||||||
|
injectors={'file': {'template': value}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
|
||||||
|
)
|
||||||
|
credential = Credential(
|
||||||
|
pk=1,
|
||||||
|
credential_type=some_cloud,
|
||||||
|
)
|
||||||
|
|
||||||
|
env = {}
|
||||||
|
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||||
|
|
||||||
|
path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir)
|
||||||
|
assert open(path, 'r').read() == value
|
||||||
|
|
||||||
|
def test_custom_environment_injectors_with_files(self, private_data_dir, mock_me):
|
||||||
|
some_cloud = CredentialType(
|
||||||
|
kind='cloud',
|
||||||
|
name='SomeCloud',
|
||||||
|
managed=False,
|
||||||
|
inputs={'fields': [{'id': 'cert', 'label': 'Certificate', 'type': 'string'}, {'id': 'key', 'label': 'Key', 'type': 'string'}]},
|
||||||
|
injectors={
|
||||||
|
'file': {'template.cert': '[mycert]\n{{cert}}', 'template.key': '[mykey]\n{{key}}'},
|
||||||
|
'env': {'MY_CERT_INI_FILE': '{{tower.filename.cert}}', 'MY_KEY_INI_FILE': '{{tower.filename.key}}'},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
credential = Credential(pk=1, credential_type=some_cloud, inputs={'cert': 'CERT123', 'key': 'KEY123'})
|
||||||
|
|
||||||
|
env = {}
|
||||||
|
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||||
|
|
||||||
|
cert_path = to_host_path(env['MY_CERT_INI_FILE'], private_data_dir)
|
||||||
|
key_path = to_host_path(env['MY_KEY_INI_FILE'], private_data_dir)
|
||||||
|
assert open(cert_path, 'r').read() == '[mycert]\nCERT123'
|
||||||
|
assert open(key_path, 'r').read() == '[mykey]\nKEY123'
|
||||||
|
|
||||||
def test_multi_cloud(self, private_data_dir, mock_me):
|
def test_multi_cloud(self, private_data_dir, mock_me):
|
||||||
gce = CredentialType.defaults['gce']()
|
gce = CredentialType.defaults['gce']()
|
||||||
gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
|
gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
|
||||||
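For orientation, the injector tests above all exercise the same mechanism: each 'env' injector value is a Jinja2 template rendered against the credential's inputs. A minimal self-contained sketch of that idea, assuming only jinja2 (render_env_injectors is a hypothetical helper for illustration, not AWX code, and AWX's real implementation uses a sandboxed environment):

from jinja2 import Environment, StrictUndefined

def render_env_injectors(injectors_env, inputs):
    """Render an 'env' injector mapping against credential inputs (hypothetical sketch)."""
    jinja = Environment(undefined=StrictUndefined)
    return {key: jinja.from_string(template).render(**inputs) for key, template in injectors_env.items()}

# Mirrors test_custom_environment_injectors above:
env = render_env_injectors({'MY_CLOUD_API_TOKEN': '{{api_token}}'}, {'api_token': 'ABC123'})
assert env['MY_CLOUD_API_TOKEN'] == 'ABC123'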
1137 awx/main/tests/unit/utils/test_auth_migration.py (Normal file; diff suppressed because it is too large)
1243 awx/main/tests/unit/utils/test_base_migrator.py (Normal file; diff suppressed because it is too large)
124 awx/main/tests/unit/utils/test_github_migrator.py (Normal file)
@ -0,0 +1,124 @@
"""
|
||||||
|
Unit tests for GitHub authenticator migrator functionality.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import Mock, patch
|
||||||
|
from awx.sso.utils.github_migrator import GitHubMigrator
|
||||||
|
|
||||||
|
|
||||||
|
class TestGitHubMigrator:
|
||||||
|
"""Tests for GitHubMigrator class."""
|
||||||
|
|
||||||
|
def setup_method(self):
|
||||||
|
"""Set up test fixtures."""
|
||||||
|
self.gateway_client = Mock()
|
||||||
|
self.command = Mock()
|
||||||
|
self.migrator = GitHubMigrator(self.gateway_client, self.command)
|
||||||
|
|
||||||
|
def test_create_gateway_authenticator_returns_boolean_causes_crash(self):
|
||||||
|
"""
|
||||||
|
Test that verifies create_gateway_authenticator returns proper dictionary
|
||||||
|
structure instead of boolean when credentials are missing.
|
||||||
|
|
||||||
|
This test verifies the fix for the bug.
|
||||||
|
"""
|
||||||
|
# Mock the get_controller_config to return a GitHub config with missing credentials
|
||||||
|
github_config_missing_creds = {
|
||||||
|
'category': 'github',
|
||||||
|
'settings': {'SOCIAL_AUTH_GITHUB_KEY': '', 'SOCIAL_AUTH_GITHUB_SECRET': 'test-secret'}, # Missing key
|
||||||
|
'org_mappers': [],
|
||||||
|
'team_mappers': [],
|
||||||
|
'login_redirect_override': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch.object(self.migrator, 'get_controller_config', return_value=[github_config_missing_creds]):
|
||||||
|
with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise
|
||||||
|
# This should NOT crash now that the bug is fixed
|
||||||
|
result = self.migrator.migrate()
|
||||||
|
|
||||||
|
# Verify the migration ran successfully without crashing
|
||||||
|
assert 'created' in result
|
||||||
|
assert 'failed' in result
|
||||||
|
# Should have failed=1 since the config has success=False (missing credentials)
|
||||||
|
assert result['failed'] == 1
|
||||||
|
|
||||||
|
def test_create_gateway_authenticator_returns_boolean_with_unknown_category(self):
|
||||||
|
"""
|
||||||
|
Test that verifies create_gateway_authenticator returns proper dictionary
|
||||||
|
structure instead of boolean when category is unknown.
|
||||||
|
|
||||||
|
This test verifies the fix for the bug.
|
||||||
|
"""
|
||||||
|
# Mock the get_controller_config to return a GitHub config with unknown category
|
||||||
|
github_config_unknown_category = {
|
||||||
|
'category': 'unknown-category',
|
||||||
|
'settings': {'SOCIAL_AUTH_UNKNOWN_KEY': 'test-key', 'SOCIAL_AUTH_UNKNOWN_SECRET': 'test-secret'},
|
||||||
|
'org_mappers': [],
|
||||||
|
'team_mappers': [],
|
||||||
|
'login_redirect_override': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch.object(self.migrator, 'get_controller_config', return_value=[github_config_unknown_category]):
|
||||||
|
with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise
|
||||||
|
# This should NOT crash now that the bug is fixed
|
||||||
|
result = self.migrator.migrate()
|
||||||
|
|
||||||
|
# Verify the migration ran successfully without crashing
|
||||||
|
assert 'created' in result
|
||||||
|
assert 'failed' in result
|
||||||
|
# Should have failed=1 since the config has success=False (unknown category)
|
||||||
|
assert result['failed'] == 1
|
||||||
|
|
||||||
|
def test_create_gateway_authenticator_direct_boolean_return_missing_creds(self):
|
||||||
|
"""
|
||||||
|
Test that directly calls create_gateway_authenticator and verifies it returns
|
||||||
|
proper dictionary structure instead of boolean for missing credentials.
|
||||||
|
"""
|
||||||
|
# Config with missing key (empty string)
|
||||||
|
config_missing_key = {
|
||||||
|
'category': 'github',
|
||||||
|
'settings': {'SOCIAL_AUTH_GITHUB_KEY': '', 'SOCIAL_AUTH_GITHUB_SECRET': 'test-secret'}, # Missing key
|
||||||
|
'org_mappers': [],
|
||||||
|
'team_mappers': [],
|
||||||
|
'login_redirect_override': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise
|
||||||
|
result = self.migrator.create_gateway_authenticator(config_missing_key)
|
||||||
|
|
||||||
|
# Now the method should return a proper dictionary structure
|
||||||
|
assert isinstance(result, dict), f"Expected dict, got {type(result)} with value: {result}"
|
||||||
|
assert 'success' in result, f"Expected 'success' key in result: {result}"
|
||||||
|
assert 'action' in result, f"Expected 'action' key in result: {result}"
|
||||||
|
assert 'error' in result, f"Expected 'error' key in result: {result}"
|
||||||
|
# Verify the expected values
|
||||||
|
assert result['success'] is False
|
||||||
|
assert result['action'] == 'skipped'
|
||||||
|
assert 'Missing OAuth2 credentials' in result['error']
|
||||||
|
|
||||||
|
def test_create_gateway_authenticator_direct_boolean_return_unknown_category(self):
|
||||||
|
"""
|
||||||
|
Test that directly calls create_gateway_authenticator and verifies it returns
|
||||||
|
proper dictionary structure instead of boolean for unknown category.
|
||||||
|
"""
|
||||||
|
# Config with unknown category
|
||||||
|
config_unknown_category = {
|
||||||
|
'category': 'unknown-category',
|
||||||
|
'settings': {'SOCIAL_AUTH_UNKNOWN_KEY': 'test-key', 'SOCIAL_AUTH_UNKNOWN_SECRET': 'test-secret'},
|
||||||
|
'org_mappers': [],
|
||||||
|
'team_mappers': [],
|
||||||
|
'login_redirect_override': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise
|
||||||
|
result = self.migrator.create_gateway_authenticator(config_unknown_category)
|
||||||
|
|
||||||
|
# Now the method should return a proper dictionary structure
|
||||||
|
assert isinstance(result, dict), f"Expected dict, got {type(result)} with value: {result}"
|
||||||
|
assert 'success' in result, f"Expected 'success' key in result: {result}"
|
||||||
|
assert 'action' in result, f"Expected 'action' key in result: {result}"
|
||||||
|
assert 'error' in result, f"Expected 'error' key in result: {result}"
|
||||||
|
# Verify the expected values
|
||||||
|
assert result['success'] is False
|
||||||
|
assert result['action'] == 'skipped'
|
||||||
|
assert 'Unknown category unknown-category' in result['error']
|
||||||
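The four tests above pin down one contract: create_gateway_authenticator always returns an outcome dict with 'success', 'action', and 'error' keys, never a bare boolean, so migrate() can tally results without crashing. A hedged sketch of that tally, assuming nothing beyond the asserted keys (summarize is a hypothetical stand-in, not the migrator's actual code):

def summarize(outcomes):
    """Tally outcome dicts the way the tests above expect migrate() to (hypothetical)."""
    return {
        'created': sum(1 for o in outcomes if o.get('success') and o.get('action') == 'created'),
        'failed': sum(1 for o in outcomes if not o.get('success')),
    }

example = [{'success': False, 'action': 'skipped', 'error': 'Missing OAuth2 credentials'}]
assert summarize(example) == {'created': 0, 'failed': 1}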
1024 awx/main/tests/unit/utils/test_ldap_migrator.py (Normal file; diff suppressed because it is too large)
614 awx/main/tests/unit/utils/test_role_mapping.py (Normal file)
@ -0,0 +1,614 @@
"""
|
||||||
|
Unit tests for role mapping utilities.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from awx.main.utils.gateway_mapping import role_map_to_gateway_format
|
||||||
|
from awx.sso.utils.ldap_migrator import LDAPMigrator
|
||||||
|
|
||||||
|
|
||||||
|
def get_role_mappers(role_map, start_order=1):
|
||||||
|
"""Helper function to get just the mappers from role_map_to_gateway_format."""
|
||||||
|
result, _ = role_map_to_gateway_format(role_map, start_order)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1):
|
||||||
|
"""Helper function to test LDAP group allow mapping via LDAPMigrator."""
|
||||||
|
migrator = LDAPMigrator()
|
||||||
|
return migrator._ldap_group_allow_to_gateway_format(result, ldap_group, deny, start_order)
|
||||||
|
|
||||||
|
|
||||||
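# For orientation, an illustrative sketch of the contract the assertions below pin
# down: role_map_to_gateway_format returns a ([mapper_dict, ...], next_order) tuple,
# e.g. role_map_to_gateway_format({"is_superuser": "cn=admins,dc=example,dc=com"}) ->
#   ([{'name': 'is_superuser - role', 'authenticator': -1, 'revoke': True,
#      'map_type': 'is_superuser', 'team': None, 'organization': None,
#      'triggers': {'groups': {'has_or': ['cn=admins,dc=example,dc=com']}},
#      'order': 1}], 2)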
class TestRoleMapToGatewayFormat:
    """Tests for role_map_to_gateway_format function."""

    def test_none_input(self):
        """Test that None input returns empty list."""
        result, next_order = role_map_to_gateway_format(None)
        assert result == []
        assert next_order == 1  # Default start_order

    def test_empty_dict(self):
        """Test that empty dict returns empty list."""
        result, next_order = role_map_to_gateway_format({})
        assert result == []
        assert next_order == 1

    def test_is_superuser_single_group(self):
        """Test is_superuser with single group."""
        role_map = {"is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"}

        result, _ = role_map_to_gateway_format(role_map)

        expected = [
            {
                "name": "is_superuser - role",
                "authenticator": -1,
                "revoke": True,
                "map_type": "is_superuser",
                "team": None,
                "organization": None,
                "triggers": {
                    "groups": {
                        "has_or": ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"],
                    }
                },
                "order": 1,
            }
        ]

        assert result == expected

    def test_is_superuser_multiple_groups(self):
        """Test is_superuser with multiple groups."""
        role_map = {"is_superuser": ["cn=super_users,dc=example,dc=com", "cn=admins,dc=example,dc=com"]}

        result, _ = role_map_to_gateway_format(role_map)

        expected = [
            {
                "name": "is_superuser - role",
                "authenticator": -1,
                "revoke": True,
                "map_type": "is_superuser",
                "team": None,
                "organization": None,
                "triggers": {
                    "groups": {
                        "has_or": ["cn=super_users,dc=example,dc=com", "cn=admins,dc=example,dc=com"],
                    }
                },
                "order": 1,
            }
        ]

        assert result == expected

    def test_is_system_auditor_single_group(self):
        """Test is_system_auditor with single group."""
        role_map = {"is_system_auditor": "cn=auditors,dc=example,dc=com"}

        result, _ = role_map_to_gateway_format(role_map)

        expected = [
            {
                "name": "is_system_auditor - role",
                "authenticator": -1,
                "revoke": True,
                "map_type": "role",
                "role": "Platform Auditor",
                "team": None,
                "organization": None,
                "triggers": {
                    "groups": {
                        "has_or": ["cn=auditors,dc=example,dc=com"],
                    }
                },
                "order": 1,
            }
        ]

        assert result == expected

    def test_is_system_auditor_multiple_groups(self):
        """Test is_system_auditor with multiple groups."""
        role_map = {"is_system_auditor": ["cn=auditors,dc=example,dc=com", "cn=viewers,dc=example,dc=com"]}

        result, _ = role_map_to_gateway_format(role_map)

        expected = [
            {
                "name": "is_system_auditor - role",
                "authenticator": -1,
                "revoke": True,
                "map_type": "role",
                "role": "Platform Auditor",
                "team": None,
                "organization": None,
                "triggers": {
                    "groups": {
                        "has_or": ["cn=auditors,dc=example,dc=com", "cn=viewers,dc=example,dc=com"],
                    }
                },
                "order": 1,
            }
        ]

        assert result == expected

    def test_multiple_roles(self):
        """Test multiple role mappings."""
        role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"}

        result, _ = role_map_to_gateway_format(role_map)

        expected = [
            {
                "name": "is_superuser - role",
                "authenticator": -1,
                "revoke": True,
                "map_type": "is_superuser",
                "team": None,
                "organization": None,
                "triggers": {
                    "groups": {
                        "has_or": ["cn=super_users,dc=example,dc=com"],
                    }
                },
                "order": 1,
            },
            {
                "name": "is_system_auditor - role",
                "authenticator": -1,
                "revoke": True,
                "map_type": "role",
                "role": "Platform Auditor",
                "team": None,
                "organization": None,
                "triggers": {
                    "groups": {
                        "has_or": ["cn=auditors,dc=example,dc=com"],
                    }
                },
                "order": 2,
            },
        ]

        assert result == expected

    def test_unsupported_role_flag(self):
        """Test that unsupported role flags are ignored."""
        role_map = {
            "is_superuser": "cn=super_users,dc=example,dc=com",
            "is_staff": "cn=staff,dc=example,dc=com",  # Unsupported flag
            "is_system_auditor": "cn=auditors,dc=example,dc=com",
        }

        result, _ = role_map_to_gateway_format(role_map)

        # Should only have 2 mappers (is_superuser and is_system_auditor)
        assert len(result) == 2
        assert result[0]["map_type"] == "is_superuser"
        assert result[1]["map_type"] == "role"
        assert result[1]["role"] == "Platform Auditor"

    def test_order_increments_correctly(self):
        """Test that order values increment correctly."""
        role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"}

        result, _ = role_map_to_gateway_format(role_map)

        assert len(result) == 2
        assert result[0]["order"] == 1
        assert result[1]["order"] == 2

    def test_start_order_parameter(self):
        """Test that start_order parameter is respected."""
        role_map = {"is_superuser": "cn=super_users,dc=example,dc=com"}

        result, next_order = role_map_to_gateway_format(role_map, start_order=5)

        assert result[0]["order"] == 5
        assert next_order == 6

    def test_string_to_list_conversion(self):
        """Test that string groups are converted to lists."""
        role_map = {"is_superuser": "single-group"}

        result, _ = role_map_to_gateway_format(role_map)

        # Should convert string to list for has_or
        assert result[0]["triggers"]["groups"]["has_or"] == ["single-group"]

    def test_triggers_format_validation(self):
        """Test that trigger formats match Gateway specification."""
        role_map = {"is_superuser": ["group1", "group2"]}

        result, _ = role_map_to_gateway_format(role_map)

        # Validate that triggers follow Gateway format
        triggers = result[0]["triggers"]
        assert "groups" in triggers
        assert "has_or" in triggers["groups"]
        assert isinstance(triggers["groups"]["has_or"], list)
        assert triggers["groups"]["has_or"] == ["group1", "group2"]

    def test_ldap_dn_format(self):
        """Test with realistic LDAP DN format."""
        role_map = {
            "is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com",
            "is_system_auditor": "cn=awx_auditors,OU=administration groups,DC=contoso,DC=com",
        }

        result, _ = role_map_to_gateway_format(role_map)

        assert len(result) == 2
        assert result[0]["triggers"]["groups"]["has_or"] == ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"]
        assert result[1]["triggers"]["groups"]["has_or"] == ["cn=awx_auditors,OU=administration groups,DC=contoso,DC=com"]

    def test_gateway_format_compliance(self):
        """Test that all results comply with Gateway role mapping format."""
        role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"}

        result, _ = role_map_to_gateway_format(role_map)

        for mapping in result:
            # Required fields per Gateway spec
            assert "name" in mapping
            assert "authenticator" in mapping
            assert "map_type" in mapping
            assert "organization" in mapping
            assert "team" in mapping
            assert "triggers" in mapping
            assert "order" in mapping
            assert "revoke" in mapping

            # Field types
            assert isinstance(mapping["name"], str)
            assert isinstance(mapping["authenticator"], int)
            assert mapping["map_type"] in ["is_superuser", "role"]
            assert mapping["organization"] is None
            assert mapping["team"] is None
            assert isinstance(mapping["triggers"], dict)
            assert isinstance(mapping["order"], int)
            assert isinstance(mapping["revoke"], bool)

            # Specific field validations based on map_type
            if mapping["map_type"] == "is_superuser":
                assert "role" not in mapping
            elif mapping["map_type"] == "role":
                assert "role" in mapping
                assert isinstance(mapping["role"], str)
                assert mapping["role"] == "Platform Auditor"


# Parametrized tests for role mappings
@pytest.mark.parametrize(
    "role_map,expected_length",
    [
        (None, 0),
        ({}, 0),
        ({"is_superuser": "group1"}, 1),
        ({"is_system_auditor": "group1"}, 1),
        ({"is_superuser": "group1", "is_system_auditor": "group2"}, 2),
        ({"is_staff": "group1"}, 0),  # Unsupported flag
        ({"is_superuser": "group1", "is_staff": "group2", "is_system_auditor": "group3"}, 2),  # Mixed supported/unsupported
    ],
)
def test_role_map_result_lengths(role_map, expected_length):
    """Test that role_map_to_gateway_format returns expected number of mappings."""
    result, _ = role_map_to_gateway_format(role_map)
    assert len(result) == expected_length


# Edge case tests
def test_empty_groups_handling():
    """Test handling of empty group lists."""
    role_map = {"is_superuser": []}

    result, _ = role_map_to_gateway_format(role_map)

    assert len(result) == 1
    assert result[0]["triggers"]["groups"]["has_or"] == []


def test_mixed_group_types():
    """Test handling of mixed group types (string and list)."""
    role_map = {"is_superuser": "single-group", "is_system_auditor": ["group1", "group2"]}

    result, _ = role_map_to_gateway_format(role_map)

    assert len(result) == 2
    assert result[0]["triggers"]["groups"]["has_or"] == ["single-group"]
    assert result[1]["triggers"]["groups"]["has_or"] == ["group1", "group2"]


def test_realistic_ldap_user_flags_by_group():
    """Test with realistic LDAP USER_FLAGS_BY_GROUP data."""
    role_map = {"is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"}

    result, _ = role_map_to_gateway_format(role_map)

    # This is exactly the use case from the user's example
    assert len(result) == 1
    assert result[0]["map_type"] == "is_superuser"
    assert result[0]["triggers"]["groups"]["has_or"] == ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"]
    assert result[0]["revoke"] is True
    assert result[0]["team"] is None
    assert result[0]["organization"] is None
class TestLdapGroupAllowToGatewayFormat:
    """Tests for ldap_group_allow_to_gateway_format function."""

    def test_none_input_with_empty_result(self):
        """Test that None input with empty result returns unchanged result."""
        result = []
        output_result, next_order = ldap_group_allow_to_gateway_format(result, None, deny=False)

        assert output_result == []
        assert next_order == 1  # Default start_order

    def test_none_input_with_existing_result(self):
        """Test that None input with existing mappers returns unchanged result."""
        result = [{"existing": "mapper"}]
        output_result, next_order = ldap_group_allow_to_gateway_format(result, None, deny=False, start_order=5)

        assert output_result == [{"existing": "mapper"}]
        assert next_order == 5  # start_order unchanged

    def test_require_group_mapping(self):
        """Test LDAP REQUIRE_GROUP mapping (deny=False)."""
        result = []
        ldap_group = "cn=allowed_users,dc=example,dc=com"

        output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1)

        expected = [
            {
                "name": "LDAP-RequireGroup",
                "authenticator": -1,
                "map_type": "allow",
                "revoke": False,
                "triggers": {"groups": {"has_and": ["cn=allowed_users,dc=example,dc=com"]}},
                "order": 1,
            }
        ]

        assert output_result == expected
        assert next_order == 2

    def test_deny_group_mapping(self):
        """Test LDAP DENY_GROUP mapping (deny=True)."""
        result = []
        ldap_group = "cn=blocked_users,dc=example,dc=com"

        output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=True, start_order=1)

        expected = [
            {
                "name": "LDAP-DenyGroup",
                "authenticator": -1,
                "map_type": "allow",
                "revoke": True,
                "triggers": {"groups": {"has_or": ["cn=blocked_users,dc=example,dc=com"]}},
                "order": 1,
            }
        ]

        assert output_result == expected
        assert next_order == 2

    def test_appending_to_existing_result(self):
        """Test appending to existing result list."""
        existing_mapper = {
            "name": "existing-mapper",
            "authenticator": -1,
            "map_type": "role",
            "order": 1,
        }
        result = [existing_mapper]
        ldap_group = "cn=new_group,dc=example,dc=com"

        output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=2)

        assert len(output_result) == 2
        assert output_result[0] == existing_mapper  # Original mapper unchanged
        assert output_result[1]["name"] == "LDAP-RequireGroup"
        assert output_result[1]["order"] == 2
        assert next_order == 3

    def test_custom_start_order(self):
        """Test that custom start_order is respected."""
        result = []
        ldap_group = "cn=test_group,dc=example,dc=com"

        output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=10)

        assert output_result[0]["order"] == 10
        assert next_order == 11

    def test_require_vs_deny_trigger_differences(self):
        """Test the difference between require and deny group triggers."""
        ldap_group = "cn=test_group,dc=example,dc=com"

        # Test require group (deny=False)
        require_result, _ = ldap_group_allow_to_gateway_format([], ldap_group, deny=False)

        # Test deny group (deny=True)
        deny_result, _ = ldap_group_allow_to_gateway_format([], ldap_group, deny=True)

        # Require group should use has_and
        assert require_result[0]["triggers"]["groups"]["has_and"] == ["cn=test_group,dc=example,dc=com"]
        assert require_result[0]["revoke"] is False
        assert require_result[0]["name"] == "LDAP-RequireGroup"

        # Deny group should use has_or
        assert deny_result[0]["triggers"]["groups"]["has_or"] == ["cn=test_group,dc=example,dc=com"]
        assert deny_result[0]["revoke"] is True
        assert deny_result[0]["name"] == "LDAP-DenyGroup"

    def test_realistic_ldap_dn_format(self):
        """Test with realistic LDAP DN format."""
        result = []

        # Test with require group
        require_group = "cn=awx_users,OU=application groups,DC=contoso,DC=com"
        output_result, next_order = ldap_group_allow_to_gateway_format(result, require_group, deny=False, start_order=1)

        assert len(output_result) == 1
        assert output_result[0]["triggers"]["groups"]["has_and"] == ["cn=awx_users,OU=application groups,DC=contoso,DC=com"]
        assert output_result[0]["name"] == "LDAP-RequireGroup"
        assert next_order == 2

    def test_multiple_sequential_calls(self):
        """Test multiple sequential calls to build complex allow mappers."""
        result = []

        # Add deny group first
        result, next_order = ldap_group_allow_to_gateway_format(result, "cn=blocked,dc=example,dc=com", deny=True, start_order=1)

        # Add require group second
        result, next_order = ldap_group_allow_to_gateway_format(result, "cn=allowed,dc=example,dc=com", deny=False, start_order=next_order)

        assert len(result) == 2

        # First mapper should be deny group
        assert result[0]["name"] == "LDAP-DenyGroup"
        assert result[0]["revoke"] is True
        assert result[0]["triggers"]["groups"]["has_or"] == ["cn=blocked,dc=example,dc=com"]
        assert result[0]["order"] == 1

        # Second mapper should be require group
        assert result[1]["name"] == "LDAP-RequireGroup"
        assert result[1]["revoke"] is False
        assert result[1]["triggers"]["groups"]["has_and"] == ["cn=allowed,dc=example,dc=com"]
        assert result[1]["order"] == 2

        assert next_order == 3

    def test_gateway_format_compliance(self):
        """Test that all results comply with Gateway allow mapping format."""
        result = []

        # Test both deny and require groups
        result, _ = ldap_group_allow_to_gateway_format(result, "cn=denied,dc=example,dc=com", deny=True, start_order=1)
        result, _ = ldap_group_allow_to_gateway_format(result, "cn=required,dc=example,dc=com", deny=False, start_order=2)

        for mapping in result:
            # Required fields per Gateway spec
            assert "name" in mapping
            assert "authenticator" in mapping
            assert "map_type" in mapping
            assert "triggers" in mapping
            assert "order" in mapping
            assert "revoke" in mapping

            # Field types
            assert isinstance(mapping["name"], str)
            assert isinstance(mapping["authenticator"], int)
            assert mapping["map_type"] == "allow"
            assert isinstance(mapping["triggers"], dict)
            assert isinstance(mapping["order"], int)
            assert isinstance(mapping["revoke"], bool)

            # Trigger format validation
            assert "groups" in mapping["triggers"]
            groups_trigger = mapping["triggers"]["groups"]

            # Should have either has_and or has_or, but not both
            has_and = "has_and" in groups_trigger
            has_or = "has_or" in groups_trigger
            assert has_and != has_or  # XOR - exactly one should be true

            if has_and:
                assert isinstance(groups_trigger["has_and"], list)
                assert len(groups_trigger["has_and"]) == 1
            if has_or:
                assert isinstance(groups_trigger["has_or"], list)
                assert len(groups_trigger["has_or"]) == 1

    def test_original_result_not_modified_when_none(self):
        """Test that original result list is not modified when ldap_group is None."""
        original_result = [{"original": "mapper"}]
        result_copy = original_result.copy()

        output_result, _ = ldap_group_allow_to_gateway_format(original_result, None, deny=False)

        # Original list should be unchanged
        assert original_result == result_copy
        # Output should be the same reference
        assert output_result is original_result

    def test_empty_string_group(self):
        """Test handling of empty string group."""
        result = []

        output_result, next_order = ldap_group_allow_to_gateway_format(result, "", deny=False, start_order=1)

        # Should still create a mapper even with empty string
        assert len(output_result) == 1
        assert output_result[0]["triggers"]["groups"]["has_and"] == [""]
        assert next_order == 2


# Parametrized tests for ldap_group_allow_to_gateway_format
@pytest.mark.parametrize(
    "ldap_group,deny,expected_name,expected_revoke,expected_trigger_type",
    [
        ("cn=test,dc=example,dc=com", True, "LDAP-DenyGroup", True, "has_or"),
        ("cn=test,dc=example,dc=com", False, "LDAP-RequireGroup", False, "has_and"),
        ("cn=users,ou=groups,dc=company,dc=com", True, "LDAP-DenyGroup", True, "has_or"),
        ("cn=users,ou=groups,dc=company,dc=com", False, "LDAP-RequireGroup", False, "has_and"),
    ],
)
def test_ldap_group_parametrized(ldap_group, deny, expected_name, expected_revoke, expected_trigger_type):
    """Parametrized test for various LDAP group configurations."""
    result = []

    output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=deny, start_order=1)

    assert len(output_result) == 1
    mapper = output_result[0]

    assert mapper["name"] == expected_name
    assert mapper["revoke"] == expected_revoke
    assert expected_trigger_type in mapper["triggers"]["groups"]
    assert mapper["triggers"]["groups"][expected_trigger_type] == [ldap_group]
    assert next_order == 2


def test_realistic_awx_ldap_migration_scenario():
    """Test realistic scenario from AWX LDAP migration."""
    result = []

    # Simulate AWX LDAP configuration with both REQUIRE_GROUP and DENY_GROUP
    deny_group = "cn=blocked_users,OU=blocked groups,DC=contoso,DC=com"
    require_group = "cn=awx_users,OU=application groups,DC=contoso,DC=com"

    # Add deny group first (as in the migrator)
    result, next_order = ldap_group_allow_to_gateway_format(result, deny_group, deny=True, start_order=1)

    # Add require group second
    result, next_order = ldap_group_allow_to_gateway_format(result, require_group, deny=False, start_order=next_order)

    # Should have 2 allow mappers
    assert len(result) == 2

    # Verify deny group mapper
    deny_mapper = result[0]
    assert deny_mapper["name"] == "LDAP-DenyGroup"
    assert deny_mapper["map_type"] == "allow"
    assert deny_mapper["revoke"] is True
    assert deny_mapper["triggers"]["groups"]["has_or"] == [deny_group]
    assert deny_mapper["order"] == 1

    # Verify require group mapper
    require_mapper = result[1]
    assert require_mapper["name"] == "LDAP-RequireGroup"
    assert require_mapper["map_type"] == "allow"
    assert require_mapper["revoke"] is False
    assert require_mapper["triggers"]["groups"]["has_and"] == [require_group]
    assert require_mapper["order"] == 2

    assert next_order == 3
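Taken together, the allow-mapper tests reduce to a small construction rule: deny groups revoke membership via has_or, require groups grant it via has_and. A hedged reimplementation distilled from those assertions (allow_mapper is a hypothetical name for illustration; the migrator's actual code lives in LDAPMigrator._ldap_group_allow_to_gateway_format):

def allow_mapper(group, deny, order):
    """Build one Gateway allow mapper per the rule the tests above assert (sketch)."""
    return {
        'name': 'LDAP-DenyGroup' if deny else 'LDAP-RequireGroup',
        'authenticator': -1,
        'map_type': 'allow',
        'revoke': deny,
        'triggers': {'groups': {'has_or': [group]} if deny else {'has_and': [group]}},
        'order': order,
    }

assert allow_mapper('cn=blocked,dc=example,dc=com', True, 1)['triggers'] == {'groups': {'has_or': ['cn=blocked,dc=example,dc=com']}}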
511 awx/main/utils/gateway_client.py (Normal file)
@ -0,0 +1,511 @@
"""
|
||||||
|
Gateway API client for AAP Gateway interactions.
|
||||||
|
|
||||||
|
This module provides a client class to interact with the AAP Gateway REST API,
|
||||||
|
specifically for creating authenticators and mapping configurations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import requests
|
||||||
|
import logging
|
||||||
|
from typing import Dict, List, Optional, Any
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class GatewayAPIError(Exception):
|
||||||
|
"""Exception raised for Gateway API errors."""
|
||||||
|
|
||||||
|
def __init__(self, message: str, status_code: Optional[int] = None, response_data: Optional[Dict] = None):
|
||||||
|
self.message = message
|
||||||
|
self.status_code = status_code
|
||||||
|
self.response_data = response_data
|
||||||
|
super().__init__(self.message)
|
||||||
|
|
||||||
|
|
||||||
|
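# Illustrative usage sketch for the exception above (hypothetical caller, not module code):
#
#   try:
#       client.create_authenticator(config)
#   except GatewayAPIError as exc:
#       logger.error("Gateway call failed: %s (status=%s)", exc.message, exc.status_code)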
class GatewayClient:
    """Client for AAP Gateway REST API interactions."""

    def __init__(self, base_url: str, username: str, password: str, skip_verify: bool = False, skip_session_init: bool = False, command=None):
        """Initialize the Gateway client.

        Args:
            base_url: Base URL of the AAP Gateway instance
            username: Username for authentication
            password: Password for authentication
            skip_verify: Skip SSL certificate verification
            skip_session_init: Skip initializing the HTTP session. Only set this to True from a subclass that does not need the session initialized.
            command: The management command object, used to write output to the console.
        """
        self.base_url = base_url.rstrip('/')
        self.username = username
        self.password = password
        self.skip_verify = skip_verify
        self.command = command
        self.session_was_not_initialized = skip_session_init

        # Initialize session
        if not skip_session_init:
            self.session = requests.Session()

            # Configure SSL verification
            if skip_verify:
                self.session.verify = False
                # Disable SSL warnings when verification is disabled
                import urllib3

                urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

            # Set default headers
            self.session.headers.update(
                {
                    'User-Agent': 'AWX-Gateway-Migration-Client/1.0',
                    'Accept': 'application/json',
                    'Content-Type': 'application/json',
                }
            )
        else:
            self.session = None

        # Authentication state
        self._authenticated = False

    def authenticate(self) -> bool:
        """Authenticate with the Gateway using HTTP Basic Authentication.

        Returns:
            bool: True if authentication successful, False otherwise

        Raises:
            GatewayAPIError: If authentication fails
        """
        try:
            # Set up HTTP Basic Authentication
            from requests.auth import HTTPBasicAuth

            self.session.auth = HTTPBasicAuth(self.username, self.password)

            # Test authentication by making a simple request to the API
            test_url = urljoin(self.base_url, '/api/gateway/v1/authenticators/')

            response = self.session.get(test_url)

            if response.status_code in [200, 401]:  # 401 means auth is working but might need permissions
                self._authenticated = True
                logger.info("Successfully authenticated with Gateway using Basic Auth")
                return True
            else:
                error_msg = f"Authentication test failed with status {response.status_code}"
                try:
                    error_data = response.json()
                    error_msg += f": {error_data}"
                except requests.exceptions.JSONDecodeError:
                    error_msg += f": {response.text}"

                raise GatewayAPIError(error_msg, response.status_code, response.json() if response.content else None)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Network error during authentication: {str(e)}")
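    # Orientation note (illustrative, hypothetical request shown): the authenticate()
    # probe above amounts to a single Basic Auth GET, roughly
    #   requests.get(base_url + '/api/gateway/v1/authenticators/',
    #                auth=(username, password), verify=not skip_verify)
    # with 200 or 401 both treated as "the Gateway processed our credentials".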
    def _ensure_authenticated(self):
        """Ensure the client is authenticated, authenticate if needed."""
        if not self._authenticated:
            self.authenticate()

    def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None, params: Optional[Dict] = None) -> requests.Response:
        """Make an authenticated request to the Gateway API.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE, etc.)
            endpoint: API endpoint (without base URL)
            data: JSON data to send in request body
            params: Query parameters

        Returns:
            requests.Response: The response object

        Raises:
            GatewayAPIError: If request fails
        """
        self._ensure_authenticated()

        url = urljoin(self.base_url, endpoint.lstrip('/'))

        try:
            response = self.session.request(method=method.upper(), url=url, json=data, params=params)

            # Log request details
            logger.debug(f"{method.upper()} {url} - Status: {response.status_code}")

            return response

        except requests.RequestException as e:
            raise GatewayAPIError(f"Request failed: {str(e)}")

    def create_authenticator(self, authenticator_config: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new authenticator in Gateway.

        Args:
            authenticator_config: Authenticator configuration dictionary

        Returns:
            dict: Created authenticator data

        Raises:
            GatewayAPIError: If creation fails
        """
        endpoint = '/api/gateway/v1/authenticators/'

        try:
            response = self._make_request('POST', endpoint, data=authenticator_config)

            if response.status_code == 201:
                result = response.json()
                logger.info(f"Successfully created authenticator: {result.get('name', 'Unknown')}")
                return result
            else:
                error_msg = f"Failed to create authenticator. Status: {response.status_code}"
                try:
                    error_data = response.json()
                    error_msg += f", Error: {error_data}"
                except requests.exceptions.JSONDecodeError:
                    error_msg += f", Response: {response.text}"

                raise GatewayAPIError(error_msg, response.status_code, response.json() if response.content else None)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Failed to create authenticator: {str(e)}")

    def update_authenticator(self, authenticator_id: int, authenticator_config: Dict[str, Any]) -> Dict[str, Any]:
        """Update an existing authenticator in Gateway.

        Args:
            authenticator_id: ID of the authenticator to update
            authenticator_config: Authenticator configuration dictionary

        Returns:
            dict: Updated authenticator data

        Raises:
            GatewayAPIError: If update fails
        """
        endpoint = f'/api/gateway/v1/authenticators/{authenticator_id}/'

        try:
            response = self._make_request('PATCH', endpoint, data=authenticator_config)

            if response.status_code == 200:
                result = response.json()
                logger.info(f"Successfully updated authenticator: {result.get('name', 'Unknown')}")
                return result
            else:
                error_msg = f"Failed to update authenticator. Status: {response.status_code}"
                try:
                    error_data = response.json()
                    error_msg += f", Error: {error_data}"
                except requests.exceptions.JSONDecodeError:
                    error_msg += f", Response: {response.text}"

                raise GatewayAPIError(error_msg, response.status_code, response.json() if response.content else None)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Failed to update authenticator: {str(e)}")

    def create_authenticator_map(self, authenticator_id: int, mapper_config: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new authenticator map in Gateway.

        Args:
            authenticator_id: ID of the authenticator to create map for
            mapper_config: Mapper configuration dictionary

        Returns:
            dict: Created mapper data

        Raises:
            GatewayAPIError: If creation fails
        """
        endpoint = '/api/gateway/v1/authenticator_maps/'

        try:
            response = self._make_request('POST', endpoint, data=mapper_config)

            if response.status_code == 201:
                result = response.json()
                logger.info(f"Successfully created authenticator map: {result.get('name', 'Unknown')}")
                return result
            else:
                error_msg = f"Failed to create authenticator map. Status: {response.status_code}"
                try:
                    error_data = response.json()
                    error_msg += f", Error: {error_data}"
                except requests.exceptions.JSONDecodeError:
                    error_msg += f", Response: {response.text}"

                raise GatewayAPIError(error_msg, response.status_code, response.json() if response.content else None)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Failed to create authenticator map: {str(e)}")

    def update_authenticator_map(self, mapper_id: int, mapper_config: Dict[str, Any]) -> Dict[str, Any]:
        """Update an existing authenticator map in Gateway.

        Args:
            mapper_id: ID of the authenticator map to update
            mapper_config: Mapper configuration dictionary

        Returns:
            dict: Updated mapper data

        Raises:
            GatewayAPIError: If update fails
        """
        endpoint = f'/api/gateway/v1/authenticator_maps/{mapper_id}/'

        try:
            response = self._make_request('PATCH', endpoint, data=mapper_config)

            if response.status_code == 200:
                result = response.json()
                logger.info(f"Successfully updated authenticator map: {result.get('name', 'Unknown')}")
                return result
            else:
                error_msg = f"Failed to update authenticator map. Status: {response.status_code}"
                try:
                    error_data = response.json()
                    error_msg += f", Error: {error_data}"
                except requests.exceptions.JSONDecodeError:
                    error_msg += f", Response: {response.text}"

                raise GatewayAPIError(error_msg, response.status_code, response.json() if response.content else None)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Failed to update authenticator map: {str(e)}")

    def get_authenticators(self, params: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """Get list of authenticators from Gateway.

        Args:
            params: Optional query parameters

        Returns:
            list: List of authenticator configurations

        Raises:
            GatewayAPIError: If request fails
        """
        endpoint = '/api/gateway/v1/authenticators/'

        try:
            response = self._make_request('GET', endpoint, params=params)

            if response.status_code == 200:
                result = response.json()
                # Handle paginated response
                if isinstance(result, dict) and 'results' in result:
                    return result['results']
                return result
            else:
                error_msg = f"Failed to get authenticators. Status: {response.status_code}"
                raise GatewayAPIError(error_msg, response.status_code)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Failed to get authenticators: {str(e)}")

    def get_authenticator_by_slug(self, slug: str) -> Optional[Dict[str, Any]]:
        """Get a specific authenticator by slug.

        Args:
            slug: The authenticator slug to search for

        Returns:
            dict: The authenticator data if found, None otherwise

        Raises:
            GatewayAPIError: If request fails
        """
        try:
            # Use query parameter to filter by slug - more efficient than getting all
            authenticators = self.get_authenticators(params={'slug': slug})

            # Return the first match (slugs should be unique)
            if authenticators:
                return authenticators[0]
            return None

        except GatewayAPIError as e:
            # Re-raise Gateway API errors
            raise e
        except Exception as e:
            raise GatewayAPIError(f"Failed to get authenticator by slug: {str(e)}")

    def get_authenticator_maps(self, authenticator_id: int) -> List[Dict[str, Any]]:
        """Get list of maps for a specific authenticator.

        Args:
            authenticator_id: ID of the authenticator

        Returns:
            list: List of authenticator maps

        Raises:
            GatewayAPIError: If request fails
        """
        endpoint = f'/api/gateway/v1/authenticators/{authenticator_id}/authenticator_maps/'

        try:
            response = self._make_request('GET', endpoint)

            if response.status_code == 200:
                result = response.json()
                # Handle paginated response
                if isinstance(result, dict) and 'results' in result:
                    return result['results']
                return result
            else:
                error_msg = f"Failed to get authenticator maps. Status: {response.status_code}"
                raise GatewayAPIError(error_msg, response.status_code)

        except requests.RequestException as e:
            raise GatewayAPIError(f"Failed to get authenticator maps: {str(e)}")
|
def create_github_authenticator(
|
||||||
|
self, name: str, client_id: str, client_secret: str, enabled: bool = True, create_objects: bool = False, remove_users: bool = False
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Create a GitHub authenticator with the specified configuration.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: Name for the authenticator
|
||||||
|
client_id: GitHub OAuth App Client ID
|
||||||
|
client_secret: GitHub OAuth App Client Secret
|
||||||
|
enabled: Whether authenticator should be enabled
|
||||||
|
create_objects: Whether to create users/orgs/teams automatically
|
||||||
|
remove_users: Whether to remove users when they lose access
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Created authenticator data
|
||||||
|
"""
|
||||||
|
config = {
|
||||||
|
"name": name,
|
||||||
|
"type": "ansible_base.authentication.authenticator_plugins.github",
|
||||||
|
"enabled": enabled,
|
||||||
|
"create_objects": create_objects,
|
||||||
|
"remove_users": remove_users,
|
||||||
|
"configuration": {"KEY": client_id, "SECRET": client_secret},
|
||||||
|
}
|
||||||
|
|
||||||
|
return self.create_authenticator(config)
|
||||||
|
|
||||||
|
def update_gateway_setting(self, setting_name: str, setting_value: Any) -> Dict[str, Any]:
|
||||||
|
"""Update a Gateway setting via the settings API.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
setting_name: Name of the setting to update
|
||||||
|
setting_value: Value to set for the setting
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Upon successful update, well formed responses are returned, otherwise the original payload is returned.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
GatewayAPIError: If update fails, anything other than a 200 or 204 response code.
|
||||||
|
"""
|
||||||
|
endpoint = '/api/gateway/v1/settings/all/'
|
||||||
|
|
||||||
|
# Create the JSON payload with the setting name and value
|
||||||
|
payload = {setting_name: setting_value}
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = self._make_request('PUT', endpoint, data=payload)
|
||||||
|
|
||||||
|
if response.status_code in [200, 204]:
|
||||||
|
logger.info(f"Successfully updated Gateway setting: {setting_name}")
|
||||||
|
# Return the response data if available, otherwise return the payload
|
||||||
|
if response.content:
|
||||||
|
try:
|
||||||
|
return response.json()
|
||||||
|
except requests.exceptions.JSONDecodeError:
|
||||||
|
return payload
|
||||||
|
return payload
|
||||||
|
else:
|
||||||
|
error_msg = f"Failed to update Gateway setting. Status: {response.status_code}"
|
||||||
|
try:
|
||||||
|
error_data = response.json()
|
||||||
|
error_msg += f", Error: {error_data}"
|
||||||
|
except requests.exceptions.JSONDecodeError:
|
||||||
|
error_msg += f", Response: {response.text}"
|
||||||
|
|
||||||
|
raise GatewayAPIError(error_msg, response.status_code, response.json() if response.content else None)
|
||||||
|
|
||||||
|
except requests.RequestException as e:
|
||||||
|
raise GatewayAPIError(f"Failed to update Gateway setting: {str(e)}")
|
||||||
|
|
||||||
|
def get_gateway_setting(self, setting_name: str) -> Any:
|
||||||
|
"""Get a Gateway setting value via the settings API.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
setting_name: Name of the setting to retrieve
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Any: The value of the setting, or None if not found
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
GatewayAPIError: If request fails
|
||||||
|
"""
|
||||||
|
endpoint = '/api/gateway/v1/settings/all/'
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = self._make_request('GET', endpoint)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
settings_data = response.json()
|
||||||
|
logger.info("Successfully retrieved Gateway settings")
|
||||||
|
|
||||||
|
# Return the specific setting value or None if not found
|
||||||
|
return settings_data.get(setting_name)
|
||||||
|
else:
|
||||||
|
error_msg = f"Failed to get Gateway settings from '{endpoint}' for '{setting_name}'. Status: {response.status_code}"
|
||||||
|
error_data = response.text
|
||||||
|
try:
|
||||||
|
error_data = response.json()
|
||||||
|
error_msg += f", Error: {error_data}"
|
||||||
|
except requests.exceptions.JSONDecodeError:
|
||||||
|
error_msg += f", Response: {response.text}"
|
||||||
|
|
||||||
|
raise GatewayAPIError(error_msg, response.status_code, error_data)
|
||||||
|
|
||||||
|
except requests.RequestException as e:
|
||||||
|
raise GatewayAPIError(f"Failed to get Gateway settings from '{endpoint}' for '{setting_name}'. Unexpected Exception - Error: {str(e)}")
|
||||||
|
|
||||||
|
def get_base_url(self) -> str:
|
||||||
|
"""Get the base URL of the Gateway instance.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The base URL of the Gateway instance
|
||||||
|
"""
|
||||||
|
return self.base_url
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Close the session and clean up resources."""
|
||||||
|
if self.session:
|
||||||
|
self.session.close()
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
"""Context manager entry."""
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
"""Context manager exit."""
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def _write_output(self, message, style=None):
|
||||||
|
"""Write output message if command is available."""
|
||||||
|
if self.command:
|
||||||
|
if style == 'success':
|
||||||
|
self.command.stdout.write(self.command.style.SUCCESS(message))
|
||||||
|
elif style == 'warning':
|
||||||
|
self.command.stdout.write(self.command.style.WARNING(message))
|
||||||
|
elif style == 'error':
|
||||||
|
self.command.stdout.write(self.command.style.ERROR(message))
|
||||||
|
else:
|
||||||
|
self.command.stdout.write(message)
|
||||||
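The methods above compose into a small provisioning flow, and the context-manager hooks at the end mean the underlying session is closed automatically. A minimal usage sketch follows; the hostname, credentials, setting name, and the assumption that authenticator payloads carry an 'id' key are all illustrative rather than guaranteed by this module.

from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError

try:
    with GatewayClient(base_url='https://gateway.example.com', username='admin', password='secret') as client:
        # Look up by slug first so repeated runs do not create duplicates
        github = client.get_authenticator_by_slug('github')
        if github is None:
            github = client.create_github_authenticator('GitHub', client_id='abc123', client_secret='s3cr3t')
        # 'id' is assumed to be present in the Gateway response payload
        maps = client.get_authenticator_maps(github['id'])
        client.update_gateway_setting('SESSION_COOKIE_AGE', 1800)  # hypothetical setting name
except GatewayAPIError as exc:
    print(f"Gateway call failed: {exc}")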
77
awx/main/utils/gateway_client_svc_token.py
Normal file
@@ -0,0 +1,77 @@
"""
Gateway API client for AAP Gateway interactions with Service Tokens.

This module provides a client class to interact with the AAP Gateway REST API,
specifically for creating authenticators and mapping configurations.
"""

import requests
import logging
from typing import Dict, Optional
from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError


logger = logging.getLogger(__name__)


class GatewayClientSVCToken(GatewayClient):
    """Client for AAP Gateway REST API interactions authenticated with service tokens."""

    def __init__(self, resource_api_client=None, command=None):
        """Initialize the Gateway client.

        Args:
            resource_api_client: Resource API client for Gateway, leveraging service tokens
        """
        super().__init__(
            base_url=resource_api_client.base_url,
            username=resource_api_client.jwt_user_id,
            password="required-in-GatewayClient-authenticate()-but-unused-by-GatewayClientSVCToken",
            skip_verify=(not resource_api_client.verify_https),
            skip_session_init=True,
            command=command,
        )
        self.resource_api_client = resource_api_client
        # Authentication state
        self._authenticated = True

    def authenticate(self) -> bool:
        """Override the base class method to always return True.

        Returns:
            bool: True always
        """
        return True

    def _ensure_authenticated(self):
        """Refresh the JWT service token."""
        self.resource_api_client.refresh_jwt()

    def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None, params: Optional[Dict] = None) -> requests.Response:
        """Make a service-token-authenticated request to the Gateway API.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE, etc.)
            endpoint: API endpoint (without base URL)
            data: JSON data to send in the request body
            params: Query parameters

        Returns:
            requests.Response: The response object

        Raises:
            GatewayAPIError: If the request fails
        """
        self._ensure_authenticated()

        try:
            response = self.resource_api_client._make_request(method=method, path=endpoint, data=data, params=params)

            # Log request details
            logger.debug(f"{method.upper()} {self.base_url}{endpoint} - Status: {response.status_code}")

            return response

        except requests.RequestException as e:
            raise GatewayAPIError(f"Request failed: {str(e)}")
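A minimal sketch of how this subclass is driven: resource_client below stands in for any object that exposes the attributes read in __init__ (base_url, jwt_user_id, verify_https) plus refresh_jwt() and _make_request(). How such a client is obtained, and the setting name queried, are assumptions for illustration.

client = GatewayClientSVCToken(resource_api_client=resource_client)
# _ensure_authenticated() refreshes the JWT before every request is delegated
value = client.get_gateway_setting('gateway_proxy_url')  # hypothetical setting name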
361
awx/main/utils/gateway_mapping.py
Normal file
@@ -0,0 +1,361 @@
"""
Gateway mapping conversion utilities.

This module contains functions to convert AWX authentication mappings
(organization and team mappings) to AAP Gateway format.
"""

import re
from typing import cast, Any, Literal, Pattern, Union

email_regex = re.compile(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$")


def truncate_name(name: str, max_length: int = 128) -> str:
    """Truncate a name to the specified maximum length."""
    if len(name) <= max_length:
        return name
    return name[:max_length]


def build_truncated_name(org_name: str, entity_name: str, trigger_name: str, max_component_length: int = 40) -> str:
    """Build a name by truncating each component individually and joining with ' - '."""
    truncated_org = truncate_name(org_name, max_component_length)
    truncated_entity = truncate_name(entity_name, max_component_length)
    truncated_trigger = truncate_name(trigger_name, max_component_length)
    return f"{truncated_org} - {truncated_entity} {truncated_trigger}"


def pattern_to_slash_format(pattern: Any) -> str:
    """Convert a re.Pattern object to /pattern/flags format."""
    if not isinstance(pattern, re.Pattern):
        return str(pattern)

    flags_str = ""
    if pattern.flags & re.IGNORECASE:
        flags_str += "i"
    if pattern.flags & re.MULTILINE:
        flags_str += "m"
    if pattern.flags & re.DOTALL:
        flags_str += "s"
    if pattern.flags & re.VERBOSE:
        flags_str += "x"

    return f"/{pattern.pattern}/{flags_str}"


def process_ldap_user_list(
    groups: Union[None, str, bool, list[Union[None, str, bool]]],
) -> list[dict[str, Any]]:
    if not isinstance(groups, list):
        groups = [groups]

    # Type cast to help mypy understand the type after conversion
    groups_list: list[Union[str, bool, None]] = cast(list[Union[str, bool, None]], groups)

    triggers = []
    if groups_list == [None]:
        # A None value means we shouldn't update whatever this is based on LDAP values
        pass
    elif groups_list == []:
        # An empty list means no triggers should be created
        pass
    elif groups_list == [True]:
        triggers.append({"name": "Always Allow", "trigger": {"always": {}}})
    elif groups_list == [False]:
        triggers.append(
            {
                "name": "Never Allow",
                "trigger": {"never": {}},
            }
        )
    else:
        triggers.append({"name": "Match User Groups", "trigger": {"groups": {"has_or": groups_list}}})
    return triggers


def process_sso_user_list(
    users: Union[str, bool, Pattern[str], list[Union[str, bool, Pattern[str]]]], email_attr: str = 'email', username_attr: str = 'username'
) -> dict[str, Union[str, dict[str, dict[str, Union[str, list[str]]]]]]:
    """Process an SSO user list and return a single consolidated trigger instead of multiple separate ones."""
    if not isinstance(users, list):
        users = [users]

    # Type cast to help mypy understand the type after conversion
    user_list: list[Union[str, bool, Pattern[str]]] = cast(list[Union[str, bool, Pattern[str]]], users)

    if user_list == ["false"] or user_list == [False]:
        return {"name": "Never Allow", "trigger": {"never": {}}}
    elif user_list == ["true"] or user_list == [True]:
        return {"name": "Always Allow", "trigger": {"always": {}}}
    else:
        # Group users by type
        emails = []
        usernames = []
        regexes_username = []
        regexes_email = []

        for user_or_email in user_list:
            if isinstance(user_or_email, re.Pattern):
                pattern_str = pattern_to_slash_format(user_or_email)
                regexes_username.append(pattern_str)
                regexes_email.append(pattern_str)
            elif isinstance(user_or_email, str):
                if email_regex.match(user_or_email):
                    emails.append(user_or_email)
                else:
                    usernames.append(user_or_email)
            else:
                # Convert other objects to string and treat as both username and email
                str_val = str(user_or_email)
                usernames.append(str_val)
                emails.append(str_val)

        # Build consolidated trigger
        attributes = {"join_condition": "or"}

        if emails:
            if len(emails) == 1:
                attributes[email_attr] = {"equals": emails[0]}
            else:
                attributes[email_attr] = {"in": emails}

        if usernames:
            if len(usernames) == 1:
                attributes[username_attr] = {"equals": usernames[0]}
            else:
                attributes[username_attr] = {"in": usernames}

        # For regex patterns, we need to create separate "matches" conditions since there's no matches_or
        for i, pattern in enumerate(regexes_username):
            pattern_key = f"{username_attr}_pattern_{i}" if len(regexes_username) > 1 else username_attr
            if pattern_key not in attributes:
                attributes[pattern_key] = {}
            attributes[pattern_key]["matches"] = pattern

        for i, pattern in enumerate(regexes_email):
            pattern_key = f"{email_attr}_pattern_{i}" if len(regexes_email) > 1 else email_attr
            if pattern_key not in attributes:
                attributes[pattern_key] = {}
            attributes[pattern_key]["matches"] = pattern

        # Create a deterministic, concise name based on trigger types and counts
        name_parts = []
        if emails:
            name_parts.append(f"E:{len(emails)}")
        if usernames:
            name_parts.append(f"U:{len(usernames)}")
        if regexes_username:
            name_parts.append(f"UP:{len(regexes_username)}")
        if regexes_email:
            name_parts.append(f"EP:{len(regexes_email)}")

        name = " ".join(name_parts) if name_parts else "Mixed Rules"

        return {"name": name, "trigger": {"attributes": attributes}}


def team_map_to_gateway_format(team_map, start_order=1, email_attr: str = 'email', username_attr: str = 'username', auth_type: Literal['sso', 'ldap'] = 'sso'):
    """Convert an AWX team mapping to Gateway authenticator format.

    Args:
        team_map: The SOCIAL_AUTH_*_TEAM_MAP setting value
        start_order: Starting order value for the mappers
        email_attr: The attribute representing the email
        username_attr: The attribute representing the username

    Returns:
        tuple: (List of Gateway-compatible team mappers, next_order)
    """
    if team_map is None:
        return [], start_order

    result = []
    order = start_order

    for team_name in team_map.keys():
        team = team_map[team_name]
        # TODO: Confirm that if we have None with remove we still won't remove
        if team['users'] is None:
            continue

        # Get the organization name
        organization_name = team.get('organization', 'Unknown')

        # Check for the remove flag
        revoke = team.get('remove', False)

        if auth_type == 'ldap':
            triggers = process_ldap_user_list(team['users'])
            for trigger in triggers:
                result.append(
                    {
                        "name": build_truncated_name(organization_name, team_name, trigger['name']),
                        "map_type": "team",
                        "order": order,
                        "authenticator": -1,  # Will be updated when creating the mapper
                        "triggers": trigger['trigger'],
                        "organization": organization_name,
                        "team": team_name,
                        "role": "Team Member",  # Gateway team member role
                        "revoke": revoke,
                    }
                )
                order += 1

        if auth_type == 'sso':
            trigger = process_sso_user_list(team['users'], email_attr=email_attr, username_attr=username_attr)
            result.append(
                {
                    "name": build_truncated_name(organization_name, team_name, trigger['name']),
                    "map_type": "team",
                    "order": order,
                    "authenticator": -1,  # Will be updated when creating the mapper
                    "triggers": trigger['trigger'],
                    "organization": organization_name,
                    "team": team_name,
                    "role": "Team Member",  # Gateway team member role
                    "revoke": revoke,
                }
            )
            order += 1

    return result, order


def org_map_to_gateway_format(org_map, start_order=1, email_attr: str = 'email', username_attr: str = 'username', auth_type: Literal['sso', 'ldap'] = 'sso'):
    """Convert an AWX organization mapping to Gateway authenticator format.

    Args:
        org_map: The SOCIAL_AUTH_*_ORGANIZATION_MAP setting value
        start_order: Starting order value for the mappers
        email_attr: The attribute representing the email
        username_attr: The attribute representing the username

    Returns:
        tuple: (List of Gateway-compatible organization mappers, next_order)
    """
    if org_map is None:
        return [], start_order

    result = []
    order = start_order

    for organization_name in org_map.keys():
        organization = org_map[organization_name]
        for user_type in ['admins', 'users']:
            if organization.get(user_type, None) is None:
                # TODO: Confirm that if we have None with remove we still won't remove
                continue

            # Get the permission type
            permission_type = user_type.title()

            # Map AWX admins/users to the appropriate Gateway organization roles
            role = "Organization Admin" if user_type == "admins" else "Organization Member"

            # Check for remove flags
            revoke = False
            if organization.get(f"remove_{user_type}"):
                revoke = True

            if auth_type == 'ldap':
                triggers = process_ldap_user_list(organization[user_type])
                for trigger in triggers:
                    result.append(
                        {
                            "name": build_truncated_name(organization_name, permission_type, trigger['name']),
                            "map_type": "organization",
                            "order": order,
                            "authenticator": -1,  # Will be updated when creating the mapper
                            "triggers": trigger['trigger'],
                            "organization": organization_name,
                            "team": None,  # Organization-level mapping, not team-specific
                            "role": role,
                            "revoke": revoke,
                        }
                    )
                    order += 1

            if auth_type == 'sso':
                trigger = process_sso_user_list(organization[user_type], email_attr=email_attr, username_attr=username_attr)
                result.append(
                    {
                        "name": build_truncated_name(organization_name, permission_type, trigger['name']),
                        "map_type": "organization",
                        "order": order,
                        "authenticator": -1,  # Will be updated when creating the mapper
                        "triggers": trigger['trigger'],
                        "organization": organization_name,
                        "team": None,  # Organization-level mapping, not team-specific
                        "role": role,
                        "revoke": revoke,
                    }
                )
                order += 1

    return result, order


def role_map_to_gateway_format(role_map, start_order=1):
    """Convert an AWX role mapping to Gateway authenticator format.

    Args:
        role_map: An LDAP or SAML role mapping
        start_order: Starting order value for the mappers

    Returns:
        tuple: (List of Gateway-compatible role mappers, next_order)
    """
    if role_map is None:
        return [], start_order

    result = []
    order = start_order

    for flag in role_map:
        groups = role_map[flag]
        if type(groups) is str:
            groups = [groups]

        if flag == 'is_superuser':
            # Gateway has a special map_type for superusers
            result.append(
                {
                    "name": f"{flag} - role",
                    "authenticator": -1,
                    "revoke": True,
                    "map_type": flag,
                    "team": None,
                    "organization": None,
                    "triggers": {
                        "groups": {
                            "has_or": groups,
                        }
                    },
                    "order": order,
                }
            )
        elif flag == 'is_system_auditor':
            # Roles other than superuser must be represented as a generic role mapper
            result.append(
                {
                    "name": f"{flag} - role",
                    "authenticator": -1,
                    "revoke": True,
                    "map_type": "role",
                    "role": "Platform Auditor",
                    "team": None,
                    "organization": None,
                    "triggers": {
                        "groups": {
                            "has_or": groups,
                        }
                    },
                    "order": order,
                }
            )

        order += 1

    return result, order
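To make the conversion concrete, here is a small worked example with invented team and organization names; the expected values follow directly from the functions above.

import re

from awx.main.utils.gateway_mapping import pattern_to_slash_format, team_map_to_gateway_format

# Regex entries are rendered in /pattern/flags form
assert pattern_to_slash_format(re.compile(r'^ops-.*', re.IGNORECASE)) == '/^ops-.*/i'

team_map = {'Ops': {'organization': 'Default', 'users': ['alice@example.com', 'bob'], 'remove': True}}
mappers, next_order = team_map_to_gateway_format(team_map, start_order=1)

assert next_order == 2
assert mappers[0]['name'] == 'Default - Ops E:1 U:1'  # org - team + deterministic trigger name
assert mappers[0]['revoke'] is True
# Email and username conditions are consolidated into a single 'attributes' trigger
assert mappers[0]['triggers'] == {
    'attributes': {
        'join_condition': 'or',
        'email': {'equals': 'alice@example.com'},
        'username': {'equals': 'bob'},
    }
}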
@@ -249,7 +249,7 @@ class Licenser(object):
                 'GET',
                 host,
                 verify=True,
-                timeout=(5, 20),
+                timeout=(31, 31),
             )
         except requests.RequestException:
             logger.warning("Failed to connect to console.redhat.com using Service Account credentials. Falling back to basic auth.")
@@ -258,7 +258,7 @@ class Licenser(object):
                 host,
                 auth=(client_id, client_secret),
                 verify=True,
-                timeout=(5, 20),
+                timeout=(31, 31),
             )
         subs.raise_for_status()
         subs_formatted = []
@@ -38,7 +38,7 @@ class ActionModule(ActionBase):
 
     def _obtain_auth_token(self, oidc_endpoint, client_id, client_secret):
         if oidc_endpoint.endswith('/'):
-            oidc_endpoint = oidc_endpoint.rstrip('/')
+            oidc_endpoint = oidc_endpoint[:-1]
         main_url = oidc_endpoint + '/.well-known/openid-configuration'
         response = requests.get(url=main_url, headers={'Accept': 'application/json'})
         data = {}
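Note that the one-line change in _obtain_auth_token above is only equivalent for a single trailing slash: rstrip('/') removes every trailing slash, while slicing removes exactly one. A pure-Python illustration (the URL is invented):

url = 'https://sso.example.org//'
assert url.rstrip('/') == 'https://sso.example.org'
assert url[:-1] == 'https://sso.example.org/'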
@@ -1,5 +1,7 @@
 from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
 from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
+from ansible_base.rbac.models import RoleDefinition
+from ansible_base.resource_registry.shared_types import RoleDefinitionType
 
 from awx.main import models
 
@@ -19,4 +21,8 @@ RESOURCE_LIST = (
         shared_resource=SharedResource(serializer=TeamType, is_provider=False),
         parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
     ),
+    ResourceConfig(
+        RoleDefinition,
+        shared_resource=SharedResource(serializer=RoleDefinitionType, is_provider=False),
+    ),
 )
@@ -83,7 +83,7 @@ USE_I18N = True
 USE_TZ = True
 
 STATICFILES_DIRS = [
-    os.path.join(BASE_DIR, 'ui', 'build'),
+    os.path.join(BASE_DIR, 'ui', 'build', 'static'),
     os.path.join(BASE_DIR, 'static'),
 ]
 
@@ -540,7 +540,7 @@ AWX_AUTO_DEPROVISION_INSTANCES = False
 
 
 # If True, allow users to be assigned to roles that were created via JWT
-ALLOW_LOCAL_ASSIGNING_JWT_ROLES = False
+ALLOW_LOCAL_ASSIGNING_JWT_ROLES = True
 
 # Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
 # Note: This setting may be overridden by database settings.
@@ -599,6 +599,11 @@ VMWARE_EXCLUDE_EMPTY_GROUPS = True
 
 VMWARE_VALIDATE_CERTS = False
 
+# -----------------
+# -- VMware ESXi --
+# -----------------
+VMWARE_ESXI_EXCLUDE_EMPTY_GROUPS = True
+
 # ---------------------------
 # -- Google Compute Engine --
 # ---------------------------
@@ -711,7 +716,7 @@ DISABLE_LOCAL_AUTH = False
 TOWER_URL_BASE = "https://platformhost"
 
 INSIGHTS_URL_BASE = "https://example.org"
-INSIGHTS_OIDC_ENDPOINT = "https://sso.example.org"
+INSIGHTS_OIDC_ENDPOINT = "https://sso.example.org/"
 INSIGHTS_AGENT_MIME = 'application/example'
 # See https://github.com/ansible/awx-facts-playbooks
 INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id'
@@ -1069,6 +1074,7 @@ ANSIBLE_BASE_CACHE_PARENT_PERMISSIONS = True
 # Currently features are enabled to keep compatibility with old system, except custom roles
 ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN = False
 # ANSIBLE_BASE_ALLOW_CUSTOM_ROLES = True
+ANSIBLE_BASE_ALLOW_TEAM_PARENTS = False
 ANSIBLE_BASE_ALLOW_CUSTOM_TEAM_ROLES = False
 ANSIBLE_BASE_ALLOW_SINGLETON_USER_ROLES = True
 ANSIBLE_BASE_ALLOW_SINGLETON_TEAM_ROLES = False  # System auditor has always been restricted to users
@@ -1089,6 +1095,9 @@ INDIRECT_HOST_QUERY_FALLBACK_GIVEUP_DAYS = 3
 # Older records will be cleaned up
 INDIRECT_HOST_AUDIT_RECORD_MAX_AGE_DAYS = 7
 
+# setting for Policy as Code feature
+FEATURE_POLICY_AS_CODE_ENABLED = False
+
 OPA_HOST = ''  # The hostname used to connect to the OPA server. If empty, policy enforcement will be disabled.
 OPA_PORT = 8181  # The port used to connect to the OPA server. Defaults to 8181.
 OPA_SSL = False  # Enable or disable the use of SSL to connect to the OPA server. Defaults to false.
@@ -73,5 +73,4 @@ AWX_DISABLE_TASK_MANAGERS = False
 def set_dev_flags(settings):
     defaults_flags = settings.get("FLAGS", {})
     defaults_flags['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}]
-    defaults_flags['FEATURE_DISPATCHERD_ENABLED'] = [{'condition': 'boolean', 'value': True}]
     return {'FLAGS': defaults_flags}
@@ -23,13 +23,8 @@ ALLOWED_HOSTS = []
 # only used for deprecated fields and management commands for them
 BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
 
-# Switch to a writable location for the dispatcher sockfile location
-DISPATCHERD_DEBUGGING_SOCKFILE = os.path.realpath('/var/run/tower/dispatcherd.sock')
-
 # Very important that this is editable (not read_only) in the API
 AWX_ISOLATION_SHOW_PATHS = [
     '/etc/pki/ca-trust:/etc/pki/ca-trust:O',
     '/usr/share/pki:/usr/share/pki:O',
 ]
-
-del os
469
awx/sso/backends.py
Normal file
@@ -0,0 +1,469 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
from collections import OrderedDict
import logging
import uuid

import ldap

# Django
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings as django_settings
from django.core.signals import setting_changed
from django.utils.encoding import force_str
from django.http import HttpResponse

# django-auth-ldap
from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings
from django_auth_ldap.backend import LDAPBackend as BaseLDAPBackend
from django_auth_ldap.backend import populate_user
from django.core.exceptions import ImproperlyConfigured

# radiusauth
from radiusauth.backends import RADIUSBackend as BaseRADIUSBackend

# tacacs+ auth
import tacacs_plus

# social
from social_core.backends.saml import OID_USERID
from social_core.backends.saml import SAMLAuth as BaseSAMLAuth
from social_core.backends.saml import SAMLIdentityProvider as BaseSAMLIdentityProvider

# Ansible Tower
from awx.sso.models import UserEnterpriseAuth
from awx.sso.common import create_org_and_teams, reconcile_users_org_team_mappings

logger = logging.getLogger('awx.sso.backends')


class LDAPSettings(BaseLDAPSettings):
    defaults = dict(list(BaseLDAPSettings.defaults.items()) + list({'ORGANIZATION_MAP': {}, 'TEAM_MAP': {}, 'GROUP_TYPE_PARAMS': {}}.items()))

    def __init__(self, prefix='AUTH_LDAP_', defaults={}):
        super(LDAPSettings, self).__init__(prefix, defaults)

        # If a DB-backed setting is specified that wipes out the
        # OPT_NETWORK_TIMEOUT, fall back to a sane default
        if ldap.OPT_NETWORK_TIMEOUT not in getattr(self, 'CONNECTION_OPTIONS', {}):
            options = getattr(self, 'CONNECTION_OPTIONS', {})
            options[ldap.OPT_NETWORK_TIMEOUT] = 30
            self.CONNECTION_OPTIONS = options

        # when specifying `.set_option()` calls for TLS in python-ldap, the
        # *order* in which you invoke them *matters*, particularly in Python3,
        # where dictionary insertion order is persisted
        #
        # specifically, it is *critical* that `ldap.OPT_X_TLS_NEWCTX` be set *last*
        # this manual sorting puts `OPT_X_TLS_NEWCTX` *after* other TLS-related
        # options
        #
        # see: https://github.com/python-ldap/python-ldap/issues/55
        newctx_option = self.CONNECTION_OPTIONS.pop(ldap.OPT_X_TLS_NEWCTX, None)
        self.CONNECTION_OPTIONS = OrderedDict(self.CONNECTION_OPTIONS)
        if newctx_option is not None:
            self.CONNECTION_OPTIONS[ldap.OPT_X_TLS_NEWCTX] = newctx_option


class LDAPBackend(BaseLDAPBackend):
    """
    Custom LDAP backend for AWX.
    """

    settings_prefix = 'AUTH_LDAP_'

    def __init__(self, *args, **kwargs):
        self._dispatch_uid = uuid.uuid4()
        super(LDAPBackend, self).__init__(*args, **kwargs)
        setting_changed.connect(self._on_setting_changed, dispatch_uid=self._dispatch_uid)

    def _on_setting_changed(self, sender, **kwargs):
        # If any AUTH_LDAP_* setting changes, force settings to be reloaded for
        # this backend instance.
        if kwargs.get('setting', '').startswith(self.settings_prefix):
            self._settings = None

    def _get_settings(self):
        if self._settings is None:
            self._settings = LDAPSettings(self.settings_prefix)
        return self._settings

    def _set_settings(self, settings):
        self._settings = settings

    settings = property(_get_settings, _set_settings)

    def authenticate(self, request, username, password):
        if self.settings.START_TLS and ldap.OPT_X_TLS_REQUIRE_CERT in self.settings.CONNECTION_OPTIONS:
            # with python-ldap, if you want to set connection-specific TLS
            # parameters, you must also specify OPT_X_TLS_NEWCTX = 0
            # see: https://stackoverflow.com/a/29722445
            # see: https://stackoverflow.com/a/38136255
            self.settings.CONNECTION_OPTIONS[ldap.OPT_X_TLS_NEWCTX] = 0

        if not self.settings.SERVER_URI:
            return None
        try:
            user = User.objects.get(username=username)
            if user and (not user.profile or not user.profile.ldap_dn):
                return None
        except User.DoesNotExist:
            pass

        try:
            for setting_name, type_ in [('GROUP_SEARCH', 'LDAPSearch'), ('GROUP_TYPE', 'LDAPGroupType')]:
                if getattr(self.settings, setting_name) is None:
                    raise ImproperlyConfigured("{} must be an {} instance.".format(setting_name, type_))
            ldap_user = super(LDAPBackend, self).authenticate(request, username, password)
            # If we have an LDAP user, that user has an internal ldap_user object, and that object has a bound connection,
            # then try to force an unbind to close the sticky connection.
            if ldap_user and ldap_user.ldap_user and ldap_user.ldap_user._connection_bound:
                logger.debug("Forcing LDAP connection to close")
                try:
                    ldap_user.ldap_user._connection.unbind_s()
                    ldap_user.ldap_user._connection_bound = False
                except Exception:
                    logger.exception(f"Got unexpected LDAP exception when forcing LDAP disconnect for user {ldap_user}, login will still proceed")
            return ldap_user
        except Exception:
            logger.exception("Encountered an error authenticating to LDAP")
            return None

    def get_user(self, user_id):
        if not self.settings.SERVER_URI:
            return None
        return super(LDAPBackend, self).get_user(user_id)

    # Disable any LDAP-based authorization / permissions checking.

    def has_perm(self, user, perm, obj=None):
        return False

    def has_module_perms(self, user, app_label):
        return False

    def get_all_permissions(self, user, obj=None):
        return set()

    def get_group_permissions(self, user, obj=None):
        return set()


class LDAPBackend1(LDAPBackend):
    settings_prefix = 'AUTH_LDAP_1_'


class LDAPBackend2(LDAPBackend):
    settings_prefix = 'AUTH_LDAP_2_'


class LDAPBackend3(LDAPBackend):
    settings_prefix = 'AUTH_LDAP_3_'


class LDAPBackend4(LDAPBackend):
    settings_prefix = 'AUTH_LDAP_4_'


class LDAPBackend5(LDAPBackend):
    settings_prefix = 'AUTH_LDAP_5_'


def _decorate_enterprise_user(user, provider):
    user.set_unusable_password()
    user.save()
    enterprise_auth, _ = UserEnterpriseAuth.objects.get_or_create(user=user, provider=provider)
    return enterprise_auth


def _get_or_set_enterprise_user(username, password, provider):
    created = False
    try:
        user = User.objects.prefetch_related('enterprise_auth').get(username=username)
    except User.DoesNotExist:
        user = User(username=username)
        enterprise_auth = _decorate_enterprise_user(user, provider)
        logger.debug("Created enterprise user %s via %s backend." % (username, enterprise_auth.get_provider_display()))
        created = True
    if created or user.is_in_enterprise_category(provider):
        return user
    logger.warning("Enterprise user %s already defined in Tower." % username)


class RADIUSBackend(BaseRADIUSBackend):
    """
    Custom RADIUS backend to verify license status.
    """

    def authenticate(self, request, username, password):
        if not django_settings.RADIUS_SERVER:
            return None
        return super(RADIUSBackend, self).authenticate(request, username, password)

    def get_user(self, user_id):
        if not django_settings.RADIUS_SERVER:
            return None
        user = super(RADIUSBackend, self).get_user(user_id)
        if not user.has_usable_password():
            return user

    def get_django_user(self, username, password=None, groups=[], is_staff=False, is_superuser=False):
        return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius')


class TACACSPlusBackend(object):
    """
    Custom TACACS+ auth backend for AWX.
    """

    def authenticate(self, request, username, password):
        if not django_settings.TACACSPLUS_HOST:
            return None
        try:
            # The upstream TACACS+ client does not accept non-string values, so convert if needed.
            tacacs_client = tacacs_plus.TACACSClient(
                django_settings.TACACSPLUS_HOST,
                django_settings.TACACSPLUS_PORT,
                django_settings.TACACSPLUS_SECRET,
                timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,
            )
            auth_kwargs = {'authen_type': tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL]}
            if django_settings.TACACSPLUS_AUTH_PROTOCOL:
                client_ip = self._get_client_ip(request)
                if client_ip:
                    auth_kwargs['rem_addr'] = client_ip
            auth = tacacs_client.authenticate(username, password, **auth_kwargs)
        except Exception as e:
            logger.exception("TACACS+ Authentication Error: %s" % str(e))
            return None
        if auth.valid:
            return _get_or_set_enterprise_user(username, password, 'tacacs+')

    def get_user(self, user_id):
        if not django_settings.TACACSPLUS_HOST:
            return None
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None

    def _get_client_ip(self, request):
        if not request or not hasattr(request, 'META'):
            return None

        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        return ip


class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
    """
    Custom identity provider to map attributes to what we expect.
    """

    def get_user_permanent_id(self, attributes):
        uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)]
        if isinstance(uid, str):
            return uid
        return uid[0]

    def get_attr(self, attributes, conf_key, default_attribute):
        """
        Get the attribute 'default_attribute' out of the attributes,
        unless self.conf[conf_key] overrides the default by specifying
        another attribute to use.
        """
        key = self.conf.get(conf_key, default_attribute)
        value = attributes[key] if key in attributes else None
        # In certain implementations (like https://pagure.io/ipsilon) this value is a string, not a list
        if isinstance(value, (list, tuple)):
            value = value[0]
        if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
            logger.warning(
                "Could not map user detail '%s' from SAML attribute '%s'; update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
                conf_key[5:],
                key,
                self.name,
                conf_key,
            )
        return str(value) if value is not None else value


class SAMLAuth(BaseSAMLAuth):
    """
    Custom SAMLAuth backend to verify license status.
    """

    def get_idp(self, idp_name):
        idp_config = self.setting('ENABLED_IDPS')[idp_name]
        return TowerSAMLIdentityProvider(idp_name, **idp_config)

    def authenticate(self, request, *args, **kwargs):
        if not all(
            [
                django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
                django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
                django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY,
                django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
                django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
                django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
                django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
            ]
        ):
            return None
        pipeline_result = super(SAMLAuth, self).authenticate(request, *args, **kwargs)

        if isinstance(pipeline_result, HttpResponse):
            return pipeline_result
        else:
            user = pipeline_result

        # Comes from https://github.com/omab/python-social-auth/blob/v0.2.21/social/backends/base.py#L91
        if getattr(user, 'is_new', False):
            enterprise_auth = _decorate_enterprise_user(user, 'saml')
            logger.debug("Created enterprise user %s from %s backend." % (user.username, enterprise_auth.get_provider_display()))
        elif user and not user.is_in_enterprise_category('saml'):
            return None
        if user:
            logger.debug("Enterprise user %s already created in Tower." % user.username)
        return user

    def get_user(self, user_id):
        if not all(
            [
                django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
                django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
                django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY,
                django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
                django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
                django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
                django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
            ]
        ):
            return None
        return super(SAMLAuth, self).get_user(user_id)


def _update_m2m_from_groups(ldap_user, opts, remove=True):
    """
    Helper function to evaluate the LDAP team/org options to determine whether the LDAP user should
    be a member of the team/org based on their LDAP group DNs.

    Returns:
        True - User should be added
        False - User should be removed
        None - User's membership should not be changed
    """
    if opts is None:
        return None
    elif not opts:
        pass
    elif isinstance(opts, bool) and opts is True:
        return True
    else:
        if isinstance(opts, str):
            opts = [opts]
        # If any of the user's groups matches any of the list options
        for group_dn in opts:
            if not isinstance(group_dn, str):
                continue
            if ldap_user._get_groups().is_member_of(group_dn):
                return True
    if remove:
        return False
    return None


@receiver(populate_user, dispatch_uid='populate-ldap-user')
def on_populate_user(sender, **kwargs):
    """
    Handle the signal from the LDAP backend to populate the user object. Update the user's
    organization/team memberships according to their LDAP groups.
    """
    user = kwargs['user']
    ldap_user = kwargs['ldap_user']
    backend = ldap_user.backend

    # Boolean to determine if we should force a user update
    # to avoid duplicate SQL update statements
    force_user_update = False

    # Prefetch the user's groups to prevent LDAP queries for each org/team when
    # checking membership.
    ldap_user._get_groups().get_group_dns()

    # If the LDAP user has a first or last name longer than the field allows, truncate it
    for field in ('first_name', 'last_name'):
        max_len = User._meta.get_field(field).max_length
        field_len = len(getattr(user, field))
        if field_len > max_len:
            setattr(user, field, getattr(user, field)[:max_len])
            force_user_update = True
            logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

    org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
    team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
    orgs_list = list(org_map.keys())
    team_map = {}
    for team_name, team_opts in team_map_settings.items():
        if not team_opts.get('organization', None):
            # You can't save the LDAP config in the UI without an org (or with '' or null as the org), so reaching this condition is an error
            logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
            continue
        team_map[team_name] = team_opts['organization']

    create_org_and_teams(orgs_list, team_map, 'LDAP')

    # Compute in memory what the state is of the different LDAP orgs
    org_roles_and_ldap_attributes = {'admin_role': 'admins', 'auditor_role': 'auditors', 'member_role': 'users'}
    desired_org_states = {}
    for org_name, org_opts in org_map.items():
        remove = bool(org_opts.get('remove', True))
        desired_org_states[org_name] = {}
        for org_role_name in org_roles_and_ldap_attributes.keys():
            ldap_name = org_roles_and_ldap_attributes[org_role_name]
            opts = org_opts.get(ldap_name, None)
            remove = bool(org_opts.get('remove_{}'.format(ldap_name), remove))
            desired_org_states[org_name][org_role_name] = _update_m2m_from_groups(ldap_user, opts, remove)

        # If everything returned None (because there was no configuration) we can remove this org from our map
        # This will prevent us from loading the org in the next query
        if all(desired_org_states[org_name][org_role_name] is None for org_role_name in org_roles_and_ldap_attributes.keys()):
            del desired_org_states[org_name]

    # Compute in memory what the state is of the different LDAP teams
    desired_team_states = {}
    for team_name, team_opts in team_map_settings.items():
        if 'organization' not in team_opts:
            continue
        users_opts = team_opts.get('users', None)
        remove = bool(team_opts.get('remove', True))
        state = _update_m2m_from_groups(ldap_user, users_opts, remove)
        if state is not None:
            organization = team_opts['organization']
            if organization not in desired_team_states:
                desired_team_states[organization] = {}
            desired_team_states[organization][team_name] = {'member_role': state}

    # Check if user.profile is available, otherwise force user.save()
    try:
        _ = user.profile
    except ValueError:
        force_user_update = True
    finally:
        if force_user_update:
            user.save()

    # Update the user profile to store the LDAP DN.
    profile = user.profile
    if profile.ldap_dn != ldap_user.dn:
        profile.ldap_dn = ldap_user.dn
        profile.save()

    reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, 'LDAP')
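The tri-state contract documented on _update_m2m_from_groups is easy to exercise in isolation. A sketch with invented stand-ins (FakeGroups and FakeLDAPUser are not part of this module; they only mimic the two LDAP calls the helper makes):

class FakeGroups:
    def __init__(self, dns):
        self._dns = set(dns)

    def is_member_of(self, dn):
        return dn in self._dns


class FakeLDAPUser:
    def __init__(self, dns):
        self._groups = FakeGroups(dns)

    def _get_groups(self):
        return self._groups


u = FakeLDAPUser(['cn=ops,dc=example,dc=org'])
assert _update_m2m_from_groups(u, None) is None  # no opinion: leave membership alone
assert _update_m2m_from_groups(u, True) is True  # always add
assert _update_m2m_from_groups(u, 'cn=ops,dc=example,dc=org') is True  # group matched
assert _update_m2m_from_groups(u, 'cn=dev,dc=example,dc=org') is False  # no match, remove=True
assert _update_m2m_from_groups(u, 'cn=dev,dc=example,dc=org', remove=False) is None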
83
awx/sso/middleware.py
Normal file
@@ -0,0 +1,83 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import urllib.parse

# Django
from django.conf import settings
from django.utils.functional import LazyObject
from django.shortcuts import redirect

# Python Social Auth
from social_core.exceptions import SocialAuthBaseException
from social_core.utils import social_logger
from social_django import utils
from social_django.middleware import SocialAuthExceptionMiddleware


class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
    def __call__(self, request):
        return self.process_request(request)

    def process_request(self, request):
        if request.path.startswith('/sso'):
            # See upgrade blocker note in requirements/README.md
            utils.BACKENDS = settings.AUTHENTICATION_BACKENDS
        token_key = request.COOKIES.get('token', '')
        token_key = urllib.parse.quote(urllib.parse.unquote(token_key).strip('"'))

        if not hasattr(request, 'successful_authenticator'):
            request.successful_authenticator = None

        if not request.path.startswith('/sso/') and 'migrations_notran' not in request.path:
            if request.user and request.user.is_authenticated:
                # The rest of the code base relies heavily on type/inheritance checks;
                # a LazyObject sent from Django auth middleware can be buggy if not
                # converted back to its original object.
                if isinstance(request.user, LazyObject) and request.user._wrapped:
                    request.user = request.user._wrapped
            request.session.pop('social_auth_error', None)
            request.session.pop('social_auth_last_backend', None)
        return self.get_response(request)

    def process_view(self, request, callback, callback_args, callback_kwargs):
        if request.path.startswith('/sso/login/'):
            request.session['social_auth_last_backend'] = callback_kwargs['backend']

    def process_exception(self, request, exception):
        strategy = getattr(request, 'social_strategy', None)
        if strategy is None or self.raise_exception(request, exception):
            return

        if isinstance(exception, SocialAuthBaseException) or request.path.startswith('/sso/'):
            backend = getattr(request, 'backend', None)
            backend_name = getattr(backend, 'name', 'unknown-backend')

            message = self.get_message(request, exception)
            if request.session.get('social_auth_last_backend') != backend_name:
                backend_name = request.session.get('social_auth_last_backend')
                message = request.GET.get('error_description', message)

            full_backend_name = backend_name
            try:
                idp_name = strategy.request_data()['RelayState']
                full_backend_name = '%s:%s' % (backend_name, idp_name)
            except KeyError:
                pass

            social_logger.error(message)

            url = self.get_redirect_uri(request, exception)
            request.session['social_auth_error'] = (full_backend_name, message)
            return redirect(url)

    def get_message(self, request, exception):
        msg = str(exception)
        if msg and msg[-1] not in '.?!':
            msg = msg + '.'
        return msg

    def get_redirect_uri(self, request, exception):
        strategy = getattr(request, 'social_strategy', None)
        return strategy.session_get('next', '') or strategy.setting('LOGIN_ERROR_URL')

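Usage note (a sketch, not part of this diff): like any Django middleware, SocialAuthMiddleware only runs once it is listed in MIDDLEWARE, and since process_request touches request.user it belongs after the auth middleware; the exact AWX settings module is not shown here.

# Illustrative Django settings entry (assumed placement, not from this commit)
MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'awx.sso.middleware.SocialAuthMiddleware',
]
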
150
awx/sso/tests/conftest.py
Normal file
@@ -0,0 +1,150 @@
import pytest

from django.contrib.auth.models import User

from awx.sso.backends import TACACSPlusBackend
from awx.sso.models import UserEnterpriseAuth


@pytest.fixture
def tacacsplus_backend():
    return TACACSPlusBackend()


@pytest.fixture
def existing_normal_user():
    try:
        user = User.objects.get(username="alice")
    except User.DoesNotExist:
        user = User(username="alice", password="password")
        user.save()
    return user


@pytest.fixture
def existing_tacacsplus_user():
    try:
        user = User.objects.get(username="foo")
    except User.DoesNotExist:
        user = User(username="foo")
        user.set_unusable_password()
        user.save()
        enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+')
        enterprise_auth.save()
    return user


@pytest.fixture
def test_radius_config(settings):
    settings.RADIUS_SERVER = '127.0.0.1'
    settings.RADIUS_PORT = 1812
    settings.RADIUS_SECRET = 'secret'


@pytest.fixture
def basic_saml_config(settings):
    settings.SAML_SECURITY_CONFIG = {
        "wantNameId": True,
        "signMetadata": False,
        "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
        "nameIdEncrypted": False,
        "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
        "authnRequestsSigned": False,
        "logoutRequestSigned": False,
        "wantNameIdEncrypted": False,
        "logoutResponseSigned": False,
        "wantAssertionsSigned": True,
        "requestedAuthnContext": False,
        "wantAssertionsEncrypted": False,
    }
    settings.SOCIAL_AUTH_SAML_ENABLED_IDPS = {
        "example": {
            "attr_email": "email",
            "attr_first_name": "first_name",
            "attr_last_name": "last_name",
            "attr_user_permanent_id": "username",
            "attr_username": "username",
            "entity_id": "https://www.example.com/realms/sample",
            "url": "https://www.example.com/realms/sample/protocol/saml",
            "x509cert": "A" * 64 + "B" * 64 + "C" * 23,
        }
    }

    settings.SOCIAL_AUTH_SAML_TEAM_ATTR = {
        "remove": False,
        "saml_attr": "group_name",
        "team_org_map": [
            {"team": "internal:unix:domain:admins", "team_alias": "Administrators", "organization": "Default"},
            {"team": "East Coast", "organization": "North America"},
            {"team": "developers", "organization": "North America"},
            {"team": "developers", "organization": "South America"},
        ],
    }

    settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR = {
        "is_superuser_role": ["wilma"],
        "is_superuser_attr": "friends",
        "is_superuser_value": ["barney", "fred"],
        "remove_superusers": False,
        "is_system_auditor_role": ["fred"],
        "is_system_auditor_attr": "auditor",
        "is_system_auditor_value": ["bamm-bamm"],
    }

    settings.SOCIAL_AUTH_SAML_ORGANIZATION_ATTR = {"saml_attr": "member-of", "remove": True, "saml_admin_attr": "admin-of", "remove_admins": False}


@pytest.fixture
def test_tacacs_config(settings):
    settings.TACACSPLUS_HOST = "tacacshost"
    settings.TACACSPLUS_PORT = 49
    settings.TACACSPLUS_SECRET = "secret"
    settings.TACACSPLUS_SESSION_TIMEOUT = 10
    settings.TACACSPLUS_AUTH_PROTOCOL = "pap"
    settings.TACACSPLUS_REM_ADDR = True


@pytest.fixture
def saml_config_user_flags_no_value(settings):
    settings.SAML_SECURITY_CONFIG = {
        "wantNameId": True,
        "signMetadata": False,
        "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
        "nameIdEncrypted": False,
        "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
        "authnRequestsSigned": False,
        "logoutRequestSigned": False,
        "wantNameIdEncrypted": False,
        "logoutResponseSigned": False,
        "wantAssertionsSigned": True,
        "requestedAuthnContext": False,
        "wantAssertionsEncrypted": False,
    }
    settings.SOCIAL_AUTH_SAML_ENABLED_IDPS = {
        "example": {
            "attr_email": "email",
            "attr_first_name": "first_name",
            "attr_last_name": "last_name",
            "attr_user_permanent_id": "username",
            "attr_username": "username",
            "entity_id": "https://www.example.com/realms/sample",
            "url": "https://www.example.com/realms/sample/protocol/saml",
            "x509cert": "A" * 64 + "B" * 64 + "C" * 23,
        }
    }

    settings.SOCIAL_AUTH_SAML_TEAM_ATTR = {
        "remove": False,
        "saml_attr": "group_name",
        "team_org_map": [
            {"team": "internal:unix:domain:admins", "team_alias": "Administrators", "organization": "Default"},
            {"team": "East Coast", "organization": "North America"},
            {"team": "developers", "organization": "North America"},
            {"team": "developers", "organization": "South America"},
        ],
    }

    settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR = {
        "is_superuser_role": ["wilma"],
        "is_superuser_attr": "friends",
    }

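These fixtures lean on pytest-django's built-in settings fixture, which restores every modified setting after each test; a test opts in simply by naming a fixture, as in this minimal sketch (not part of the commit):

# Minimal illustration of consuming one of the fixtures above
import pytest


@pytest.mark.django_db
def test_radius_server_is_patched(test_radius_config, settings):
    # test_radius_config has already patched the RADIUS settings for this test only
    assert settings.RADIUS_SERVER == '127.0.0.1'
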
104
awx/sso/tests/unit/test_google_oauth2_migrator.py
Normal file
@@ -0,0 +1,104 @@
import pytest
from unittest.mock import MagicMock
from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator


@pytest.fixture
def test_google_config(settings):
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = "test_key"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = "test_secret"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL = "https://tower.example.com/sso/complete/google-oauth2/"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP = {"My Org": {"users": True}}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP = {"My Team": {"organization": "My Org", "users": True}}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = ["profile", "email"]


@pytest.mark.django_db
def test_get_controller_config(test_google_config):
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = GoogleOAuth2Migrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    assert len(result) == 1
    config = result[0]
    assert config['category'] == 'Google OAuth2'
    settings = config['settings']
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'] == 'test_key'
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'] == 'test_secret'
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL'] == "https://tower.example.com/sso/complete/google-oauth2/"
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'] == ["profile", "email"]
    # Assert that other settings are not present in the returned config
    assert 'SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP' not in settings
    assert 'SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP' not in settings


@pytest.mark.django_db
def test_create_gateway_authenticator(mocker, test_google_config):
    mocker.patch('django.conf.settings.LOGGING', {})

    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = GoogleOAuth2Migrator(gateway_client, command_obj)
    mock_submit = MagicMock(return_value=True)
    obj.submit_authenticator = mock_submit

    configs = obj.get_controller_config()
    result = obj.create_gateway_authenticator(configs[0])

    assert result is True
    mock_submit.assert_called_once()

    # Assert payload sent to gateway
    payload = mock_submit.call_args[0][0]
    assert payload['name'] == 'google'
    assert payload['slug'] == 'aap-google-oauth2-google-oauth2'
    assert payload['type'] == 'ansible_base.authentication.authenticator_plugins.google_oauth2'
    assert payload['enabled'] is False
    assert payload['create_objects'] is True
    assert payload['remove_users'] is False

    # Assert configuration details
    configuration = payload['configuration']
    assert configuration['KEY'] == 'test_key'
    assert configuration['SECRET'] == 'test_secret'
    assert configuration['CALLBACK_URL'] == 'https://tower.example.com/sso/complete/google-oauth2/'
    assert configuration['SCOPE'] == ['profile', 'email']

    # Assert mappers
    assert len(payload['mappers']) == 2
    assert payload['mappers'][0]['map_type'] == 'organization'
    assert payload['mappers'][1]['map_type'] == 'team'

    # Assert ignore_keys
    ignore_keys = mock_submit.call_args[0][1]
    assert ignore_keys == ["ACCESS_TOKEN_METHOD", "REVOKE_TOKEN_METHOD"]


@pytest.mark.django_db
def test_create_gateway_authenticator_no_optional_values(mocker, settings):
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = "test_key"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = "test_secret"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP = {}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP = {}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = None
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL = None

    mocker.patch('django.conf.settings.LOGGING', {})

    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = GoogleOAuth2Migrator(gateway_client, command_obj)
    mock_submit = MagicMock(return_value=True)
    obj.submit_authenticator = mock_submit

    configs = obj.get_controller_config()
    obj.create_gateway_authenticator(configs[0])

    payload = mock_submit.call_args[0][0]
    assert 'CALLBACK_URL' not in payload['configuration']
    assert 'SCOPE' not in payload['configuration']

    ignore_keys = mock_submit.call_args[0][1]
    assert 'CALLBACK_URL' in ignore_keys
    assert 'SCOPE' in ignore_keys

17
awx/sso/tests/unit/test_radius_migrator.py
Normal file
@@ -0,0 +1,17 @@
import pytest
from unittest.mock import MagicMock
from awx.sso.utils.radius_migrator import RADIUSMigrator


@pytest.mark.django_db
def test_get_controller_config(test_radius_config):
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = RADIUSMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    config = result[0]['settings']['configuration']
    assert config['SERVER'] == '127.0.0.1'
    assert config['PORT'] == 1812
    assert config['SECRET'] == 'secret'
    assert len(config) == 3

272
awx/sso/tests/unit/test_saml_migrator.py
Normal file
@@ -0,0 +1,272 @@
import pytest
from unittest.mock import MagicMock, patch
from awx.sso.utils.saml_migrator import SAMLMigrator


@pytest.mark.django_db
def test_get_controller_config(basic_saml_config):
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    lines = result[0]['settings']['configuration']['IDP_X509_CERT'].splitlines()
    assert lines[0] == '-----BEGIN CERTIFICATE-----'
    assert lines[1] == "A" * 64
    assert lines[2] == "B" * 64
    assert lines[3] == "C" * 23
    assert lines[-1] == '-----END CERTIFICATE-----'


@pytest.mark.django_db
def test_get_controller_config_with_mapper(saml_config_user_flags_no_value):
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    expected_maps = [
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'Default',
            'team': 'Administrators',
            'name': 'Team-Administrators-Default',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['internal:unix:domain:admins']}, 'join_condition': 'or'}},
            'order': 1,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'East Coast',
            'name': 'Team-East Coast-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['East Coast']}, 'join_condition': 'or'}},
            'order': 2,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'developers',
            'name': 'Team-developers-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 3,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'South America',
            'team': 'developers',
            'name': 'Team-developers-South America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 4,
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 5,
            'authenticator': -1,
            'triggers': {'attributes': {'Role': {'in': ['wilma']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser-attr',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 6,
            'authenticator': -1,
            'triggers': {'attributes': {'friends': {}, 'join_condition': 'or'}},
        },
    ]
    assert result[0]['team_mappers'] == expected_maps
    extra_data = result[0]['settings']['configuration']['EXTRA_DATA']
    assert ['Role', 'Role'] in extra_data
    assert ['friends', 'friends'] in extra_data
    assert ['group_name', 'group_name'] in extra_data


@pytest.mark.django_db
def test_get_controller_config_with_roles(basic_saml_config):
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()

    expected_maps = [
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'Default',
            'team': 'Administrators',
            'name': 'Team-Administrators-Default',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['internal:unix:domain:admins']}, 'join_condition': 'or'}},
            'order': 1,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'East Coast',
            'name': 'Team-East Coast-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['East Coast']}, 'join_condition': 'or'}},
            'order': 2,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'developers',
            'name': 'Team-developers-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 3,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'South America',
            'team': 'developers',
            'name': 'Team-developers-South America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 4,
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser',
            'organization': None,
            'team': None,
            'revoke': False,
            'order': 5,
            'authenticator': -1,
            'triggers': {'attributes': {'Role': {'in': ['wilma']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'role',
            'role': 'Platform Auditor',
            'name': 'Role-Platform Auditor',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 6,
            'authenticator': -1,
            'triggers': {'attributes': {'Role': {'in': ['fred']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser-attr',
            'organization': None,
            'team': None,
            'revoke': False,
            'order': 7,
            'authenticator': -1,
            'triggers': {'attributes': {'friends': {'in': ['barney', 'fred']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'role',
            'role': 'Platform Auditor',
            'name': 'Role-Platform Auditor-attr',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 8,
            'authenticator': -1,
            'triggers': {'attributes': {'auditor': {'in': ['bamm-bamm']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'organization',
            'role': 'Organization Member',
            'name': 'Role-Organization Member-attr',
            'organization': "{% for_attr_value('member-of') %}",
            'team': None,
            'revoke': True,
            'order': 9,
            'authenticator': -1,
            'triggers': {'attributes': {'member-of': {}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'organization',
            'role': 'Organization Admin',
            'name': 'Role-Organization Admin-attr',
            'organization': "{% for_attr_value('admin-of') %}",
            'team': None,
            'revoke': False,
            'order': 10,
            'authenticator': -1,
            'triggers': {'attributes': {'admin-of': {}, 'join_condition': 'or'}},
        },
    ]

    assert result[0]['team_mappers'] == expected_maps
    extra_data = result[0]['settings']['configuration']['EXTRA_DATA']
    extra_data_items = [
        ['member-of', 'member-of'],
        ['admin-of', 'admin-of'],
        ['Role', 'Role'],
        ['friends', 'friends'],
        ['group_name', 'group_name'],
    ]
    for item in extra_data_items:
        assert item in extra_data
        assert extra_data.count(item) == 1


@pytest.mark.django_db
def test_get_controller_config_enabled_false(basic_saml_config):
    """SAML controller export marks settings.enabled False by default."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    assert isinstance(result, list) and len(result) >= 1
    assert result[0]['settings']['enabled'] is False


@pytest.mark.django_db
def test_create_gateway_authenticator_submits_disabled(basic_saml_config):
    """Submitted Gateway authenticator config must have enabled=False and correct ignore keys."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)

    config = obj.get_controller_config()[0]

    with patch.object(
        obj,
        'submit_authenticator',
        return_value={'success': True, 'action': 'created', 'error': None},
    ) as submit_mock:
        obj.create_gateway_authenticator(config)

    # Extract submitted args: gateway_config, ignore_keys, original_config
    submitted_gateway_config = submit_mock.call_args[0][0]
    ignore_keys = submit_mock.call_args[0][1]

    assert submitted_gateway_config['enabled'] is False
    assert 'CALLBACK_URL' in ignore_keys
    assert 'SP_PRIVATE_KEY' in ignore_keys

384
awx/sso/tests/unit/test_settings_migrator.py
Normal file
@@ -0,0 +1,384 @@
"""
Unit tests for SettingsMigrator class.
"""

import pytest
from unittest.mock import Mock, patch
from awx.sso.utils.settings_migrator import SettingsMigrator


class TestSettingsMigrator:
    """Tests for SettingsMigrator class."""

    def setup_method(self):
        """Set up test fixtures."""
        self.gateway_client = Mock()
        self.command = Mock()
        self.migrator = SettingsMigrator(self.gateway_client, self.command)

    def test_get_authenticator_type(self):
        """Test that get_authenticator_type returns 'Settings'."""
        assert self.migrator.get_authenticator_type() == "Settings"

    @pytest.mark.parametrize(
        "input_name,expected_output",
        [
            ('CUSTOM_LOGIN_INFO', 'custom_login_info'),
            ('CUSTOM_LOGO', 'custom_logo'),
            ('UNKNOWN_SETTING', 'UNKNOWN_SETTING'),
            ('ANOTHER_UNKNOWN', 'ANOTHER_UNKNOWN'),
        ],
    )
    def test_convert_setting_name(self, input_name, expected_output):
        """Test setting name conversion."""
        result = self.migrator._convert_setting_name(input_name)
        assert result == expected_output

    @pytest.mark.parametrize(
        "transformer_method,test_values",
        [
            ('_transform_social_auth_username_is_full_email', [True, False]),
            ('_transform_allow_oauth2_for_external_users', [True, False]),
        ],
    )
    def test_boolean_transformers(self, transformer_method, test_values):
        """Test that boolean transformers return values as-is."""
        transformer = getattr(self.migrator, transformer_method)
        for value in test_values:
            assert transformer(value) is value

    @pytest.mark.parametrize(
        "settings_values,expected_count",
        [
            # Test case: all settings are None
            (
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': None,
                    'MANAGE_ORGANIZATION_AUTH': None,
                },
                0,
            ),
            # Test case: all settings are empty strings
            (
                {
                    'SESSION_COOKIE_AGE': "",
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': "",
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': "",
                    'LOGIN_REDIRECT_OVERRIDE': "",
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': "",
                    'MANAGE_ORGANIZATION_AUTH': "",
                },
                0,
            ),
            # Test case: only new settings have values
            (
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': True,
                    'MANAGE_ORGANIZATION_AUTH': False,
                },
                2,
            ),
        ],
    )
    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_various_scenarios(self, mock_settings, settings_values, expected_count):
        """Test get_controller_config with various setting combinations."""
        # Apply the settings values to the mock
        for setting_name, setting_value in settings_values.items():
            setattr(mock_settings, setting_name, setting_value)

        result = self.migrator.get_controller_config()
        assert len(result) == expected_count

        # Verify structure if we have results
        if result:
            for config in result:
                assert config['category'] == 'global-settings'
                assert 'setting_name' in config
                assert 'setting_value' in config
                assert config['org_mappers'] == []
                assert config['team_mappers'] == []
                assert config['role_mappers'] == []
                assert config['allow_mappers'] == []

    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_with_all_settings(self, mock_settings):
        """Test get_controller_config with all settings configured."""
        # Mock all settings with valid values
        mock_settings.SESSION_COOKIE_AGE = 3600
        mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
        mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = False
        mock_settings.LOGIN_REDIRECT_OVERRIDE = "https://example.com/login"
        mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = True
        mock_settings.MANAGE_ORGANIZATION_AUTH = False

        # Mock the login redirect override to not be set by migrator
        with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False):
            result = self.migrator.get_controller_config()

        assert len(result) == 6

        # Check that all expected settings are present
        setting_names = [config['setting_name'] for config in result]
        expected_settings = [
            'SESSION_COOKIE_AGE',
            'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL',
            'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
            'LOGIN_REDIRECT_OVERRIDE',
            'ORG_ADMINS_CAN_SEE_ALL_USERS',
            'MANAGE_ORGANIZATION_AUTH',
        ]

        for setting in expected_settings:
            assert setting in setting_names

        # Verify structure of returned configs
        for config in result:
            assert config['category'] == 'global-settings'
            assert 'setting_name' in config
            assert 'setting_value' in config
            assert config['org_mappers'] == []
            assert config['team_mappers'] == []
            assert config['role_mappers'] == []
            assert config['allow_mappers'] == []

    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_with_new_settings_only(self, mock_settings):
        """Test get_controller_config with only the new settings configured."""
        # Mock only the new settings
        mock_settings.SESSION_COOKIE_AGE = None
        mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = None
        mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None
        mock_settings.LOGIN_REDIRECT_OVERRIDE = None
        mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = True
        mock_settings.MANAGE_ORGANIZATION_AUTH = False

        result = self.migrator.get_controller_config()

        assert len(result) == 2

        # Check the new settings are present
        setting_names = [config['setting_name'] for config in result]
        assert 'ORG_ADMINS_CAN_SEE_ALL_USERS' in setting_names
        assert 'MANAGE_ORGANIZATION_AUTH' in setting_names

        # Verify the values
        org_admins_config = next(c for c in result if c['setting_name'] == 'ORG_ADMINS_CAN_SEE_ALL_USERS')
        assert org_admins_config['setting_value'] is True

        manage_org_auth_config = next(c for c in result if c['setting_name'] == 'MANAGE_ORGANIZATION_AUTH')
        assert manage_org_auth_config['setting_value'] is False

    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_with_login_redirect_override_from_migrator(self, mock_settings):
        """Test get_controller_config when LOGIN_REDIRECT_OVERRIDE is set by migrator."""
        # Mock settings
        mock_settings.SESSION_COOKIE_AGE = None
        mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = None
        mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None
        mock_settings.LOGIN_REDIRECT_OVERRIDE = "https://original.com/login"
        mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = None
        mock_settings.MANAGE_ORGANIZATION_AUTH = None

        # Mock the login redirect override to be set by migrator
        with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', True):
            with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_new_url', 'https://new.com/login'):
                result = self.migrator.get_controller_config()

        assert len(result) == 1
        assert result[0]['setting_name'] == 'LOGIN_REDIRECT_OVERRIDE'
        assert result[0]['setting_value'] == 'https://new.com/login'  # Should use the migrator URL

    @pytest.mark.parametrize(
        "config,current_value,expected_action,should_update",
        [
            # Test case: setting needs update
            ({'setting_name': 'ORG_ADMINS_CAN_SEE_ALL_USERS', 'setting_value': True}, False, 'updated', True),
            # Test case: setting is unchanged
            ({'setting_name': 'MANAGE_ORGANIZATION_AUTH', 'setting_value': False}, False, 'skipped', False),
            # Test case: another setting needs update
            ({'setting_name': 'SESSION_COOKIE_AGE', 'setting_value': 7200}, 3600, 'updated', True),
            # Test case: another setting is unchanged
            ({'setting_name': 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', 'setting_value': True}, True, 'skipped', False),
        ],
    )
    def test_create_gateway_authenticator_success_scenarios(self, config, current_value, expected_action, should_update):
        """Test create_gateway_authenticator success scenarios."""
        # Mock gateway client methods
        self.gateway_client.get_gateway_setting.return_value = current_value
        self.gateway_client.update_gateway_setting.return_value = None

        result = self.migrator.create_gateway_authenticator(config)

        assert result['success'] is True
        assert result['action'] == expected_action
        assert result['error'] is None

        # Verify gateway client calls
        expected_setting_name = config['setting_name']
        self.gateway_client.get_gateway_setting.assert_called_once_with(expected_setting_name)

        if should_update:
            self.gateway_client.update_gateway_setting.assert_called_once_with(expected_setting_name, config['setting_value'])
        else:
            self.gateway_client.update_gateway_setting.assert_not_called()

        # Reset mocks for next iteration
        self.gateway_client.reset_mock()

    def test_create_gateway_authenticator_with_setting_name_conversion(self):
        """Test create_gateway_authenticator with setting name that needs conversion."""
        config = {'setting_name': 'CUSTOM_LOGIN_INFO', 'setting_value': 'Some custom info'}

        # Mock gateway client methods
        self.gateway_client.get_gateway_setting.return_value = 'Old info'  # Different value
        self.gateway_client.update_gateway_setting.return_value = None

        result = self.migrator.create_gateway_authenticator(config)

        assert result['success'] is True
        assert result['action'] == 'updated'

        # Verify gateway client was called with converted name
        self.gateway_client.get_gateway_setting.assert_called_once_with('custom_login_info')
        self.gateway_client.update_gateway_setting.assert_called_once_with('custom_login_info', 'Some custom info')

    def test_create_gateway_authenticator_failure(self):
        """Test create_gateway_authenticator when gateway update fails."""
        config = {'setting_name': 'SESSION_COOKIE_AGE', 'setting_value': 7200}

        # Mock gateway client to raise exception
        self.gateway_client.get_gateway_setting.return_value = 3600
        self.gateway_client.update_gateway_setting.side_effect = Exception("Gateway error")

        result = self.migrator.create_gateway_authenticator(config)

        assert result['success'] is False
        assert result['action'] == 'failed'
        assert result['error'] == 'Gateway error'

    @pytest.mark.parametrize(
        "scenario,settings_config,gateway_responses,update_side_effects,expected_counts",
        [
            # Scenario 1: No settings configured
            (
                "no_settings",
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': None,
                    'MANAGE_ORGANIZATION_AUTH': None,
                },
                [],  # No gateway calls expected
                [],  # No update calls expected
                {'settings_created': 0, 'settings_updated': 0, 'settings_unchanged': 0, 'settings_failed': 0},
            ),
            # Scenario 2: All updates successful
            (
                "successful_updates",
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': True,
                    'MANAGE_ORGANIZATION_AUTH': False,
                },
                [False, True],  # Different values to trigger updates
                [None, None],  # Successful updates
                {'settings_created': 0, 'settings_updated': 2, 'settings_unchanged': 0, 'settings_failed': 0},
            ),
            # Scenario 3: One unchanged, one failed update
            (
                "mixed_results",
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': True,
                    'MANAGE_ORGANIZATION_AUTH': False,
                },
                [True, True],  # Gateway returns: ORG_ADMINS_CAN_SEE_ALL_USERS=True (unchanged), MANAGE_ORGANIZATION_AUTH=True (needs update)
                [ValueError("Update failed")],  # Only one update call (for MANAGE_ORGANIZATION_AUTH), and it fails
                {'settings_created': 0, 'settings_updated': 0, 'settings_unchanged': 1, 'settings_failed': 1},
            ),
        ],
    )
    @patch('awx.sso.utils.settings_migrator.settings')
    def test_migrate_scenarios(self, mock_settings, scenario, settings_config, gateway_responses, update_side_effects, expected_counts):
        """Test migrate method with various scenarios."""
        # Apply settings configuration
        for setting_name, setting_value in settings_config.items():
            setattr(mock_settings, setting_name, setting_value)

        # Mock gateway client responses
        if gateway_responses:
            self.gateway_client.get_gateway_setting.side_effect = gateway_responses
        if update_side_effects:
            self.gateway_client.update_gateway_setting.side_effect = update_side_effects

        # Mock the login redirect override to not be set by migrator for these tests
        with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False):
            result = self.migrator.migrate()

        # Verify expected counts
        for key, expected_value in expected_counts.items():
            assert result[key] == expected_value, f"Scenario {scenario}: Expected {key}={expected_value}, got {result[key]}"

        # All authenticator/mapper counts should be 0 since settings don't have them
        authenticator_mapper_keys = ['created', 'updated', 'unchanged', 'failed', 'mappers_created', 'mappers_updated', 'mappers_failed']
        for key in authenticator_mapper_keys:
            assert result[key] == 0, f"Scenario {scenario}: Expected {key}=0, got {result[key]}"

    def test_setting_transformers_defined(self):
        """Test that setting transformers are properly defined."""
        expected_transformers = {'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS'}

        actual_transformers = set(self.migrator.setting_transformers.keys())
        assert actual_transformers == expected_transformers

    @pytest.mark.parametrize(
        "transformer_return_value,expected_result_count",
        [
            (None, 0),  # Transformer returns None - should be excluded
            ("", 0),  # Transformer returns empty string - should be excluded
            (True, 1),  # Transformer returns valid value - should be included
        ],
    )
    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_transformer_edge_cases(self, mock_settings, transformer_return_value, expected_result_count):
        """Test get_controller_config when transformer returns various edge case values."""
        # Mock settings - only one setting with a value that has a transformer
        mock_settings.SESSION_COOKIE_AGE = None
        mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
        mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None
        mock_settings.LOGIN_REDIRECT_OVERRIDE = None
        mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = None
        mock_settings.MANAGE_ORGANIZATION_AUTH = None

        # Mock transformer to return the specified value
        # We need to patch the transformer in the dictionary, not just the method
        original_transformer = self.migrator.setting_transformers.get('SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL')
        self.migrator.setting_transformers['SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL'] = lambda x: transformer_return_value

        try:
            # Mock the login redirect override to not be set by migrator
            with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False):
                result = self.migrator.get_controller_config()
        finally:
            # Restore the original transformer
            if original_transformer:
                self.migrator.setting_transformers['SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL'] = original_transformer

        assert len(result) == expected_result_count

37
awx/sso/tests/unit/test_tacacs_migrator.py
Normal file
@@ -0,0 +1,37 @@
import pytest
from unittest.mock import MagicMock
from awx.sso.utils.tacacs_migrator import TACACSMigrator


@pytest.mark.django_db
def test_get_controller_config(test_tacacs_config):
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = TACACSMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    assert len(result) == 1
    config = result[0]
    assert config['category'] == 'TACACSPLUS'
    settings_data = config['settings']
    assert settings_data['name'] == 'default'
    assert settings_data['type'] == 'ansible_base.authentication.authenticator_plugins.tacacs'

    configuration = settings_data['configuration']
    assert configuration['HOST'] == 'tacacshost'
    assert configuration['PORT'] == 49
    assert configuration['SECRET'] == 'secret'
    assert configuration['SESSION_TIMEOUT'] == 10
    assert configuration['AUTH_PROTOCOL'] == 'pap'
    assert configuration['REM_ADDR'] is True


@pytest.mark.django_db
def test_get_controller_config_no_host(settings):
    settings.TACACSPLUS_HOST = ""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = TACACSMigrator(gateway_client, command_obj)

    result = obj.get_controller_config()
    assert len(result) == 0

17
awx/sso/utils/__init__.py
Normal file
@@ -0,0 +1,17 @@
from awx.sso.utils.azure_ad_migrator import AzureADMigrator
from awx.sso.utils.github_migrator import GitHubMigrator
from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator
from awx.sso.utils.ldap_migrator import LDAPMigrator
from awx.sso.utils.oidc_migrator import OIDCMigrator
from awx.sso.utils.radius_migrator import RADIUSMigrator
from awx.sso.utils.saml_migrator import SAMLMigrator

__all__ = [
    'AzureADMigrator',
    'GitHubMigrator',
    'GoogleOAuth2Migrator',
    'LDAPMigrator',
    'OIDCMigrator',
    'RADIUSMigrator',
    'SAMLMigrator',
]

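With these re-exports in place, callers can pull any migrator straight off the package (illustrative, not part of this commit):

# Illustrative import style enabled by this __init__.py
from awx.sso.utils import LDAPMigrator, SAMLMigrator
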
97
awx/sso/utils/azure_ad_migrator.py
Normal file
@@ -0,0 +1,97 @@
"""
Azure AD authenticator migrator.

This module handles the migration of Azure AD authenticators from AWX to Gateway.
"""

from django.conf import settings
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator


class AzureADMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of Azure AD authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "Azure AD"

    def get_controller_config(self):
        """
        Export Azure AD authenticators. An Azure AD authenticator is only exported if
        KEY and SECRET are configured.

        Returns:
            list: List of configured Azure AD authentication providers with their settings
        """
        key_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', None)
        secret_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET', None)

        # Skip this category if the Azure AD key and/or secret are not configured
        if not key_value or not secret_value:
            return []

        # If we have both key and secret, collect all settings
        org_map_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP', None)
        team_map_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP', None)
        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)

        # Convert Azure AD org and team mappings from AWX to the Gateway format.
        # Start with order 1 and maintain sequence across both org and team mappers.
        org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
        team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)

        category = 'AzureAD'

        # Generate authenticator name and slug
        authenticator_name = "Azure AD"
        authenticator_slug = self._generate_authenticator_slug("azure_ad", category)

        return [
            {
                'category': category,
                'settings': {
                    "name": authenticator_name,
                    "slug": authenticator_slug,
                    "type": "ansible_base.authentication.authenticator_plugins.azuread",
                    "enabled": False,
                    "create_objects": True,
                    "remove_users": False,
                    "configuration": {
                        "KEY": key_value,
                        "SECRET": secret_value,
                        "GROUPS_CLAIM": "groups",
                    },
                },
                'org_mappers': org_mappers,
                'team_mappers': team_mappers,
                'login_redirect_override': login_redirect_override,
            }
        ]

    def create_gateway_authenticator(self, config):
        """Create an Azure AD authenticator in Gateway."""

        category = config["category"]
        gateway_config = config["settings"]

        self._write_output(f"\n--- Processing {category} authenticator ---")
        self._write_output(f"Name: {gateway_config['name']}")
        self._write_output(f"Slug: {gateway_config['slug']}")
        self._write_output(f"Type: {gateway_config['type']}")

        # CALLBACK_URL - automatically created by Gateway
        # GROUPS_CLAIM - Not an AWX feature
        # ADDITIONAL_UNVERIFIED_ARGS - Not an AWX feature
        ignore_keys = ["CALLBACK_URL", "GROUPS_CLAIM"]

        # Submit the authenticator (create or update as needed)
        result = self.submit_authenticator(gateway_config, ignore_keys, config)

        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
        valid_login_urls = ['/sso/login/azuread-oauth2']
        self.handle_login_override(config, valid_login_urls)

        return result

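Putting it together, a management command would typically drive one of these migrators through the base-class migrate() entry point, roughly as sketched below; the gateway_client and command objects are assumed to be constructed elsewhere and are not part of this diff.

# Hypothetical driver sketch; gateway_client/command wiring is assumed
from awx.sso.utils import AzureADMigrator

migrator = AzureADMigrator(gateway_client=gateway_client, command=command, force=False)
summary = migrator.migrate()  # dict of created/updated/unchanged/failed plus mapper counts
if summary['failed']:
    print(f"{summary['failed']} Azure AD authenticator(s) failed to migrate")
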
679
awx/sso/utils/base_migrator.py
Normal file
@@ -0,0 +1,679 @@
|
|||||||
|
"""
|
||||||
|
Base authenticator migrator class.
|
||||||
|
|
||||||
|
This module defines the contract that all specific authenticator migrators must follow.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from urllib.parse import urlparse, parse_qs, urlencode
|
||||||
|
from django.conf import settings
|
||||||
|
from awx.main.utils.gateway_client import GatewayAPIError
|
||||||
|
|
||||||
|
|
||||||
|
class BaseAuthenticatorMigrator:
|
||||||
|
"""
|
||||||
|
Base class for all authenticator migrators.
|
||||||
|
Defines the contract that all specific authenticator migrators must follow.
|
||||||
|
"""
|
||||||
|
|
||||||
|
KEYS_TO_PRESERVE = ['idp']
|
||||||
|
# Class-level flag to track if LOGIN_REDIRECT_OVERRIDE was set by any migrator
|
||||||
|
login_redirect_override_set_by_migrator = False
|
||||||
|
# Class-level variable to store the new LOGIN_REDIRECT_OVERRIDE URL computed by migrators
|
||||||
|
login_redirect_override_new_url = None
|
||||||
|
|
||||||
|
def __init__(self, gateway_client=None, command=None, force=False):
|
||||||
|
"""
|
||||||
|
Initialize the authenticator migrator.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
gateway_client: GatewayClient instance for API calls
|
||||||
|
command: Optional Django management command instance (for styled output)
|
||||||
|
force: If True, force migration even if configurations already exist
|
||||||
|
"""
|
||||||
|
self.gateway_client = gateway_client
|
||||||
|
self.command = command
|
||||||
|
self.force = force
|
||||||
|
self.encrypted_fields = [
|
||||||
|
# LDAP Fields
|
||||||
|
'BIND_PASSWORD',
|
||||||
|
# The following authenticators all use the same key to store encrypted information:
|
||||||
|
# Generic OIDC
|
||||||
|
# RADIUS
|
||||||
|
# TACACS+
|
||||||
|
# GitHub OAuth2
|
||||||
|
# Azure AD OAuth2
|
||||||
|
# Google OAuth2
|
||||||
|
'SECRET',
|
||||||
|
# SAML Fields
|
||||||
|
'SP_PRIVATE_KEY',
|
||||||
|
]
|
||||||
|
|
||||||
|
def migrate(self):
|
||||||
|
"""
|
||||||
|
Main entry point - orchestrates the migration process.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary of migration results
|
||||||
|
"""
|
||||||
|
# Get configuration from AWX/Controller
|
||||||
|
configs = self.get_controller_config()
|
||||||
|
|
||||||
|
if not configs:
|
||||||
|
self._write_output(f'No {self.get_authenticator_type()} authenticators found to migrate.', 'warning')
|
||||||
|
return {'created': 0, 'updated': 0, 'unchanged': 0, 'failed': 0, 'mappers_created': 0, 'mappers_updated': 0, 'mappers_failed': 0}
|
||||||
|
|
||||||
|
self._write_output(f'Found {len(configs)} {self.get_authenticator_type()} authentication configuration(s).', 'success')
|
||||||
|
|
||||||
|
# Process each authenticator configuration
|
||||||
|
created_authenticators = []
|
||||||
|
updated_authenticators = []
|
||||||
|
unchanged_authenticators = []
|
||||||
|
failed_authenticators = []
|
||||||
|
|
||||||
|
for config in configs:
|
||||||
|
result = self.create_gateway_authenticator(config)
|
||||||
|
if result['success']:
|
||||||
|
if result['action'] == 'created':
|
||||||
|
created_authenticators.append(config)
|
||||||
|
elif result['action'] == 'updated':
|
||||||
|
updated_authenticators.append(config)
|
||||||
|
elif result['action'] == 'skipped':
|
||||||
|
unchanged_authenticators.append(config)
|
||||||
|
else:
|
||||||
|
failed_authenticators.append(config)
|
||||||
|
|
||||||
|
# Process mappers for successfully created/updated/unchanged authenticators
|
||||||
|
mappers_created = 0
|
||||||
|
mappers_updated = 0
|
||||||
|
mappers_failed = 0
|
||||||
|
successful_authenticators = created_authenticators + updated_authenticators + unchanged_authenticators
|
||||||
|
if successful_authenticators:
|
||||||
|
self._write_output('\n=== Processing Authenticator Mappers ===', 'success')
|
||||||
|
for config in successful_authenticators:
|
||||||
|
mapper_result = self._process_gateway_mappers(config)
|
||||||
|
mappers_created += mapper_result['created']
|
||||||
|
mappers_updated += mapper_result['updated']
|
||||||
|
mappers_failed += mapper_result['failed']
|
||||||
|
|
||||||
|
# Authenticators don't have settings, so settings counts are always 0
|
||||||
|
return {
|
||||||
|
'created': len(created_authenticators),
|
||||||
|
'updated': len(updated_authenticators),
|
||||||
|
'unchanged': len(unchanged_authenticators),
|
||||||
|
'failed': len(failed_authenticators),
|
||||||
|
'mappers_created': mappers_created,
|
||||||
|
'mappers_updated': mappers_updated,
|
||||||
|
'mappers_failed': mappers_failed,
|
||||||
|
'settings_created': 0,
|
||||||
|
'settings_updated': 0,
|
||||||
|
'settings_unchanged': 0,
|
||||||
|
'settings_failed': 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_controller_config(self):
|
||||||
|
"""
|
||||||
|
Gather configuration from AWX/Controller.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: List of configuration dictionaries
|
||||||
|
"""
|
||||||
|
raise NotImplementedError("Subclasses must implement get_controller_config()")
|
||||||
|
|
||||||
|
    def create_gateway_authenticator(self, config):
        """
        Create authenticator in Gateway.

        Args:
            config: Configuration dictionary from get_controller_config()

        Returns:
            dict: Result with 'success' (bool), 'action' (str or None), and 'error' (str or None)
        """
        raise NotImplementedError("Subclasses must implement create_gateway_authenticator()")
    def get_authenticator_type(self):
        """
        Get the human-readable authenticator type name.

        Returns:
            str: Authenticator type name for logging
        """
        raise NotImplementedError("Subclasses must implement get_authenticator_type()")
    def _generate_authenticator_slug(self, auth_type, category):
        """Generate a deterministic slug for an authenticator."""
        return f"aap-{auth_type}-{category}".lower()
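    # Example: _generate_authenticator_slug('github', 'github-org') returns
    # 'aap-github-github-org'. The lower() call keeps slugs deterministic and
    # case-insensitive across runs, so re-running the migration finds the same
    # authenticator again.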
    def submit_authenticator(self, gateway_config, ignore_keys=None, config=None):
        """
        Submit an authenticator to Gateway - either create new or update existing.

        Args:
            gateway_config: Complete Gateway authenticator configuration
            ignore_keys: List of configuration keys to ignore during comparison
            config: Optional AWX config dict to store result data

        Returns:
            dict: Result with 'success' (bool), 'action' ('created', 'updated', 'skipped'), 'error' (str or None)
        """
        # Avoid mutable default arguments; fall back to fresh objects per call
        if ignore_keys is None:
            ignore_keys = []
        if config is None:
            config = {}

        authenticator_slug = gateway_config.get('slug')
        if not authenticator_slug:
            self._write_output('Gateway config missing slug, cannot submit authenticator', 'error')
            return {'success': False, 'action': None, 'error': 'Missing slug'}

        try:
            # Check if authenticator already exists by slug
            existing_authenticator = self.gateway_client.get_authenticator_by_slug(authenticator_slug)

            if existing_authenticator:
                # Authenticator exists, check if configuration matches
                authenticator_id = existing_authenticator.get('id')

                configs_match, differences = self._authenticator_configs_match(existing_authenticator, gateway_config, ignore_keys)

                if configs_match:
                    self._write_output(f'⚠ Authenticator already exists with matching configuration (ID: {authenticator_id})', 'warning')
                    # Store the existing result for mapper creation
                    config['gateway_authenticator_id'] = authenticator_id
                    config['gateway_authenticator'] = existing_authenticator
                    return {'success': True, 'action': 'skipped', 'error': None}
                else:
                    self._write_output(f'⚠ Authenticator exists but configuration differs (ID: {authenticator_id})', 'warning')
                    self._write_output('  Configuration comparison:')

                    # Log differences between the existing and the new configuration in case of an update
                    for difference in differences:
                        self._write_output(f'  {difference}')

                    # Update the existing authenticator
                    self._write_output('  Updating authenticator with new configuration...')
                    try:
                        # Don't include the slug in the update since it shouldn't change
                        update_config = gateway_config.copy()
                        if 'slug' in update_config:
                            del update_config['slug']

                        result = self.gateway_client.update_authenticator(authenticator_id, update_config)
                        self._write_output(f'✓ Successfully updated authenticator with ID: {authenticator_id}', 'success')

                        # Store the updated result for mapper creation
                        config['gateway_authenticator_id'] = authenticator_id
                        config['gateway_authenticator'] = result
                        return {'success': True, 'action': 'updated', 'error': None}
                    except GatewayAPIError as e:
                        self._write_output(f'✗ Failed to update authenticator: {e.message}', 'error')
                        if e.response_data:
                            self._write_output(f'  Details: {e.response_data}', 'error')
                        return {'success': False, 'action': 'update_failed', 'error': e.message}
            else:
                # Authenticator doesn't exist, create it
                self._write_output('Creating new authenticator...')

                # Create the authenticator
                result = self.gateway_client.create_authenticator(gateway_config)

                self._write_output(f'✓ Successfully created authenticator with ID: {result.get("id")}', 'success')

                # Store the result for potential mapper creation later
                config['gateway_authenticator_id'] = result.get('id')
                config['gateway_authenticator'] = result
                return {'success': True, 'action': 'created', 'error': None}

        except GatewayAPIError as e:
            self._write_output(f'✗ Failed to submit authenticator: {e.message}', 'error')
            if e.response_data:
                self._write_output(f'  Details: {e.response_data}', 'error')
            return {'success': False, 'action': 'failed', 'error': e.message}
        except Exception as e:
            self._write_output(f'✗ Unexpected error submitting authenticator: {str(e)}', 'error')
            return {'success': False, 'action': 'failed', 'error': str(e)}
    def _authenticator_configs_match(self, existing_auth, new_config, ignore_keys=None):
        """
        Compare existing authenticator configuration with new configuration.

        Args:
            existing_auth: Existing authenticator data from Gateway
            new_config: New authenticator configuration to be created
            ignore_keys: List of configuration keys to ignore during comparison
                         (e.g., ['CALLBACK_URL'] for auto-generated fields)

        Returns:
            tuple: (bool indicating whether the configurations match, list of human-readable differences)
        """
        if ignore_keys is None:
            ignore_keys = []

        # Add encrypted fields to ignore_keys if force flag is not set
        # This prevents secrets from being updated unless explicitly forced
        effective_ignore_keys = ignore_keys.copy()
        if not self.force:
            effective_ignore_keys.extend(self.encrypted_fields)

        # Keep track of the differences between the existing and the new configuration
        # Logging them makes debugging much easier
        differences = []

        # Use independent checks (not elif) so every difference is recorded, not just the first one found
        if existing_auth.get('name') != new_config.get('name'):
            differences.append(f'    name: existing="{existing_auth.get("name")}" vs new="{new_config.get("name")}"')
        if existing_auth.get('type') != new_config.get('type'):
            differences.append(f'    type: existing="{existing_auth.get("type")}" vs new="{new_config.get("type")}"')
        if existing_auth.get('enabled') != new_config.get('enabled'):
            differences.append(f'    enabled: existing="{existing_auth.get("enabled")}" vs new="{new_config.get("enabled")}"')
        if existing_auth.get('create_objects') != new_config.get('create_objects'):
            differences.append(f'    create_objects: existing="{existing_auth.get("create_objects")}" vs new="{new_config.get("create_objects")}"')
        if existing_auth.get('remove_users') != new_config.get('remove_users'):
            differences.append(f'    remove_users: existing="{existing_auth.get("remove_users")}" vs new="{new_config.get("remove_users")}"')

        # Compare configuration section
        existing_config = existing_auth.get('configuration', {})
        new_config_section = new_config.get('configuration', {})

        # Helper function to check if a key should be ignored
        def should_ignore_key(config_key):
            return config_key in effective_ignore_keys

        # Check if all keys in new config exist in existing config with same values
        for key, value in new_config_section.items():
            if should_ignore_key(key):
                continue
            if key not in existing_config:
                differences.append(f'    {key}: existing=<missing> vs new="{value}"')
            elif existing_config[key] != value:
                differences.append(f'    {key}: existing="{existing_config.get(key)}" vs new="{value}"')

        # Check if existing config has extra keys that new config doesn't have
        # (this might indicate configuration drift), but ignore keys in ignore_keys
        for key in existing_config:
            if should_ignore_key(key):
                continue
            if key not in new_config_section:
                differences.append(f'    {key}: existing="{existing_config.get(key)}" vs new=<missing>')

        return len(differences) == 0, differences
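    # A sketch of the comparison output, with made-up values: if the existing
    # authenticator has enabled=True while the new config has enabled=False, and
    # its configuration lacks an API_URL present in the new config, the returned
    # tuple would be:
    #
    #     (False, ['    enabled: existing="True" vs new="False"',
    #              '    API_URL: existing=<missing> vs new="https://ghe.example.com/api/v3"'])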
    def _compare_mapper_lists(self, existing_mappers, new_mappers, ignore_keys=None):
        """
        Compare existing and new mapper lists to determine which need updates vs creation.

        Args:
            existing_mappers: List of existing mapper configurations from Gateway
            new_mappers: List of new mapper configurations to be created/updated
            ignore_keys: List of keys to ignore during comparison (e.g., auto-generated fields)

        Returns:
            tuple: (mappers_to_update, mappers_to_create)
                mappers_to_update: List of tuples (existing_mapper, new_mapper) for updates
                mappers_to_create: List of new_mapper configs that don't match any existing
        """
        if ignore_keys is None:
            ignore_keys = []

        mappers_to_update = []
        mappers_to_create = []

        for new_mapper in new_mappers:
            matched_existing = None

            # Try to find a matching existing mapper
            for existing_mapper in existing_mappers:
                if self._mappers_match_structurally(existing_mapper, new_mapper):
                    matched_existing = existing_mapper
                    break

            if matched_existing:
                # Check if the configuration actually differs (ignoring auto-generated fields)
                if not self._mapper_configs_match(matched_existing, new_mapper, ignore_keys):
                    mappers_to_update.append((matched_existing, new_mapper))
                # If configs match exactly, no action needed (mapper is up to date)
            else:
                # No matching existing mapper found, needs to be created
                mappers_to_create.append(new_mapper)

        return mappers_to_update, mappers_to_create
    def _mappers_match_structurally(self, existing_mapper, new_mapper):
        """
        Check if two mappers match structurally, i.e. whether they represent the
        same logical mapping. Currently the mapper name is the identifying field.

        Args:
            existing_mapper: Existing mapper configuration from Gateway
            new_mapper: New mapper configuration

        Returns:
            bool: True if mappers represent the same logical mapping
        """

        # Compare key structural fields that identify the same logical mapper
        structural_fields = ['name']

        for field in structural_fields:
            if existing_mapper.get(field) != new_mapper.get(field):
                return False

        return True
    def _mapper_configs_match(self, existing_mapper, new_mapper, ignore_keys=None):
        """
        Compare mapper configurations to check if they are identical.

        Args:
            existing_mapper: Existing mapper configuration from Gateway
            new_mapper: New mapper configuration
            ignore_keys: List of keys to ignore during comparison

        Returns:
            bool: True if configurations match, False otherwise
        """
        if ignore_keys is None:
            ignore_keys = []

        # Helper function to check if a key should be ignored
        def should_ignore_key(config_key):
            return config_key in ignore_keys

        # Compare all mapper fields except ignored ones
        all_keys = set(existing_mapper.keys()) | set(new_mapper.keys())

        for key in all_keys:
            if should_ignore_key(key):
                continue

            existing_value = existing_mapper.get(key)
            new_value = new_mapper.get(key)

            if existing_value != new_value:
                return False

        return True
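    # A minimal sketch of the comparison flow, with hypothetical mapper data:
    #
    #     existing = [{'name': 'Org-Admins', 'map_type': 'organization', 'order': 1}]
    #     new = [{'name': 'Org-Admins', 'map_type': 'organization', 'order': 2},
    #            {'name': 'Team-Devs', 'map_type': 'team', 'order': 3}]
    #     self._compare_mapper_lists(existing, new)
    #
    # returns ([(existing[0], new[0])], [new[1]]): the first mapper matches by
    # name but differs in 'order' (update), the second has no match (create).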
    def _process_gateway_mappers(self, config):
        """Process authenticator mappers in Gateway from AWX config - create or update as needed."""
        authenticator_id = config.get('gateway_authenticator_id')
        if not authenticator_id:
            self._write_output(f'No authenticator ID found for {config["category"]}, skipping mappers', 'error')
            return {'created': 0, 'updated': 0, 'failed': 0}

        category = config['category']
        org_mappers = config.get('org_mappers', [])
        team_mappers = config.get('team_mappers', [])
        role_mappers = config.get('role_mappers', [])
        allow_mappers = config.get('allow_mappers', [])
        all_new_mappers = org_mappers + team_mappers + role_mappers + allow_mappers

        if len(all_new_mappers) == 0:
            self._write_output(f'No mappers to process for {category} authenticator')
            return {'created': 0, 'updated': 0, 'failed': 0}

        self._write_output(f'\n--- Processing mappers for {category} authenticator (ID: {authenticator_id}) ---')
        self._write_output(f'Organization mappers: {len(org_mappers)}')
        self._write_output(f'Team mappers: {len(team_mappers)}')
        self._write_output(f'Role mappers: {len(role_mappers)}')
        self._write_output(f'Allow mappers: {len(allow_mappers)}')

        # Get existing mappers from Gateway
        try:
            existing_mappers = self.gateway_client.get_authenticator_maps(authenticator_id)
        except GatewayAPIError as e:
            self._write_output(f'Failed to retrieve existing mappers: {e.message}', 'error')
            return {'created': 0, 'updated': 0, 'failed': len(all_new_mappers)}

        # Define mapper-specific ignore keys (can be overridden by subclasses)
        ignore_keys = self._get_mapper_ignore_keys()

        # Compare existing vs new mappers
        mappers_to_update, mappers_to_create = self._compare_mapper_lists(existing_mappers, all_new_mappers, ignore_keys)

        self._write_output(f'Mappers to create: {len(mappers_to_create)}')
        self._write_output(f'Mappers to update: {len(mappers_to_update)}')

        created_count = 0
        updated_count = 0
        failed_count = 0

        # Process updates
        for existing_mapper, new_mapper in mappers_to_update:
            if self._update_single_mapper(existing_mapper, new_mapper):
                updated_count += 1
            else:
                failed_count += 1

        # Process creations
        for new_mapper in mappers_to_create:
            mapper_type = new_mapper.get('map_type', 'unknown')
            if self._create_single_mapper(authenticator_id, new_mapper, mapper_type):
                created_count += 1
            else:
                failed_count += 1

        # Summary
        self._write_output(f'Mappers created: {created_count}, updated: {updated_count}, failed: {failed_count}')
        return {'created': created_count, 'updated': updated_count, 'failed': failed_count}
    def _get_mapper_ignore_keys(self):
        """
        Get list of mapper keys to ignore during comparison.
        Can be overridden by subclasses for mapper-specific ignore keys.

        Returns:
            list: List of keys to ignore (e.g., auto-generated fields)
        """
        return ['id', 'authenticator', 'created', 'modified', 'summary_fields', 'modified_by', 'created_by', 'related', 'url']
    def _update_single_mapper(self, existing_mapper, new_mapper):
        """Update a single mapper in Gateway.

        Args:
            existing_mapper: Existing mapper data from Gateway
            new_mapper: New mapper configuration to update to

        Returns:
            bool: True if mapper was updated successfully, False otherwise
        """
        try:
            mapper_id = existing_mapper.get('id')
            if not mapper_id:
                self._write_output('  ✗ Existing mapper missing ID, cannot update', 'error')
                return False

            # Prepare update config - don't include fields that shouldn't be updated
            update_config = new_mapper.copy()

            # Remove fields that shouldn't be updated (read-only or auto-generated)
            fields_to_remove = ['id', 'authenticator', 'created', 'modified']
            for field in fields_to_remove:
                update_config.pop(field, None)

            # Update the mapper
            self.gateway_client.update_authenticator_map(mapper_id, update_config)

            mapper_name = new_mapper.get('name', 'Unknown')
            self._write_output(f'  ✓ Updated mapper: {mapper_name}', 'success')
            return True

        except GatewayAPIError as e:
            mapper_name = new_mapper.get('name', 'Unknown')
            self._write_output(f'  ✗ Failed to update mapper "{mapper_name}": {e.message}', 'error')
            if e.response_data:
                self._write_output(f'    Details: {e.response_data}', 'error')
            return False
        except Exception as e:
            mapper_name = new_mapper.get('name', 'Unknown')
            self._write_output(f'  ✗ Unexpected error updating mapper "{mapper_name}": {str(e)}', 'error')
            return False
    def _create_single_mapper(self, authenticator_id, mapper_config, mapper_type):
        """Create a single mapper in Gateway."""
        try:
            # Update the mapper config with the correct authenticator ID
            mapper_config = mapper_config.copy()  # Don't modify the original
            mapper_config['authenticator'] = authenticator_id

            # Create the mapper
            self.gateway_client.create_authenticator_map(authenticator_id, mapper_config)

            mapper_name = mapper_config.get('name', 'Unknown')
            self._write_output(f'  ✓ Created {mapper_type} mapper: {mapper_name}', 'success')
            return True

        except GatewayAPIError as e:
            mapper_name = mapper_config.get('name', 'Unknown')
            self._write_output(f'  ✗ Failed to create {mapper_type} mapper "{mapper_name}": {e.message}', 'error')
            if e.response_data:
                self._write_output(f'    Details: {e.response_data}', 'error')
            return False
        except Exception as e:
            mapper_name = mapper_config.get('name', 'Unknown')
            self._write_output(f'  ✗ Unexpected error creating {mapper_type} mapper "{mapper_name}": {str(e)}', 'error')
            return False
    def get_social_org_map(self, authenticator_setting_name=None):
        """
        Get social auth organization map with fallback to global setting.

        Args:
            authenticator_setting_name: Name of the authenticator-specific organization map setting
                                        (e.g., 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP')

        Returns:
            dict: Organization mapping configuration, with fallback to global setting
        """
        # Try authenticator-specific setting first
        if authenticator_setting_name:
            if authenticator_map := getattr(settings, authenticator_setting_name, None):
                return authenticator_map

        # Fall back to global setting
        global_map = getattr(settings, 'SOCIAL_AUTH_ORGANIZATION_MAP', {})
        return global_map
    def get_social_team_map(self, authenticator_setting_name=None):
        """
        Get social auth team map with fallback to global setting.

        Args:
            authenticator_setting_name: Name of the authenticator-specific team map setting
                                        (e.g., 'SOCIAL_AUTH_GITHUB_TEAM_MAP')

        Returns:
            dict: Team mapping configuration, with fallback to global setting
        """
        # Try authenticator-specific setting first
        if authenticator_setting_name:
            if authenticator_map := getattr(settings, authenticator_setting_name, None):
                return authenticator_map

        # Fall back to global setting
        global_map = getattr(settings, 'SOCIAL_AUTH_TEAM_MAP', {})
        return global_map
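    # Fallback behavior sketch, with hypothetical settings: if
    # SOCIAL_AUTH_GITHUB_TEAM_MAP is unset and SOCIAL_AUTH_TEAM_MAP is
    # {'Devs': {'organization': 'Default', 'users': True}}, then
    # get_social_team_map('SOCIAL_AUTH_GITHUB_TEAM_MAP') returns the global
    # {'Devs': ...} map; an authenticator-specific map, when set, always wins.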
    def handle_login_override(self, config, valid_login_urls):
        """
        Handle LOGIN_REDIRECT_OVERRIDE setting for this authenticator.

        This method checks if the login_redirect_override from the config matches
        any of the provided valid_login_urls. If it matches, it updates the
        LOGIN_REDIRECT_OVERRIDE setting in Gateway with the new authenticator's
        URL and sets the class flag to indicate it was handled.

        Args:
            config: Configuration dictionary containing:
                - login_redirect_override: The current LOGIN_REDIRECT_OVERRIDE value
                - gateway_authenticator: The created/updated authenticator info
            valid_login_urls: List of URL patterns to match against
        """
        # Check if another migrator has already handled login redirect override
        if BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator:
            raise RuntimeError("LOGIN_REDIRECT_OVERRIDE has already been handled by another migrator")

        login_redirect_override = config.get('login_redirect_override')
        if not login_redirect_override:
            return

        # Check if the login_redirect_override matches any of the provided valid URLs
        url_matches = False
        parsed_redirect = urlparse(login_redirect_override)
        self.redirect_query_dict = parse_qs(parsed_redirect.query, keep_blank_values=True) if parsed_redirect.query else {}

        for valid_url in valid_login_urls:
            parsed_valid = urlparse(valid_url)

            # Compare path: redirect path should match or contain the valid path at proper boundaries
            if parsed_redirect.path == parsed_valid.path:
                path_matches = True
            elif parsed_redirect.path.startswith(parsed_valid.path):
                # Ensure the match is at a path boundary (followed by '/' or end of string)
                next_char_pos = len(parsed_valid.path)
                if next_char_pos >= len(parsed_redirect.path) or parsed_redirect.path[next_char_pos] in ['/', '?']:
                    path_matches = True
                else:
                    path_matches = False
            else:
                path_matches = False

            # Compare query: if valid URL has query params, they should be present in redirect URL
            query_matches = True
            if parsed_valid.query:
                # Parse query parameters for both URLs
                valid_params = parse_qs(parsed_valid.query, keep_blank_values=True)

                # All valid URL query params must be present in redirect URL with same values
                query_matches = all(param in self.redirect_query_dict and self.redirect_query_dict[param] == values for param, values in valid_params.items())

            if path_matches and query_matches:
                url_matches = True
                break

        if not url_matches:
            return

        # Extract the created authenticator from config
        gateway_authenticator = config.get('gateway_authenticator')
        if not gateway_authenticator:
            return

        sso_login_url = gateway_authenticator.get('sso_login_url')
        if not sso_login_url:
            return

        # Compute the new LOGIN_REDIRECT_OVERRIDE URL with the Gateway URL
        gateway_base_url = self.gateway_client.get_base_url()
        parsed_sso = urlparse(sso_login_url)
        parsed_gw = urlparse(gateway_base_url)
        updated_query = self._updated_query_string(parsed_sso)
        complete_url = parsed_redirect._replace(scheme=parsed_gw.scheme, path=parsed_sso.path, netloc=parsed_gw.netloc, query=updated_query).geturl()
        self._write_output(f'LOGIN_REDIRECT_OVERRIDE will be updated to: {complete_url}')

        # Store the new URL in class variable for settings migrator to use
        BaseAuthenticatorMigrator.login_redirect_override_new_url = complete_url

        # Set the class-level flag to indicate LOGIN_REDIRECT_OVERRIDE was handled by a migrator
        BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator = True
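    # Worked example with made-up values: LOGIN_REDIRECT_OVERRIDE =
    # 'https://awx.example.com/sso/login/github/?next=home' matches the valid URL
    # '/sso/login/github' at a path boundary. Assuming a Gateway base URL of
    # 'https://gw.example.com' and an sso_login_url of
    # '/api/gateway/social/login/aap-github-github/' (hypothetical), the computed
    # override becomes:
    #
    #     https://gw.example.com/api/gateway/social/login/aap-github-github/?next=home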
    def _updated_query_string(self, parsed_sso):
        if parsed_sso.query:
            parsed_sso_dict = parse_qs(parsed_sso.query, keep_blank_values=True)
        else:
            parsed_sso_dict = {}

        result = {}
        for k, v in self.redirect_query_dict.items():
            if k in self.KEYS_TO_PRESERVE and k in parsed_sso_dict:
                v = parsed_sso_dict[k]

            if isinstance(v, list) and len(v) == 1:
                result[k] = v[0]
            else:
                result[k] = v

        return urlencode(result, doseq=True) if result else ""
    def _write_output(self, message, style=None):
        """Write output message if command is available."""
        if self.command:
            if style == 'success':
                self.command.stdout.write(self.command.style.SUCCESS(message))
            elif style == 'warning':
                self.command.stdout.write(self.command.style.WARNING(message))
            elif style == 'error':
                self.command.stdout.write(self.command.style.ERROR(message))
            else:
                self.command.stdout.write(message)
217
awx/sso/utils/github_migrator.py
Normal file
@ -0,0 +1,217 @@
"""
|
||||||
|
GitHub authenticator migrator.
|
||||||
|
|
||||||
|
This module handles the migration of GitHub authenticators from AWX to Gateway.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from awx.conf import settings_registry
|
||||||
|
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
|
||||||
|
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class GitHubMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of GitHub authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "GitHub"
    def get_controller_config(self):
        """
        Export all GitHub authenticators. A GitHub authenticator is only exported if both
        key and secret are defined; otherwise it will be skipped.

        Returns:
            list: List of configured GitHub authentication providers with their settings
        """
        github_categories = ['github', 'github-org', 'github-team', 'github-enterprise', 'github-enterprise-org', 'github-enterprise-team']
        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)

        found_configs = []

        for category in github_categories:
            try:
                category_settings = settings_registry.get_registered_settings(category_slug=category)
                if category_settings:
                    config_data = {}

                    key_setting = None
                    secret_setting = None

                    # Ensure category_settings is iterable and contains strings
                    if isinstance(category_settings, re.Pattern) or not hasattr(category_settings, '__iter__') or isinstance(category_settings, str):
                        continue

                    for setting_name in category_settings:
                        # Skip if setting_name is not a string (e.g., regex pattern)
                        if not isinstance(setting_name, str):
                            continue
                        if setting_name.endswith('_KEY'):
                            key_setting = setting_name
                        elif setting_name.endswith('_SECRET'):
                            secret_setting = setting_name

                    # Skip this category if KEY or SECRET is missing or empty
                    if not key_setting or not secret_setting:
                        continue

                    key_value = getattr(settings, key_setting, None)
                    secret_value = getattr(settings, secret_setting, None)

                    # Skip this category if the OAuth2 key and/or secret are not configured
                    if not key_value or not secret_value:
                        continue

                    # If we have both key and secret, collect all settings
                    org_map_setting_name = None
                    team_map_setting_name = None

                    for setting_name in category_settings:
                        # Skip if setting_name is not a string (e.g., regex pattern)
                        if not isinstance(setting_name, str):
                            continue
                        value = getattr(settings, setting_name, None)
                        config_data[setting_name] = value

                        # Capture org and team map setting names for special processing
                        if setting_name.endswith('_ORGANIZATION_MAP'):
                            org_map_setting_name = setting_name
                        elif setting_name.endswith('_TEAM_MAP'):
                            team_map_setting_name = setting_name

                    # Get org and team mappings using the new fallback functions
                    org_map_value = self.get_social_org_map(org_map_setting_name) if org_map_setting_name else {}
                    team_map_value = self.get_social_team_map(team_map_setting_name) if team_map_setting_name else {}

                    # Convert GitHub org and team mappings from AWX to the Gateway format
                    # Start with order 1 and maintain sequence across both org and team mappers
                    org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
                    team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)

                    found_configs.append(
                        {
                            'category': category,
                            'settings': config_data,
                            'org_mappers': org_mappers,
                            'team_mappers': team_mappers,
                            'login_redirect_override': login_redirect_override,
                        }
                    )

            except Exception as e:
                raise Exception(f'Could not retrieve {category} settings: {str(e)}')

        return found_configs
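    # Shape of a returned entry, with made-up values for illustration:
    #
    #     {
    #         'category': 'github-org',
    #         'settings': {'SOCIAL_AUTH_GITHUB_ORG_KEY': 'abc123', ...},
    #         'org_mappers': [...],
    #         'team_mappers': [...],
    #         'login_redirect_override': None,
    #     }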
    def create_gateway_authenticator(self, config):
        """Create a GitHub/OIDC authenticator in Gateway."""
        category = config['category']
        settings = config['settings']

        # Extract the OAuth2 credentials
        key_value = None
        secret_value = None

        for setting_name, value in settings.items():
            if setting_name.endswith('_KEY') and value:
                key_value = value
            elif setting_name.endswith('_SECRET') and value:
                secret_value = value

        if not key_value or not secret_value:
            self._write_output(f'Skipping {category}: missing OAuth2 credentials', 'warning')
            return {'success': False, 'action': 'skipped', 'error': 'Missing OAuth2 credentials'}

        # Generate authenticator name and slug
        authenticator_name = category
        authenticator_slug = self._generate_authenticator_slug('github', category)

        # Map AWX category to Gateway authenticator type
        type_mapping = {
            'github': 'ansible_base.authentication.authenticator_plugins.github',
            'github-org': 'ansible_base.authentication.authenticator_plugins.github_org',
            'github-team': 'ansible_base.authentication.authenticator_plugins.github_team',
            'github-enterprise': 'ansible_base.authentication.authenticator_plugins.github_enterprise',
            'github-enterprise-org': 'ansible_base.authentication.authenticator_plugins.github_enterprise_org',
            'github-enterprise-team': 'ansible_base.authentication.authenticator_plugins.github_enterprise_team',
        }

        authenticator_type = type_mapping.get(category)
        if not authenticator_type:
            self._write_output(f'Unknown category {category}, skipping', 'warning')
            return {'success': False, 'action': 'skipped', 'error': f'Unknown category {category}'}

        self._write_output(f'\n--- Processing {category} authenticator ---')
        self._write_output(f'Name: {authenticator_name}')
        self._write_output(f'Slug: {authenticator_slug}')
        self._write_output(f'Type: {authenticator_type}')
        self._write_output(f'Client ID: {key_value}')
        self._write_output(f'Client Secret: {"*" * 8}')

        # Build Gateway authenticator configuration
        gateway_config = {
            "name": authenticator_name,
            "slug": authenticator_slug,
            "type": authenticator_type,
            "enabled": False,
            "create_objects": True,  # Allow Gateway to create users/orgs/teams
            "remove_users": False,  # Don't remove users by default
            "configuration": {"KEY": key_value, "SECRET": secret_value},
        }

        # Add any additional configuration based on AWX settings
        additional_config = self._build_additional_config(category, settings)
        gateway_config["configuration"].update(additional_config)

        # GitHub authenticators have auto-generated fields that should be ignored during comparison:
        # CALLBACK_URL - automatically created by Gateway
        # SCOPE - relevant for mappers with team/org requirement, allows reading the org or team
        # (SECRET is encrypted in Gateway and cannot be compared against the decrypted value;
        # the base class ignores it via encrypted_fields unless the force flag is set)
        ignore_keys = ['CALLBACK_URL', 'SCOPE']

        # Submit the authenticator (create or update as needed)
        result = self.submit_authenticator(gateway_config, ignore_keys, config)

        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
        valid_login_urls = [f'/sso/login/{category}', f'/sso/login/{category}/']
        self.handle_login_override(config, valid_login_urls)

        return result
    def _build_additional_config(self, category, settings):
        """Build additional configuration for specific authenticator types."""
        additional_config = {}

        # Add scope configuration if present
        for setting_name, value in settings.items():
            if setting_name.endswith('_SCOPE') and value:
                additional_config['SCOPE'] = value
                break

        # Add GitHub Enterprise URL if present
        if 'enterprise' in category:
            for setting_name, value in settings.items():
                if setting_name.endswith('_API_URL') and value:
                    additional_config['API_URL'] = value
                elif setting_name.endswith('_URL') and value:
                    additional_config['URL'] = value

        # Add organization name for org-specific authenticators
        if 'org' in category:
            for setting_name, value in settings.items():
                if setting_name.endswith('_NAME') and value:
                    additional_config['NAME'] = value
                    break

        # Add team ID for team-specific authenticators
        if 'team' in category:
            for setting_name, value in settings.items():
                if setting_name.endswith('_ID') and value:
                    additional_config['ID'] = value
                    break

        return additional_config
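    # Illustration with hypothetical settings for a 'github-enterprise-org'
    # category: given ..._URL='https://ghe.example.com',
    # ..._API_URL='https://ghe.example.com/api/v3', and ..._NAME='my-org',
    # this yields {'URL': 'https://ghe.example.com',
    # 'API_URL': 'https://ghe.example.com/api/v3', 'NAME': 'my-org'}.
    # The _API_URL check runs before the broader _URL suffix check so both keys
    # land in the right slots.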
102
awx/sso/utils/google_oauth2_migrator.py
Normal file
@ -0,0 +1,102 @@
"""
|
||||||
|
Google OAuth2 authenticator migrator.
|
||||||
|
|
||||||
|
This module handles the migration of Google OAuth2 authenticators from AWX to Gateway.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
|
||||||
|
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
|
||||||
|
|
||||||
|
|
||||||
|
class GoogleOAuth2Migrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of Google OAuth2 authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "Google OAuth2"
    def get_controller_config(self):
        """
        Export Google OAuth2 authenticators. A Google OAuth2 authenticator is only
        exported if SOCIAL_AUTH_GOOGLE_OAUTH2_KEY is configured.

        Returns:
            list: List of configured Google OAuth2 authentication providers with their settings
        """
        from django.conf import settings

        if not getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None):
            return []

        config_data = {
            'SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL,
            'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY,
            'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET,
            'SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE,
        }

        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)

        return [
            {
                "category": self.get_authenticator_type(),
                "settings": config_data,
                "login_redirect_override": login_redirect_override,
            }
        ]
    def _build_mappers(self):
        org_map = self.get_social_org_map('SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP')
        team_map = self.get_social_team_map('SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP')

        mappers, order = org_map_to_gateway_format(org_map, 1)
        team_mappers, _ = team_map_to_gateway_format(team_map, order)

        mappers.extend(team_mappers)

        return mappers
    def create_gateway_authenticator(self, config):
        """Create a Google OAuth2 authenticator in Gateway."""
        category = config["category"]
        config_settings = config['settings']

        authenticator_slug = self._generate_authenticator_slug('google-oauth2', category.replace(" ", "-"))

        self._write_output(f"\n--- Processing {category} authenticator ---")

        gateway_config = {
            "name": "google",
            "slug": authenticator_slug,
            "type": "ansible_base.authentication.authenticator_plugins.google_oauth2",
            "enabled": False,
            "create_objects": True,  # Allow Gateway to create users/orgs/teams
            "remove_users": False,  # Don't remove users by default
            "configuration": {
                "KEY": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'),
                "SECRET": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'),
                "REDIRECT_STATE": True,
            },
            "mappers": self._build_mappers(),
        }

        ignore_keys = ["ACCESS_TOKEN_METHOD", "REVOKE_TOKEN_METHOD"]
        optional = {
            "CALLBACK_URL": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL'),
            "SCOPE": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'),
        }
        for key, value in optional.items():
            if value:
                gateway_config["configuration"][key] = value
            else:
                ignore_keys.append(key)

        result = self.submit_authenticator(gateway_config, ignore_keys, config)

        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
        valid_login_urls = ['/sso/login/google-oauth2']
        self.handle_login_override(config, valid_login_urls)

        return result
368
awx/sso/utils/ldap_migrator.py
Normal file
@ -0,0 +1,368 @@
"""
|
||||||
|
LDAP authenticator migrator.
|
||||||
|
|
||||||
|
This module handles the migration of LDAP authenticators from AWX to Gateway.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format, role_map_to_gateway_format
|
||||||
|
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
|
||||||
|
import ldap
|
||||||
|
|
||||||
|
|
||||||
|
class LDAPMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of LDAP authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "LDAP"
    def get_controller_config(self):
        """
        Export all LDAP authenticators. An LDAP authenticator is only exported if
        SERVER_URI is configured. Otherwise it will be skipped.

        Returns:
            list: List of configured LDAP authentication providers with their settings
        """
        # AWX supports up to 6 LDAP configurations: AUTH_LDAP (default) and AUTH_LDAP_1 through AUTH_LDAP_5
        ldap_instances = [None, 1, 2, 3, 4, 5]  # None represents the default AUTH_LDAP_ configuration
        found_configs = []

        for instance in ldap_instances:
            # Build the prefix for this LDAP instance
            prefix = f"AUTH_LDAP_{instance}_" if instance is not None else "AUTH_LDAP_"
            # The authenticator category is always "ldap"
            category = "ldap"

            try:
                # Get all LDAP settings for this instance
                config_data = self._get_ldap_instance_config(prefix)
            except Exception as e:
                raise Exception(f'Could not retrieve {category} settings: {str(e)}')

            # Skip if SERVER_URI is not configured (required for LDAP to function)
            if not config_data.get('SERVER_URI'):
                continue

            # Convert organization, team, and role mappings to Gateway format
            org_map_value = config_data.get('ORGANIZATION_MAP', {})
            team_map_value = config_data.get('TEAM_MAP', {})
            role_map_value = config_data.get('USER_FLAGS_BY_GROUP', {})
            # REQUIRE_GROUP and DENY_GROUP hold group DNs (strings), so default to None rather than {}
            require_group_value = config_data.get('REQUIRE_GROUP')
            deny_group_value = config_data.get('DENY_GROUP')

            allow_mappers = []

            # Start with order 1 and maintain sequence across org, team, and role mappers
            allow_mappers, next_order = self._ldap_group_allow_to_gateway_format(allow_mappers, deny_group_value, deny=True, start_order=1)
            allow_mappers, next_order = self._ldap_group_allow_to_gateway_format(allow_mappers, require_group_value, deny=False, start_order=next_order)

            org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=next_order, auth_type='ldap')
            team_mappers, next_order = team_map_to_gateway_format(team_map_value, start_order=next_order, auth_type='ldap')
            role_mappers, _ = role_map_to_gateway_format(role_map_value, start_order=next_order)

            found_configs.append(
                {
                    'category': category,
                    'settings': config_data,
                    'org_mappers': org_mappers,
                    'team_mappers': team_mappers,
                    'role_mappers': role_mappers,
                    'allow_mappers': allow_mappers,
                }
            )

        return found_configs
    def _get_ldap_instance_config(self, prefix):
        """
        Get all LDAP configuration settings for a specific instance.

        Args:
            prefix: The setting prefix (e.g., 'AUTH_LDAP_' or 'AUTH_LDAP_1_')

        Returns:
            dict: Dictionary of LDAP configuration settings
        """
        # Define all LDAP setting keys
        ldap_keys = [
            'SERVER_URI',  # Required: LDAP server URI(s)
            'BIND_DN',  # Optional: Bind DN for authentication
            'BIND_PASSWORD',  # Optional: Bind password
            'START_TLS',  # Optional: Enable TLS
            'CONNECTION_OPTIONS',  # Optional: LDAP connection options
            'USER_SEARCH',  # Optional: User search configuration
            'USER_DN_TEMPLATE',  # Optional: User DN template
            'USER_ATTR_MAP',  # Optional: User attribute mapping
            'GROUP_SEARCH',  # Optional: Group search configuration
            'GROUP_TYPE',  # Optional: Group type class
            'GROUP_TYPE_PARAMS',  # Optional: Group type parameters
            'REQUIRE_GROUP',  # Optional: Required group DN
            'DENY_GROUP',  # Optional: Denied group DN
            'USER_FLAGS_BY_GROUP',  # Optional: User flags mapping
            'ORGANIZATION_MAP',  # Optional: Organization mapping
            'TEAM_MAP',  # Optional: Team mapping
        ]

        config_data = {}

        for key in ldap_keys:
            setting_name = f"{prefix}{key}"
            value = getattr(settings, setting_name, None)

            # Handle special field types that need conversion
            if key == 'GROUP_TYPE' and value:
                # Convert GROUP_TYPE class to string representation
                config_data[key] = type(value).__name__
            elif key == 'SERVER_URI' and value:
                # Convert SERVER_URI to list format if it's a comma-separated string
                config_data[key] = [uri.strip() for uri in value.split(',')]
            elif key in ['USER_SEARCH', 'GROUP_SEARCH'] and value:
                # Convert LDAPSearch objects to list format [base_dn, scope, filter]
                if hasattr(value, 'base_dn') and hasattr(value, 'filterstr'):
                    # Get the actual scope instead of hardcoding SCOPE_SUBTREE;
                    # ldap.SCOPE_SUBTREE (2) is the default
                    scope = getattr(value, 'scope', ldap.SCOPE_SUBTREE)
                    scope_name = {ldap.SCOPE_BASE: 'SCOPE_BASE', ldap.SCOPE_ONELEVEL: 'SCOPE_ONELEVEL', ldap.SCOPE_SUBTREE: 'SCOPE_SUBTREE'}.get(
                        scope, 'SCOPE_SUBTREE'
                    )
                    config_data[key] = [value.base_dn, scope_name, value.filterstr]
                else:
                    config_data[key] = value
            elif key in ['USER_ATTR_MAP', 'GROUP_TYPE_PARAMS', 'USER_FLAGS_BY_GROUP', 'ORGANIZATION_MAP', 'TEAM_MAP']:
                # Ensure dict fields are properly handled
                config_data[key] = value if value is not None else {}
            else:
                # Store the value as-is for other fields (including CONNECTION_OPTIONS,
                # which is converted later by _convert_ldap_connection_options)
                config_data[key] = value

        return config_data
    def create_gateway_authenticator(self, config):
        """Create an LDAP authenticator in Gateway."""
        category = config['category']
        settings = config['settings']

        authenticator_slug = self._generate_authenticator_slug('ldap', category)

        # Build the gateway payload
        gateway_config = {
            'name': category,
            'slug': authenticator_slug,
            'type': 'ansible_base.authentication.authenticator_plugins.ldap',
            'create_objects': True,
            'remove_users': False,
            'enabled': True,
            'configuration': self._build_ldap_configuration(settings),
        }

        self._write_output(f'Creating LDAP authenticator: {gateway_config["name"]}')

        # BIND_PASSWORD is an encrypted value that can't be compared; the base class
        # ignores it via encrypted_fields unless the force flag is set, so no extra
        # ignore keys are needed here
        ignore_keys = []

        # Submit the authenticator using the base class method
        return self.submit_authenticator(gateway_config, config=config, ignore_keys=ignore_keys)
    def _build_ldap_configuration(self, settings):
        """Build the LDAP configuration section for Gateway."""
        config = {}

        # Server URI is required
        if settings.get('SERVER_URI'):
            config['SERVER_URI'] = settings['SERVER_URI']

        # Authentication settings
        if settings.get('BIND_DN'):
            config['BIND_DN'] = settings['BIND_DN']
        if settings.get('BIND_PASSWORD'):
            config['BIND_PASSWORD'] = settings['BIND_PASSWORD']

        # TLS settings
        if settings.get('START_TLS') is not None:
            config['START_TLS'] = settings['START_TLS']

        # User search configuration
        if settings.get('USER_SEARCH'):
            config['USER_SEARCH'] = settings['USER_SEARCH']

        # User attribute mapping
        if settings.get('USER_ATTR_MAP'):
            config['USER_ATTR_MAP'] = settings['USER_ATTR_MAP']

        # Group search configuration
        if settings.get('GROUP_SEARCH'):
            config['GROUP_SEARCH'] = settings['GROUP_SEARCH']

        # Group type and parameters
        if settings.get('GROUP_TYPE'):
            config['GROUP_TYPE'] = settings['GROUP_TYPE']
        if settings.get('GROUP_TYPE_PARAMS'):
            config['GROUP_TYPE_PARAMS'] = settings['GROUP_TYPE_PARAMS']

        # Connection options - convert numeric LDAP constants to string keys
        if settings.get('CONNECTION_OPTIONS'):
            config['CONNECTION_OPTIONS'] = self._convert_ldap_connection_options(settings['CONNECTION_OPTIONS'])

        # User DN template
        if settings.get('USER_DN_TEMPLATE'):
            config['USER_DN_TEMPLATE'] = settings['USER_DN_TEMPLATE']

        # REQUIRE_GROUP and DENY_GROUP are handled as allow mappers, not included in config
        # USER_FLAGS_BY_GROUP is handled as role mappers, not included in config

        return config
    def _convert_ldap_connection_options(self, connection_options):
        """
        Convert numeric LDAP connection option constants to their string representations.
        Uses the actual constants from the python-ldap library.

        Args:
            connection_options: Dictionary with numeric LDAP option keys

        Returns:
            dict: Dictionary with string LDAP option keys
        """
        # Comprehensive mapping using LDAP constants as keys
        ldap_option_map = {
            # Basic LDAP options
            ldap.OPT_API_INFO: 'OPT_API_INFO',
            ldap.OPT_DEREF: 'OPT_DEREF',
            ldap.OPT_SIZELIMIT: 'OPT_SIZELIMIT',
            ldap.OPT_TIMELIMIT: 'OPT_TIMELIMIT',
            ldap.OPT_REFERRALS: 'OPT_REFERRALS',
            ldap.OPT_RESULT_CODE: 'OPT_RESULT_CODE',
            ldap.OPT_ERROR_NUMBER: 'OPT_ERROR_NUMBER',
            ldap.OPT_RESTART: 'OPT_RESTART',
            ldap.OPT_PROTOCOL_VERSION: 'OPT_PROTOCOL_VERSION',
            ldap.OPT_SERVER_CONTROLS: 'OPT_SERVER_CONTROLS',
            ldap.OPT_CLIENT_CONTROLS: 'OPT_CLIENT_CONTROLS',
            ldap.OPT_API_FEATURE_INFO: 'OPT_API_FEATURE_INFO',
            ldap.OPT_HOST_NAME: 'OPT_HOST_NAME',
            ldap.OPT_DESC: 'OPT_DESC',
            ldap.OPT_DIAGNOSTIC_MESSAGE: 'OPT_DIAGNOSTIC_MESSAGE',
            ldap.OPT_ERROR_STRING: 'OPT_ERROR_STRING',
            ldap.OPT_MATCHED_DN: 'OPT_MATCHED_DN',
            ldap.OPT_DEBUG_LEVEL: 'OPT_DEBUG_LEVEL',
            ldap.OPT_TIMEOUT: 'OPT_TIMEOUT',
            ldap.OPT_REFHOPLIMIT: 'OPT_REFHOPLIMIT',
            ldap.OPT_NETWORK_TIMEOUT: 'OPT_NETWORK_TIMEOUT',
            ldap.OPT_URI: 'OPT_URI',
            # TLS options
            ldap.OPT_X_TLS: 'OPT_X_TLS',
            ldap.OPT_X_TLS_CTX: 'OPT_X_TLS_CTX',
            ldap.OPT_X_TLS_CACERTFILE: 'OPT_X_TLS_CACERTFILE',
            ldap.OPT_X_TLS_CACERTDIR: 'OPT_X_TLS_CACERTDIR',
            ldap.OPT_X_TLS_CERTFILE: 'OPT_X_TLS_CERTFILE',
            ldap.OPT_X_TLS_KEYFILE: 'OPT_X_TLS_KEYFILE',
            ldap.OPT_X_TLS_REQUIRE_CERT: 'OPT_X_TLS_REQUIRE_CERT',
            ldap.OPT_X_TLS_CIPHER_SUITE: 'OPT_X_TLS_CIPHER_SUITE',
            ldap.OPT_X_TLS_RANDOM_FILE: 'OPT_X_TLS_RANDOM_FILE',
            ldap.OPT_X_TLS_DHFILE: 'OPT_X_TLS_DHFILE',
            ldap.OPT_X_TLS_NEVER: 'OPT_X_TLS_NEVER',
            ldap.OPT_X_TLS_HARD: 'OPT_X_TLS_HARD',
            ldap.OPT_X_TLS_DEMAND: 'OPT_X_TLS_DEMAND',
            ldap.OPT_X_TLS_ALLOW: 'OPT_X_TLS_ALLOW',
            ldap.OPT_X_TLS_TRY: 'OPT_X_TLS_TRY',
            ldap.OPT_X_TLS_CRL_NONE: 'OPT_X_TLS_CRL_NONE',
            ldap.OPT_X_TLS_CRL_PEER: 'OPT_X_TLS_CRL_PEER',
            ldap.OPT_X_TLS_CRL_ALL: 'OPT_X_TLS_CRL_ALL',
            # SASL options
            ldap.OPT_X_SASL_MECH: 'OPT_X_SASL_MECH',
            ldap.OPT_X_SASL_REALM: 'OPT_X_SASL_REALM',
            ldap.OPT_X_SASL_AUTHCID: 'OPT_X_SASL_AUTHCID',
            ldap.OPT_X_SASL_AUTHZID: 'OPT_X_SASL_AUTHZID',
            ldap.OPT_X_SASL_SSF: 'OPT_X_SASL_SSF',
            ldap.OPT_X_SASL_SSF_EXTERNAL: 'OPT_X_SASL_SSF_EXTERNAL',
            ldap.OPT_X_SASL_SECPROPS: 'OPT_X_SASL_SECPROPS',
            ldap.OPT_X_SASL_SSF_MIN: 'OPT_X_SASL_SSF_MIN',
            ldap.OPT_X_SASL_SSF_MAX: 'OPT_X_SASL_SSF_MAX',
        }

        # Add optional options that may not be available in all versions
        optional_options = [
            'OPT_TCP_USER_TIMEOUT',
            'OPT_DEFBASE',
            'OPT_X_TLS_VERSION',
            'OPT_X_TLS_CIPHER',
            'OPT_X_TLS_PEERCERT',
            'OPT_X_TLS_CRLCHECK',
            'OPT_X_TLS_CRLFILE',
            'OPT_X_TLS_NEWCTX',
            'OPT_X_TLS_PROTOCOL_MIN',
            'OPT_X_TLS_PACKAGE',
            'OPT_X_TLS_ECNAME',
            'OPT_X_TLS_REQUIRE_SAN',
            'OPT_X_TLS_PROTOCOL_MAX',
            'OPT_X_TLS_PROTOCOL_SSL3',
            'OPT_X_TLS_PROTOCOL_TLS1_0',
            'OPT_X_TLS_PROTOCOL_TLS1_1',
            'OPT_X_TLS_PROTOCOL_TLS1_2',
            'OPT_X_TLS_PROTOCOL_TLS1_3',
            'OPT_X_SASL_NOCANON',
            'OPT_X_SASL_USERNAME',
            'OPT_CONNECT_ASYNC',
            'OPT_X_KEEPALIVE_IDLE',
            'OPT_X_KEEPALIVE_PROBES',
            'OPT_X_KEEPALIVE_INTERVAL',
        ]

        for option_name in optional_options:
            if hasattr(ldap, option_name):
                ldap_option_map[getattr(ldap, option_name)] = option_name

        converted_options = {}

        for key, value in connection_options.items():
            if key in ldap_option_map:
                converted_options[ldap_option_map[key]] = value

        return converted_options
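    # Example: {ldap.OPT_NETWORK_TIMEOUT: 30, ldap.OPT_REFERRALS: 0} becomes
    # {'OPT_NETWORK_TIMEOUT': 30, 'OPT_REFERRALS': 0}. Numeric keys that are not
    # in the mapping are silently dropped rather than passed through.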
def _ldap_group_allow_to_gateway_format(self, result: list, ldap_group: str, deny=False, start_order=1):
|
||||||
|
"""Convert an LDAP require or deny group to a Gateway mapper
|
||||||
|
|
||||||
|
Args:
|
||||||
|
result: array to append the mapper to
|
||||||
|
ldap_group: An LDAP group query
|
||||||
|
deny: Whether the mapper denies or requires users to be in the group
|
||||||
|
start_order: Starting order value for the mappers
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple: (List of Gateway-compatible organization mappers, next_order)
|
||||||
|
"""
|
||||||
|
if ldap_group is None:
|
||||||
|
return result, start_order
|
||||||
|
|
||||||
|
if deny:
|
||||||
|
result.append(
|
||||||
|
{
|
||||||
|
"name": "LDAP-DenyGroup",
|
||||||
|
"authenticator": -1,
|
||||||
|
"map_type": "allow",
|
||||||
|
"revoke": True,
|
||||||
|
"triggers": {"groups": {"has_or": [ldap_group]}},
|
||||||
|
"order": start_order,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
result.append(
|
||||||
|
{
|
||||||
|
"name": "LDAP-RequireGroup",
|
||||||
|
"authenticator": -1,
|
||||||
|
"map_type": "allow",
|
||||||
|
"revoke": False,
|
||||||
|
"triggers": {"groups": {"has_and": [ldap_group]}},
|
||||||
|
"order": start_order,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return result, start_order + 1
|
||||||
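
For illustration, here is a minimal standalone sketch of the require/deny conversion above; `ldap_group_to_mapper` and the group DN are invented for this example and are not part of the diff:

```
def ldap_group_to_mapper(ldap_group, deny=False, order=1):
    # Deny groups revoke access on match ("has_or"); require groups grant
    # access only on match ("has_and"), mirroring the method above.
    return {
        "name": "LDAP-DenyGroup" if deny else "LDAP-RequireGroup",
        "authenticator": -1,
        "map_type": "allow",
        "revoke": deny,
        "triggers": {"groups": {("has_or" if deny else "has_and"): [ldap_group]}},
        "order": order,
    }

print(ldap_group_to_mapper("cn=admins,ou=groups,dc=example,dc=org"))
```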
113  awx/sso/utils/oidc_migrator.py  Normal file
@ -0,0 +1,113 @@
"""
Generic OIDC authenticator migrator.

This module handles the migration of generic OIDC authenticators from AWX to Gateway.
"""

from django.conf import settings

from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator


class OIDCMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of generic OIDC authenticators from AWX to Gateway.
    """

    CATEGORY = "OIDC"
    AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.oidc"

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "OIDC"

    def get_controller_config(self):
        """
        Export generic OIDC authenticators. An OIDC authenticator is only exported if
        both key and secret are defined; otherwise it is skipped.

        Returns:
            list: List of configured OIDC authentication providers with their settings
        """
        key_value = getattr(settings, "SOCIAL_AUTH_OIDC_KEY", None)
        secret_value = getattr(settings, "SOCIAL_AUTH_OIDC_SECRET", None)
        oidc_endpoint = getattr(settings, "SOCIAL_AUTH_OIDC_OIDC_ENDPOINT", None)

        # Skip if required settings are not configured
        if not key_value or not secret_value or not oidc_endpoint:
            return []

        # Get additional OIDC configuration
        verify_ssl = getattr(settings, "SOCIAL_AUTH_OIDC_VERIFY_SSL", True)

        # Get organization and team mappings
        org_map_value = self.get_social_org_map()
        team_map_value = self.get_social_team_map()

        # Convert org and team mappings from AWX to the Gateway format.
        # Start with order 1 and maintain the sequence across both org and team mappers.
        org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
        team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)

        config_data = {
            "name": "default",
            "type": self.AUTH_TYPE,
            "enabled": False,
            "create_objects": True,
            "remove_users": False,
            "configuration": {
                "OIDC_ENDPOINT": oidc_endpoint,
                "KEY": key_value,
                "SECRET": secret_value,
                "VERIFY_SSL": verify_ssl,
            },
        }

        return [
            {
                "category": self.CATEGORY,
                "settings": config_data,
                "org_mappers": org_mappers,
                "team_mappers": team_mappers,
            }
        ]

    def create_gateway_authenticator(self, config):
        """Create a generic OIDC authenticator in Gateway."""
        category = config["category"]
        config_settings = config["settings"]

        # Generate authenticator name and slug
        authenticator_name = "oidc"
        authenticator_slug = self._generate_authenticator_slug("oidc", category)

        self._write_output(f"\n--- Processing {category} authenticator ---")
        self._write_output(f"Name: {authenticator_name}")
        self._write_output(f"Slug: {authenticator_slug}")
        self._write_output(f"Type: {config_settings['type']}")

        # Build Gateway authenticator configuration
        gateway_config = {
            "name": authenticator_name,
            "slug": authenticator_slug,
            "type": config_settings["type"],
            "enabled": config_settings["enabled"],
            "create_objects": config_settings["create_objects"],
            "remove_users": config_settings["remove_users"],
            "configuration": config_settings["configuration"],
        }

        # OIDC authenticators have auto-generated fields that should be ignored during comparison:
        # CALLBACK_URL - automatically created by Gateway
        # SCOPE - defaults are set by the Gateway plugin
        # SECRET - the secret is encrypted in Gateway, so there is no way to compare the decrypted value
        ignore_keys = ['CALLBACK_URL', 'SCOPE', 'SECRET']

        # Submit the authenticator (create or update as needed)
        result = self.submit_authenticator(gateway_config, ignore_keys, config)

        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
        valid_login_urls = ['/sso/login/oidc']
        self.handle_login_override(config, valid_login_urls)

        return result
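
As a rough sketch of what the export above produces, assuming hypothetical controller settings (all values invented for illustration):

```
# Hypothetical controller settings:
#   SOCIAL_AUTH_OIDC_KEY = "my-client-id"
#   SOCIAL_AUTH_OIDC_SECRET = "my-client-secret"
#   SOCIAL_AUTH_OIDC_OIDC_ENDPOINT = "https://idp.example.org/realms/awx"
# get_controller_config() would then return approximately:
exported = [
    {
        "category": "OIDC",
        "settings": {
            "name": "default",
            "type": "ansible_base.authentication.authenticator_plugins.oidc",
            "enabled": False,
            "create_objects": True,
            "remove_users": False,
            "configuration": {
                "OIDC_ENDPOINT": "https://idp.example.org/realms/awx",
                "KEY": "my-client-id",
                "SECRET": "my-client-secret",
                "VERIFY_SSL": True,
            },
        },
        "org_mappers": [],
        "team_mappers": [],
    }
]
```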
85  awx/sso/utils/radius_migrator.py  Normal file
@ -0,0 +1,85 @@
"""
RADIUS authenticator migrator.

This module handles the migration of RADIUS authenticators from AWX to Gateway.
"""

from django.conf import settings

from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator


class RADIUSMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of RADIUS authenticators from AWX to Gateway.
    """

    CATEGORY = "RADIUS"
    AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.radius"

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "RADIUS"

    def get_controller_config(self):
        """
        Export RADIUS authenticators. A RADIUS authenticator is only exported if the
        required configuration is present.

        Returns:
            list: List of configured RADIUS authentication providers with their settings
        """
        server = getattr(settings, "RADIUS_SERVER", None)
        if not server:
            return []

        port = getattr(settings, "RADIUS_PORT", 1812)
        secret = getattr(settings, "RADIUS_SECRET", "")

        config_data = {
            "name": "default",
            "type": self.AUTH_TYPE,
            "enabled": True,
            "create_objects": True,
            "remove_users": False,
            "configuration": {
                "SERVER": server,
                "PORT": port,
                "SECRET": secret,
            },
        }

        return [
            {
                "category": self.CATEGORY,
                "settings": config_data,
            }
        ]

    def create_gateway_authenticator(self, config):
        """Create a RADIUS authenticator in Gateway."""
        category = config["category"]
        config_settings = config["settings"]

        # Generate authenticator name and slug
        authenticator_name = "radius"
        authenticator_slug = self._generate_authenticator_slug("radius", category)

        self._write_output(f"\n--- Processing {category} authenticator ---")
        self._write_output(f"Name: {authenticator_name}")
        self._write_output(f"Slug: {authenticator_slug}")
        self._write_output(f"Type: {config_settings['type']}")

        # Build Gateway authenticator configuration
        gateway_config = {
            "name": authenticator_name,
            "slug": authenticator_slug,
            "type": config_settings["type"],
            "enabled": config_settings["enabled"],
            "create_objects": config_settings["create_objects"],
            "remove_users": config_settings["remove_users"],
            "configuration": config_settings["configuration"],
        }

        # Submit the authenticator (create or update as needed)
        return self.submit_authenticator(gateway_config, config=config)
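
A minimal driver sketch for this migrator; whether the class can be constructed with no arguments depends on the base class, which is not shown in this diff, so treat the constructor call as an assumption:

```
# Assumes RADIUSMigrator() needs no constructor arguments (not shown in this diff).
migrator = RADIUSMigrator()
for cfg in migrator.get_controller_config():
    migrator.create_gateway_authenticator(cfg)
```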
308  awx/sso/utils/saml_migrator.py  Normal file
@ -0,0 +1,308 @@
"""
SAML authenticator migrator.

This module handles the migration of SAML authenticators from AWX to Gateway.
"""

from django.conf import settings

from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator

ROLE_MAPPER = {
    "is_superuser_role": {"role": None, "map_type": "is_superuser", "revoke": "remove_superusers"},
    "is_system_auditor_role": {"role": "Platform Auditor", "map_type": "role", "revoke": "remove_system_auditors"},
}

ATTRIBUTE_VALUE_MAPPER = {
    "is_superuser_attr": {"role": None, "map_type": "is_superuser", "value": "is_superuser_value", "revoke": "remove_superusers"},
    "is_system_auditor_attr": {"role": "Platform Auditor", "map_type": "role", "value": "is_system_auditor_value", "revoke": "remove_system_auditors"},
}

ORG_ATTRIBUTE_MAPPER = {
    "saml_attr": {"role": "Organization Member", "revoke": "remove"},
    "saml_admin_attr": {"role": "Organization Admin", "revoke": "remove_admins"},
}


def _split_chunks(data: str, length: int = 64) -> list[str]:
    return [data[i : i + length] for i in range(0, len(data), length)]


def _to_pem_cert(data: str) -> list[str]:
    items = ["-----BEGIN CERTIFICATE-----"]
    items += _split_chunks(data)
    items.append("-----END CERTIFICATE-----")
    return items

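# A quick sanity check of the helpers above, with a made-up certificate body:
# they re-wrap a bare base64 string into 64-character PEM lines.
#
#     >>> print("\n".join(_to_pem_cert("MIIC" + "A" * 124)))
#     -----BEGIN CERTIFICATE-----
#     MIICAAA...AAA   <- first 64 characters
#     AAA...AAA       <- remaining 64 characters
#     -----END CERTIFICATE-----
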
class SAMLMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of SAML authenticators from AWX to Gateway.
    """

    CATEGORY = "SAML"
    AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.saml"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.next_order = 1
        self.team_mappers = []

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "SAML"

    def get_controller_config(self):
        """
        Export SAML authenticators. A SAML authenticator is only exported if the
        required configuration is present.

        Returns:
            list: List of configured SAML authentication providers with their settings
        """
        found_configs = []

        enabled = False
        remove_users = True
        create_objects = getattr(settings, "SAML_AUTO_CREATE_OBJECTS", True)
        idps = getattr(settings, "SOCIAL_AUTH_SAML_ENABLED_IDPS", {})
        security_config = getattr(settings, "SOCIAL_AUTH_SAML_SECURITY_CONFIG", {})

        # Get org and team mappings using the fallback functions
        org_map_value = self.get_social_org_map("SOCIAL_AUTH_SAML_ORGANIZATION_MAP")
        team_map_value = self.get_social_team_map("SOCIAL_AUTH_SAML_TEAM_MAP")
        self.extra_data = getattr(settings, "SOCIAL_AUTH_SAML_EXTRA_DATA", [])
        self._add_to_extra_data(['Role', 'Role'])

        support_contact = getattr(settings, "SOCIAL_AUTH_SAML_SUPPORT_CONTACT", {})
        technical_contact = getattr(settings, "SOCIAL_AUTH_SAML_TECHNICAL_CONTACT", {})
        org_info = getattr(settings, "SOCIAL_AUTH_SAML_ORG_INFO", {})

        sp_private_key = getattr(settings, "SOCIAL_AUTH_SAML_SP_PRIVATE_KEY", None)
        sp_public_cert = getattr(settings, "SOCIAL_AUTH_SAML_SP_PUBLIC_CERT", None)
        sp_entity_id = getattr(settings, "SOCIAL_AUTH_SAML_SP_ENTITY_ID", None)
        sp_extra = getattr(settings, "SOCIAL_AUTH_SAML_SP_EXTRA", {})
        saml_team_attr = getattr(settings, "SOCIAL_AUTH_SAML_TEAM_ATTR", {})
        org_attr = getattr(settings, "SOCIAL_AUTH_SAML_ORGANIZATION_ATTR", {})
        user_flags_by_attr = getattr(settings, "SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR", {})
        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)

        org_mappers, self.next_order = org_map_to_gateway_format(org_map_value, start_order=self.next_order)
        self.team_mappers, self.next_order = team_map_to_gateway_format(team_map_value, start_order=self.next_order)

        self._team_attr_to_gateway_format(saml_team_attr)
        self._user_flags_by_role_to_gateway_format(user_flags_by_attr)
        self._user_flags_by_attr_value_to_gateway_format(user_flags_by_attr)
        self._org_attr_to_gateway_format(org_attr)

        for name, value in idps.items():
            config_data = {
                "name": name,
                "type": self.AUTH_TYPE,
                "enabled": enabled,
                "create_objects": create_objects,
                "remove_users": remove_users,
                "configuration": {
                    "IDP_URL": value.get("url"),
                    "IDP_X509_CERT": "\n".join(_to_pem_cert(value.get("x509cert"))),
                    "IDP_ENTITY_ID": value.get("entity_id"),
                    "IDP_ATTR_EMAIL": value.get("attr_email"),
                    "IDP_ATTR_USERNAME": value.get("attr_username"),
                    "IDP_ATTR_FIRST_NAME": value.get("attr_first_name"),
                    "IDP_ATTR_LAST_NAME": value.get("attr_last_name"),
                    "IDP_ATTR_USER_PERMANENT_ID": value.get("attr_user_permanent_id"),
                    "IDP_GROUPS": value.get("attr_groups"),
                    "SP_ENTITY_ID": sp_entity_id,
                    "SP_PUBLIC_CERT": sp_public_cert,
                    "SP_PRIVATE_KEY": sp_private_key,
                    "ORG_INFO": org_info,
                    "TECHNICAL_CONTACT": technical_contact,
                    "SUPPORT_CONTACT": support_contact,
                    "SECURITY_CONFIG": security_config,
                    "SP_EXTRA": sp_extra,
                    "EXTRA_DATA": self.extra_data,
                },
            }

            found_configs.append(
                {
                    "category": self.CATEGORY,
                    "settings": config_data,
                    "org_mappers": org_mappers,
                    "team_mappers": self.team_mappers,
                    "login_redirect_override": login_redirect_override,
                }
            )
        return found_configs

    def create_gateway_authenticator(self, config):
        """Create a SAML authenticator in Gateway."""
        category = config["category"]
        config_settings = config["settings"]
        name = config_settings["name"]

        # Generate authenticator name and slug
        authenticator_name = f"{category.replace('-', '_').title()}-{name}"
        authenticator_slug = self._generate_authenticator_slug("saml", name)

        self._write_output(f"\n--- Processing {category} authenticator ---")
        self._write_output(f"Name: {authenticator_name}")
        self._write_output(f"Slug: {authenticator_slug}")
        self._write_output(f"Type: {config_settings['type']}")

        # Build Gateway authenticator configuration
        gateway_config = {
            "name": authenticator_name,
            "slug": authenticator_slug,
            "type": config_settings["type"],
            "enabled": False,
            "create_objects": True,  # Allow Gateway to create users/orgs/teams
            "remove_users": False,  # Don't remove users by default
            "configuration": config_settings["configuration"],
        }

        # CALLBACK_URL - automatically created by Gateway
        ignore_keys = ["CALLBACK_URL", "SP_PRIVATE_KEY"]

        # Submit the authenticator (create or update as needed)
        result = self.submit_authenticator(gateway_config, ignore_keys, config)

        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
        valid_login_urls = [f'/sso/login/saml/?idp={name}', f'/sso/login/saml/?idp={name}/']
        self.handle_login_override(config, valid_login_urls)

        return result

    def _team_attr_to_gateway_format(self, saml_team_attr):
        saml_attr = saml_team_attr.get("saml_attr")
        if not saml_attr:
            return

        revoke = saml_team_attr.get('remove', True)
        self._add_to_extra_data([saml_attr, saml_attr])

        for item in saml_team_attr["team_org_map"]:
            team_list = item["team"]
            if isinstance(team_list, str):
                team_list = [team_list]
            team = item.get("team_alias") or item["team"]
            self.team_mappers.append(
                {
                    "map_type": "team",
                    "role": "Team Member",
                    "organization": item["organization"],
                    "team": team,
                    "name": "Team" + "-" + team + "-" + item["organization"],
                    "revoke": revoke,
                    "authenticator": -1,
                    "triggers": {"attributes": {saml_attr: {"in": team_list}, "join_condition": "or"}},
                    "order": self.next_order,
                }
            )
            self.next_order += 1

    def _user_flags_by_role_to_gateway_format(self, user_flags_by_attr):
        for k, v in ROLE_MAPPER.items():
            if k in user_flags_by_attr:
                if v['role']:
                    name = f"Role-{v['role']}"
                else:
                    name = f"Role-{v['map_type']}"

                revoke = user_flags_by_attr.get(v['revoke'], True)
                self.team_mappers.append(
                    {
                        "map_type": v["map_type"],
                        "role": v["role"],
                        "name": name,
                        "organization": None,
                        "team": None,
                        "revoke": revoke,
                        "order": self.next_order,
                        "authenticator": -1,
                        "triggers": {
                            "attributes": {
                                "Role": {"in": user_flags_by_attr[k]},
                                "join_condition": "or",
                            }
                        },
                    }
                )
                self.next_order += 1

    def _user_flags_by_attr_value_to_gateway_format(self, user_flags_by_attr):
        for k, v in ATTRIBUTE_VALUE_MAPPER.items():
            if k in user_flags_by_attr:
                value = user_flags_by_attr.get(v['value'])

                if value:
                    if isinstance(value, list):
                        value = {'in': value}
                    else:
                        value = {'in': [value]}
                else:
                    value = {}

                revoke = user_flags_by_attr.get(v['revoke'], True)
                attr_name = user_flags_by_attr[k]
                self._add_to_extra_data([attr_name, attr_name])

                if v['role']:
                    name = f"Role-{v['role']}-attr"
                else:
                    name = f"Role-{v['map_type']}-attr"

                self.team_mappers.append(
                    {
                        "map_type": v["map_type"],
                        "role": v["role"],
                        "name": name,
                        "organization": None,
                        "team": None,
                        "revoke": revoke,
                        "order": self.next_order,
                        "authenticator": -1,
                        "triggers": {
                            "attributes": {
                                attr_name: value,
                                "join_condition": "or",
                            }
                        },
                    }
                )
                self.next_order += 1

    def _org_attr_to_gateway_format(self, org_attr):
        for k, v in ORG_ATTRIBUTE_MAPPER.items():
            if k in org_attr:
                attr_name = org_attr.get(k)
                organization = "{% " + f"for_attr_value('{attr_name}')" + " %}"
                revoke = org_attr.get(v['revoke'], True)

                self._add_to_extra_data([attr_name, attr_name])

                name = f"Role-{v['role']}-attr"
                self.team_mappers.append(
                    {
                        "map_type": 'organization',
                        "role": v['role'],
                        "name": name,
                        "organization": organization,
                        "team": None,
                        "revoke": revoke,
                        "order": self.next_order,
                        "authenticator": -1,
                        "triggers": {
                            "attributes": {
                                attr_name: {},
                                "join_condition": "or",
                            }
                        },
                    }
                )
                self.next_order += 1

    def _add_to_extra_data(self, item: list):
        if item not in self.extra_data:
            self.extra_data.append(item)
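
To make the team-attribute conversion concrete, here is a hypothetical `SOCIAL_AUTH_SAML_TEAM_ATTR` value (all names invented) and the mapper `_team_attr_to_gateway_format` would append for it:

```
saml_team_attr = {
    "saml_attr": "memberOf",
    "remove": True,
    "team_org_map": [
        {"team": "ops", "organization": "Default", "team_alias": "Operations"},
    ],
}
# Resulting mapper (the order value depends on how many mappers came before it):
# {
#     "map_type": "team",
#     "role": "Team Member",
#     "organization": "Default",
#     "team": "Operations",
#     "name": "Team-Operations-Default",
#     "revoke": True,
#     "authenticator": -1,
#     "triggers": {"attributes": {"memberOf": {"in": ["ops"]}, "join_condition": "or"}},
#     "order": 1,
# }
```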
197  awx/sso/utils/settings_migrator.py  Normal file
@ -0,0 +1,197 @@
"""
Settings migrator.

This module handles the migration of AWX settings to Gateway.
"""

from django.conf import settings

from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator


class SettingsMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of AWX settings to Gateway.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Define transformer functions for each setting
        self.setting_transformers = {
            'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': self._transform_social_auth_username_is_full_email,
            'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': self._transform_allow_oauth2_for_external_users,
        }

    def _convert_setting_name(self, setting):
        keys = {
            "CUSTOM_LOGIN_INFO": "custom_login_info",
            "CUSTOM_LOGO": "custom_logo",
        }
        return keys.get(setting, setting)

    def _transform_social_auth_username_is_full_email(self, value):
        # SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL is a boolean and does not need to be transformed
        return value

    def _transform_allow_oauth2_for_external_users(self, value):
        # ALLOW_OAUTH2_FOR_EXTERNAL_USERS is a boolean and does not need to be transformed
        return value

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "Settings"

    def get_controller_config(self):
        """
        Export relevant AWX settings that need to be migrated to Gateway.

        Returns:
            list: List of configured settings that need to be migrated
        """
        # Define settings that should be migrated from AWX to Gateway
        settings_to_migrate = [
            'SESSION_COOKIE_AGE',
            'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL',
            'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
            'LOGIN_REDIRECT_OVERRIDE',
            'ORG_ADMINS_CAN_SEE_ALL_USERS',
            'MANAGE_ORGANIZATION_AUTH',
        ]

        found_configs = []

        for setting_name in settings_to_migrate:
            # Handle LOGIN_REDIRECT_OVERRIDE specially
            if setting_name == 'LOGIN_REDIRECT_OVERRIDE':
                if BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator:
                    # Use the URL computed by the authenticator migrator
                    setting_value = BaseAuthenticatorMigrator.login_redirect_override_new_url
                else:
                    # Use the original controller setting value
                    setting_value = getattr(settings, setting_name, None)
            else:
                setting_value = getattr(settings, setting_name, None)

            # Only include settings that have non-None and non-empty values
            if setting_value is not None and setting_value != "":
                # Apply the transformer function if available
                transformer = self.setting_transformers.get(setting_name)
                if transformer:
                    setting_value = transformer(setting_value)

                # Skip migration if the transformer returned None or an empty string
                if setting_value is not None and setting_value != "":
                    found_configs.append(
                        {
                            'category': 'global-settings',
                            'setting_name': setting_name,
                            'setting_value': setting_value,
                            'org_mappers': [],  # Settings don't have mappers
                            'team_mappers': [],  # Settings don't have mappers
                            'role_mappers': [],  # Settings don't have mappers
                            'allow_mappers': [],  # Settings don't have mappers
                        }
                    )
                else:
                    self._write_output(f'\nIgnoring {setting_name} because it is None or empty after transformation')
            else:
                self._write_output(f'\nIgnoring {setting_name} because it is None or empty')

        return found_configs

    def create_gateway_authenticator(self, config):
        """
        Migrate AWX settings to Gateway.

        Note: This doesn't create authenticators, but updates Gateway settings.
        """
        setting_name = config['setting_name']
        setting_value = config['setting_value']

        self._write_output(f'\n--- Migrating setting: {setting_name} ---')

        try:
            gateway_setting_name = self._convert_setting_name(setting_name)

            # Get the current gateway setting value to check whether an update is needed
            current_gateway_value = self.gateway_client.get_gateway_setting(gateway_setting_name)

            # Compare the current gateway value with the controller value
            if current_gateway_value == setting_value:
                self._write_output(f'↷ Setting unchanged: {setting_name} (value already matches)', 'warning')
                return {'success': True, 'action': 'skipped', 'error': None}

            self._write_output(f'Current value: {current_gateway_value}')
            self._write_output(f'New value: {setting_value}')

            # Use the update_gateway_setting method
            self.gateway_client.update_gateway_setting(gateway_setting_name, setting_value)

            self._write_output(f'✓ Successfully migrated setting: {setting_name}', 'success')

            # Return a success result in the expected format
            return {'success': True, 'action': 'updated', 'error': None}

        except Exception as e:
            self._write_output(f'✗ Failed to migrate setting {setting_name}: {str(e)}', 'error')
            return {'success': False, 'action': 'failed', 'error': str(e)}

    def migrate(self):
        """
        Main entry point - orchestrates the settings migration process.

        Returns:
            dict: Summary of migration results
        """
        # Get settings from AWX/Controller
        configs = self.get_controller_config()

        if not configs:
            self._write_output('No settings found to migrate.', 'warning')
            return {
                'created': 0,
                'updated': 0,
                'unchanged': 0,
                'failed': 0,
                'mappers_created': 0,
                'mappers_updated': 0,
                'mappers_failed': 0,
                'settings_created': 0,
                'settings_updated': 0,
                'settings_unchanged': 0,
                'settings_failed': 0,
            }

        self._write_output(f'Found {len(configs)} setting(s) to migrate.', 'success')

        # Process each setting
        created_settings = []
        updated_settings = []
        unchanged_settings = []
        failed_settings = []

        for config in configs:
            result = self.create_gateway_authenticator(config)
            if result['success']:
                if result['action'] == 'created':
                    created_settings.append(config)
                elif result['action'] == 'updated':
                    updated_settings.append(config)
                elif result['action'] == 'skipped':
                    unchanged_settings.append(config)
            else:
                failed_settings.append(config)

        # Settings have no mappers or authenticators, so those counts are always 0
        return {
            'created': 0,
            'updated': 0,
            'unchanged': 0,
            'failed': 0,
            'mappers_created': 0,
            'mappers_updated': 0,
            'mappers_failed': 0,
            'settings_created': len(created_settings),
            'settings_updated': len(updated_settings),
            'settings_unchanged': len(unchanged_settings),
            'settings_failed': len(failed_settings),
        }
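
A small standalone sketch of the name-conversion behavior above (re-implemented here purely for illustration):

```
def convert_setting_name(setting):
    # Only a couple of settings are renamed on the Gateway side;
    # everything else keeps its controller name.
    keys = {
        "CUSTOM_LOGIN_INFO": "custom_login_info",
        "CUSTOM_LOGO": "custom_logo",
    }
    return keys.get(setting, setting)

assert convert_setting_name("CUSTOM_LOGO") == "custom_logo"
assert convert_setting_name("SESSION_COOKIE_AGE") == "SESSION_COOKIE_AGE"
```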
93  awx/sso/utils/tacacs_migrator.py  Normal file
@ -0,0 +1,93 @@
"""
TACACS+ authenticator migrator.

This module handles the migration of TACACS+ authenticators from AWX to Gateway.
"""

from django.conf import settings

from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator


class TACACSMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of TACACS+ authenticators from AWX to Gateway.
    """

    CATEGORY = "TACACSPLUS"
    AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.tacacs"

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name.

        Named TACACSPLUS because `+` is not allowed in the authenticator slug.
        """
        return "TACACSPLUS"

    def get_controller_config(self):
        """
        Export the TACACS+ authenticator. A TACACS+ authenticator is only exported if
        the required configuration is present.

        Returns:
            list: List of configured TACACS+ authentication providers with their settings
        """
        host = getattr(settings, "TACACSPLUS_HOST", None)
        if not host:
            return []

        port = getattr(settings, "TACACSPLUS_PORT", 49)
        secret = getattr(settings, "TACACSPLUS_SECRET", "")
        session_timeout = getattr(settings, "TACACSPLUS_SESSION_TIMEOUT", 5)
        auth_protocol = getattr(settings, "TACACSPLUS_AUTH_PROTOCOL", "ascii")
        rem_addr = getattr(settings, "TACACSPLUS_REM_ADDR", False)

        config_data = {
            "name": "default",
            "type": self.AUTH_TYPE,
            "enabled": True,
            "create_objects": True,
            "remove_users": False,
            "configuration": {
                "HOST": host,
                "PORT": port,
                "SECRET": secret,
                "SESSION_TIMEOUT": session_timeout,
                "AUTH_PROTOCOL": auth_protocol,
                "REM_ADDR": rem_addr,
            },
        }

        return [
            {
                "category": self.CATEGORY,
                "settings": config_data,
            }
        ]

    def create_gateway_authenticator(self, config):
        """Create a TACACS+ authenticator in Gateway."""
        category = config["category"]
        config_settings = config["settings"]

        # Generate authenticator name and slug
        authenticator_name = "tacacs"
        authenticator_slug = self._generate_authenticator_slug("tacacs", category)

        self._write_output(f"\n--- Processing {category} authenticator ---")
        self._write_output(f"Name: {authenticator_name}")
        self._write_output(f"Slug: {authenticator_slug}")
        self._write_output(f"Type: {config_settings['type']}")

        # Build Gateway authenticator configuration
        gateway_config = {
            "name": authenticator_name,
            "slug": authenticator_slug,
            "type": config_settings["type"],
            "enabled": config_settings["enabled"],
            "create_objects": config_settings["create_objects"],
            "remove_users": config_settings["remove_users"],
            "configuration": config_settings["configuration"],
        }

        # Submit the authenticator (create or update as needed)
        return self.submit_authenticator(gateway_config, config=config)
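
For instance, with only `TACACSPLUS_HOST` set on the controller (hostname invented), the defaults above fill in the remaining values:

```
# TACACSPLUS_HOST = "tacacs.example.org"   # the only setting defined
# Exported "configuration" block:
configuration = {
    "HOST": "tacacs.example.org",
    "PORT": 49,
    "SECRET": "",
    "SESSION_TIMEOUT": 5,
    "AUTH_PROTOCOL": "ascii",
    "REM_ADDR": False,
}
```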
@ -5,6 +5,7 @@ from django.conf import settings
from django.urls import re_path, include, path
from ansible_base.lib.dynamic_config.dynamic_urls import api_urls, api_version_urls, root_urls
from ansible_base.rbac.service_api.urls import rbac_service_urls
from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls

@ -23,6 +24,7 @@ def get_urlpatterns(prefix=None):
urlpatterns += [
    path(f'api{prefix}v2/', include(resource_api_urls)),
    path(f'api{prefix}v2/', include(rbac_service_urls)),
    path(f'api{prefix}v2/', include(api_version_urls)),
    path(f'api{prefix}', include(api_urls)),
    path('', include(root_urls)),
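
Reading the hunk above: with a hypothetical `prefix='/'`, the added include mounts the RBAC service endpoints under the same versioned base as the resource registry (this is an orientation sketch, not an exhaustive route list):

```
# get_urlpatterns(prefix='/') now mounts, in order:
#   api/v2/  -> resource_api_urls, rbac_service_urls, api_version_urls
#   api/     -> api_urls
#   (root)   -> root_urls
```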
@ -32,11 +32,12 @@ Installing the `tar.gz` involves no special instructions.
## Running

Non-deprecated modules in this collection have no Python requirements, but
may require the official [AWX CLI](https://pypi.org/project/awxkit/)
may require the AWX CLI
in the future. The `DOCUMENTATION` for each module will report this.

You can specify authentication by host, username, and password.

<<<<<<< HEAD
These can be specified via (from highest to lowest precedence):

- direct module parameters
@ -54,6 +55,8 @@ verify_ssl = true
username = foo
password = bar
```
=======
>>>>>>> tower/test_stable-2.6

## Release and Upgrade Notes