Resolve actions conflicts and delete unwatned files

Bump migrations and delete some files

Resolve remaining conflicts

Fix requirements

Flake8 fixes

Prefer devel changes for schema

Use correct versions

Remove sso connected stuff

Update to modern actions and collection fixes

Remove unwated alias

Version problems in actions

Fix more versioning problems

Update warning string

Messed it up again

Shorten exception

More removals

Remove pbr license

Remove tests deleted in devel

Remove unexpected files

Remove some content missed in the rebase

Use sleep_task from devel

Restore devel live conftest file

Add in settings that got missed

Prefer devel version of collection test

Finish repairing .github path

Remove unintended test file duplication

Undo more unintended file additions
This commit is contained in:
AlanCoding
2025-09-12 12:10:50 -04:00
parent 38f858303d
commit 55a7591f89
83 changed files with 103 additions and 13609 deletions

View File

@@ -24,31 +24,9 @@ runs:
run: |
echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
<<<<<<< HEAD
- uses: ./.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ inputs.private-github-key }}
=======
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ inputs.private-github-key }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ inputs.private-github-key }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
>>>>>>> tower/test_stable-2.6
- name: Pre-pull latest devel image to warm cache
shell: bash

View File

@@ -36,7 +36,7 @@ runs:
- name: Upgrade ansible-core
shell: bash
run: python3 -m pip install --upgrade 'ansible-core<2.18.0'
run: python3 -m pip install --upgrade ansible-core
- name: Install system deps
shell: bash

View File

@@ -39,16 +39,12 @@ jobs:
command: /start_tests.sh test_collection_all
coverage-upload-name: "awx-collection"
- name: api-schema
<<<<<<< HEAD
command: >-
/start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
github.event.pull_request.base.ref || github.ref_name
}}
coverage-upload-name: ""
=======
command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
>>>>>>> tower/test_stable-2.6
steps:
- uses: actions/checkout@v4
with:
@@ -134,15 +130,9 @@ jobs:
with:
show-progress: false
<<<<<<< HEAD
- uses: ./.github/actions/setup-python
with:
python-version: '3.x'
=======
- uses: actions/setup-python@v5
with:
python-version: '3.12'
>>>>>>> tower/test_stable-2.6
- uses: ./.github/actions/run_awx_devel
id: awx
@@ -153,14 +143,11 @@ jobs:
- name: Run live dev env tests
run: docker exec tools_awx_1 /bin/bash -c "make live_test"
<<<<<<< HEAD
- uses: ./.github/actions/upload_awx_devel_logs
if: always()
with:
log-filename: live-tests.log
=======
>>>>>>> tower/test_stable-2.6
awx-operator:
runs-on: ubuntu-latest
@@ -185,34 +172,15 @@ jobs:
repository: ansible/awx-operator
path: awx-operator
- uses: ./awx/.github/actions/setup-python
- name: Setup python, referencing action at awx relative path
uses: ./awx/.github/actions/setup-python
with:
working-directory: awx
python-version: '3.x'
- name: Install playbook dependencies
run: |
python3 -m pip install docker
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
- name: Build AWX image
working-directory: awx
run: |
@@ -311,15 +279,9 @@ jobs:
with:
show-progress: false
<<<<<<< HEAD
- uses: ./.github/actions/setup-python
with:
python-version: '3.x'
=======
- uses: actions/setup-python@v5
with:
python-version: '3.12'
>>>>>>> tower/test_stable-2.6
- uses: ./.github/actions/run_awx_devel
id: awx
@@ -395,18 +357,12 @@ jobs:
persist-credentials: false
show-progress: false
<<<<<<< HEAD
- uses: ./.github/actions/setup-python
with:
python-version: '3.x'
=======
- uses: actions/setup-python@v5
with:
python-version: '3.12'
>>>>>>> tower/test_stable-2.6
- name: Upgrade ansible-core
run: python3 -m pip install --upgrade "ansible-core<2.19"
run: python3 -m pip install --upgrade ansible-core
- name: Download coverage artifacts
uses: actions/download-artifact@v4

View File

@@ -70,31 +70,9 @@ jobs:
make ui
if: matrix.build-targets.image-name == 'awx'
<<<<<<< HEAD
- uses: ./.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
=======
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
>>>>>>> tower/test_stable-2.6
- name: Build and push AWX devel images
run: |

View File

@@ -12,11 +12,7 @@ jobs:
with:
show-progress: false
<<<<<<< HEAD
- uses: ./.github/actions/setup-python
=======
- uses: actions/setup-python@v5
>>>>>>> tower/test_stable-2.6
with:
python-version: '3.x'

View File

@@ -33,11 +33,7 @@ jobs:
with:
show-progress: false
<<<<<<< HEAD
- uses: ./.github/actions/setup-python
=======
- uses: actions/setup-python@v5
>>>>>>> tower/test_stable-2.6
with:
python-version: '3.x'

View File

@@ -24,64 +24,23 @@ jobs:
with:
show-progress: false
<<<<<<< HEAD
- uses: ./.github/actions/setup-python
=======
- name: Set lower case owner name
shell: bash
run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
env:
OWNER: '${{ github.repository_owner }}'
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}
>>>>>>> tower/test_stable-2.6
- name: Log in to registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
<<<<<<< HEAD
- uses: ./.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Pre-pull image to warm build cache
=======
- name: Pre-pull latest devel image to warm cache
shell: bash
>>>>>>> tower/test_stable-2.6
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
docker pull -q `make print-DEVEL_IMAGE_NAME`
continue-on-error: true
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi
- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
- name: Build image
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \

View File

@@ -378,7 +378,7 @@ test_collection:
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi && \
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install "ansible-core<2.19"; fi
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
ansible --version
py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
@if [ "${GITHUB_ACTIONS}" = "true" ]; \
@@ -417,7 +417,7 @@ install_collection: build_collection
test_collection_sanity:
rm -rf awx_collection_build/
rm -rf $(COLLECTION_INSTALL)
if ! [ -x "$(shell command -v ansible-test)" ]; then pip install "ansible-core<2.19"; fi
if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
ansible --version
COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
cd $(COLLECTION_INSTALL) && \

View File

@@ -162,9 +162,9 @@ def get_view_description(view, html=False):
def get_default_schema():
if settings.DYNACONF.is_development_mode:
from awx.api.swagger import AutoSchema
from awx.api.swagger import schema_view
return AutoSchema()
return schema_view
else:
return views.APIView.schema

View File

@@ -56,7 +56,6 @@ from wsgiref.util import FileWrapper
# django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.rbac.models import RoleEvaluation
from ansible_base.rbac import permission_registry
# AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields

View File

@@ -1,140 +0,0 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status
from urllib.parse import quote, urlencode, urljoin
from django.utils.translation import gettext_lazy as _
import requests as requests
aim_inputs = {
'fields': [
{
'id': 'url',
'label': _('CyberArk CCP URL'),
'type': 'string',
'format': 'url',
},
{
'id': 'webservice_id',
'label': _('Web Service ID'),
'type': 'string',
'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
},
{
'id': 'app_id',
'label': _('Application ID'),
'type': 'string',
'secret': True,
},
{
'id': 'client_key',
'label': _('Client Key'),
'type': 'string',
'secret': True,
'multiline': True,
},
{
'id': 'client_cert',
'label': _('Client Certificate'),
'type': 'string',
'secret': True,
'multiline': True,
},
{
'id': 'verify',
'label': _('Verify SSL Certificates'),
'type': 'boolean',
'default': True,
},
],
'metadata': [
{
'id': 'object_query',
'label': _('Object Query'),
'type': 'string',
'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
},
{'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
{
'id': 'object_property',
'label': _('Object Property'),
'type': 'string',
'help_text': _('The property of the object to return. Available properties: Username, Password and Address.'),
},
{
'id': 'reason',
'label': _('Reason'),
'type': 'string',
'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'),
},
],
'required': ['url', 'app_id', 'object_query'],
}
def aim_backend(**kwargs):
url = kwargs['url']
client_cert = kwargs.get('client_cert', None)
client_key = kwargs.get('client_key', None)
verify = kwargs['verify']
webservice_id = kwargs.get('webservice_id', '')
app_id = kwargs['app_id']
object_query = kwargs['object_query']
object_query_format = kwargs['object_query_format']
object_property = kwargs.get('object_property', '')
reason = kwargs.get('reason', None)
if webservice_id == '':
webservice_id = 'AIMWebService'
query_params = {
'AppId': app_id,
'Query': object_query,
'QueryFormat': object_query_format,
}
if reason:
query_params['reason'] = reason
request_qs = '?' + urlencode(query_params, quote_via=quote)
request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))
with CertFiles(client_cert, client_key) as cert:
res = requests.get(
request_url + request_qs,
timeout=30,
cert=cert,
verify=verify,
allow_redirects=False,
)
sensitive_query_params = {
'AppId': '****',
'Query': '****',
'QueryFormat': object_query_format,
}
if reason:
sensitive_query_params['reason'] = '****'
sensitive_request_qs = urlencode(
sensitive_query_params,
safe='*',
quote_via=quote,
)
res.url = f'{request_url}?{sensitive_request_qs}'
raise_for_status(res)
# CCP returns the property name capitalized, username is camel case
# so we need to handle that case
if object_property == '':
object_property = 'Content'
elif object_property.lower() == 'username':
object_property = 'UserName'
elif object_property.lower() == 'password':
object_property = 'Content'
elif object_property.lower() == 'address':
object_property = 'Address'
elif object_property not in res:
raise KeyError('Property {} not found in object, available properties: Username, Password and Address'.format(object_property))
else:
object_property = object_property.capitalize()
return res.json()[object_property]
aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)

View File

@@ -1,114 +0,0 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import (
ClientSecretCredential,
CredentialUnavailableError,
ManagedIdentityCredential,
)
from azure.core.credentials import TokenCredential
from msrestazure import azure_cloud
from .plugin import CredentialPlugin
from django.utils.translation import gettext_lazy as _
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]
azure_keyvault_inputs = {
'fields': [
{
'id': 'url',
'label': _('Vault URL (DNS Name)'),
'type': 'string',
'format': 'url',
},
{'id': 'client', 'label': _('Client ID'), 'type': 'string'},
{
'id': 'secret',
'label': _('Client Secret'),
'type': 'string',
'secret': True,
},
{'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'},
{
'id': 'cloud_name',
'label': _('Cloud Environment'),
'help_text': _('Specify which azure cloud environment to use.'),
'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
'default': default_cloud.name,
},
],
'metadata': [
{
'id': 'secret_field',
'label': _('Secret Name'),
'type': 'string',
'help_text': _('The name of the secret to look up.'),
},
{
'id': 'secret_version',
'label': _('Secret Version'),
'type': 'string',
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
},
],
'required': ['url', 'secret_field'],
}
def _initialize_credential(
tenant: str = '',
client: str = '',
secret: str = '',
) -> TokenCredential:
explicit_credentials_provided = all((tenant, client, secret))
if explicit_credentials_provided:
return ClientSecretCredential(
tenant_id=tenant,
client_id=client,
client_secret=secret,
)
return ManagedIdentityCredential()
def azure_keyvault_backend(
*, url: str, client: str = '', secret: str = '', tenant: str = '', secret_field: str, secret_version: str = '', **kwargs
) -> str | None:
"""Get a credential and retrieve a secret from an Azure Key Vault.
An empty string for an optional parameter counts as not provided.
:param url: An Azure Key Vault URI.
:param client: The Client ID (optional).
:param secret: The Client Secret (optional).
:param tenant: The Tenant ID (optional).
:param secret_field: The name of the secret to retrieve from the
vault.
:param secret_version: The version of the secret to retrieve
(optional).
:returns: The secret from the Key Vault.
:raises RuntimeError: If the software is not being run on an Azure
VM.
"""
chosen_credential = _initialize_credential(tenant, client, secret)
keyvault = SecretClient(credential=chosen_credential, vault_url=url)
try:
keyvault_secret = keyvault.get_secret(
name=secret_field,
version=secret_version,
)
except CredentialUnavailableError as secret_lookup_err:
raise RuntimeError(
'You are not operating on an Azure VM, so the Managed Identity '
'feature is unavailable. Please provide the full Client ID, '
'Client Secret, and Tenant ID or run the software on an Azure VM.',
) from secret_lookup_err
return keyvault_secret.value
azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)

View File

@@ -1,176 +0,0 @@
"""GitHub App Installation Access Token Credential Plugin.
This module defines a credential plugin for making use of the
GitHub Apps mechanism, allowing authentication via GitHub App
installation-scoped access tokens.
Functions:
- :func:`extract_github_app_install_token`: Generates a GitHub App
Installation token.
- ``github_app_lookup``: Defines the credential plugin interface.
"""
from github import Auth as Auth, Github
from github.Consts import DEFAULT_BASE_URL as PUBLIC_GH_API_URL
from github.GithubException import (
BadAttributeException,
GithubException,
UnknownObjectException,
)
from django.utils.translation import gettext_lazy as _
from .plugin import CredentialPlugin
github_app_inputs = {
'fields': [
{
'id': 'github_api_url',
'label': _('GitHub API endpoint URL'),
'type': 'string',
'help_text': _(
'Specify the GitHub API URL here. In the case of an Enterprise: '
'https://gh.your.org/api/v3 (self-hosted) '
'or https://api.SUBDOMAIN.ghe.com (cloud)',
),
'default': 'https://api.github.com',
},
{
'id': 'app_or_client_id',
'label': _('GitHub App ID'),
'type': 'string',
'help_text': _(
'The GitHub App ID created by the GitHub Admin. '
'Example App ID: 1121547 '
'found on https://github.com/settings/apps/ '
'required for creating a JWT token for authentication.',
),
},
{
'id': 'install_id',
'label': _('GitHub App Installation ID'),
'type': 'string',
'help_text': _(
'The Installation ID from the GitHub App installation '
'generated by the GitHub Admin. '
'Example: 59980338 extracted from the installation link '
'https://github.com/settings/installations/59980338 '
'required for creating a limited GitHub app token.',
),
},
{
'id': 'private_rsa_key',
'label': _('RSA Private Key'),
'type': 'string',
'format': 'ssh_private_key',
'secret': True,
'multiline': True,
'help_text': _(
'Paste the contents of the PEM file that the GitHub Admin provided to you with the app and installation IDs.',
),
},
],
'metadata': [
{
'id': 'description',
'label': _('Description (Optional)'),
'type': 'string',
'help_text': _('To be removed after UI is updated'),
},
],
'required': ['app_or_client_id', 'install_id', 'private_rsa_key'],
}
GH_CLIENT_ID_TRAILER_LENGTH = 16
HEXADECIMAL_BASE = 16
def _is_intish(app_id_candidate):
return isinstance(app_id_candidate, int) or app_id_candidate.isdigit()
def _is_client_id(client_id_candidate):
client_id_prefix = 'Iv1.'
if not client_id_candidate.startswith(client_id_prefix):
return False
client_id_trailer = client_id_candidate[len(client_id_prefix) :]
if len(client_id_trailer) != GH_CLIENT_ID_TRAILER_LENGTH:
return False
try:
int(client_id_trailer, base=HEXADECIMAL_BASE)
except ValueError:
return False
return True
def _is_app_or_client_id(app_or_client_id_candidate):
if _is_intish(app_or_client_id_candidate):
return True
return _is_client_id(app_or_client_id_candidate)
def _assert_ids_look_acceptable(app_or_client_id, install_id):
if not _is_app_or_client_id(app_or_client_id):
raise ValueError(
'Expected GitHub App or Client ID to be an integer or a string '
f'starting with `Iv1.` followed by 16 hexadecimal digits, '
f'but got {app_or_client_id !r}',
)
if isinstance(app_or_client_id, str) and _is_client_id(app_or_client_id):
raise ValueError(
'Expected GitHub App ID must be an integer or a string '
f'with an all-digit value, but got {app_or_client_id !r}. '
'Client IDs are currently unsupported.',
)
if not _is_intish(install_id):
raise ValueError(
'Expected GitHub App Installation ID to be an integer' f' but got {install_id !r}',
)
def extract_github_app_install_token(github_api_url, app_or_client_id, private_rsa_key, install_id, **_discarded_kwargs):
"""Generate a GH App Installation access token."""
_assert_ids_look_acceptable(app_or_client_id, install_id)
auth = Auth.AppAuth(
app_id=str(app_or_client_id),
private_key=private_rsa_key,
).get_installation_auth(installation_id=int(install_id))
Github(
auth=auth,
base_url=github_api_url if github_api_url else PUBLIC_GH_API_URL,
)
doc_url = 'See https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app'
app_install_context = f'app_or_client_id: {app_or_client_id}, install_id: {install_id}'
try:
return auth.token
except UnknownObjectException as github_install_not_found_exc:
raise ValueError(
f'Failed to retrieve a GitHub installation token from {github_api_url} using {app_install_context}. Is the app installed? {doc_url}.'
f'\n\n{github_install_not_found_exc}',
) from github_install_not_found_exc
except GithubException as pygithub_catchall_exc:
raise RuntimeError(
f'An unexpected error happened while talking to GitHub API @ {github_api_url} ({app_install_context}). '
f'Is the app or client ID correct? And the private RSA key? {doc_url}.'
f'\n\n{pygithub_catchall_exc}',
) from pygithub_catchall_exc
except BadAttributeException as github_broken_exc:
raise RuntimeError(
f'Broken GitHub @ {github_api_url} with {app_install_context}. It is a bug, please report it to the developers.\n\n{github_broken_exc}',
) from github_broken_exc
github_app_lookup_plugin = CredentialPlugin(
'GitHub App Installation Access Token Lookup',
inputs=github_app_inputs,
backend=extract_github_app_install_token,
)

View File

@@ -1,247 +0,0 @@
import sys
import os
from django.core.management.base import BaseCommand
from urllib.parse import urlparse, urlunparse
from awx.sso.utils.azure_ad_migrator import AzureADMigrator
from awx.sso.utils.github_migrator import GitHubMigrator
from awx.sso.utils.ldap_migrator import LDAPMigrator
from awx.sso.utils.oidc_migrator import OIDCMigrator
from awx.sso.utils.saml_migrator import SAMLMigrator
from awx.sso.utils.radius_migrator import RADIUSMigrator
from awx.sso.utils.settings_migrator import SettingsMigrator
from awx.sso.utils.tacacs_migrator import TACACSMigrator
from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator
from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError
from awx.main.utils.gateway_client_svc_token import GatewayClientSVCToken
from ansible_base.resource_registry.tasks.sync import create_api_client
class Command(BaseCommand):
help = 'Import existing auth provider configurations to AAP Gateway via API requests'
def add_arguments(self, parser):
parser.add_argument('--basic-auth', action='store_true', help='Use HTTP Basic Authentication between Controller and Gateway')
parser.add_argument(
'--skip-all-authenticators',
action='store_true',
help='Skip importing all authenticators [GitHub, OIDC, SAML, Azure AD, LDAP, RADIUS, TACACS+, Google OAuth2]',
)
parser.add_argument('--skip-oidc', action='store_true', help='Skip importing generic OIDC authenticators')
parser.add_argument('--skip-github', action='store_true', help='Skip importing GitHub authenticator')
parser.add_argument('--skip-ldap', action='store_true', help='Skip importing LDAP authenticators')
parser.add_argument('--skip-ad', action='store_true', help='Skip importing Azure AD authenticator')
parser.add_argument('--skip-saml', action='store_true', help='Skip importing SAML authenticator')
parser.add_argument('--skip-radius', action='store_true', help='Skip importing RADIUS authenticator')
parser.add_argument('--skip-tacacs', action='store_true', help='Skip importing TACACS+ authenticator')
parser.add_argument('--skip-google', action='store_true', help='Skip importing Google OAuth2 authenticator')
parser.add_argument('--skip-settings', action='store_true', help='Skip importing settings')
parser.add_argument(
'--force',
action='store_true',
help='Force migration even if configurations already exist. Does not apply to skipped authenticators nor skipped settings.',
)
def handle(self, *args, **options):
# Read Gateway connection parameters from environment variables
gateway_base_url = os.getenv('GATEWAY_BASE_URL')
gateway_user = os.getenv('GATEWAY_USER')
gateway_password = os.getenv('GATEWAY_PASSWORD')
gateway_skip_verify = os.getenv('GATEWAY_SKIP_VERIFY', '').lower() in ('true', '1', 'yes', 'on')
skip_all_authenticators = options['skip_all_authenticators']
skip_oidc = options['skip_oidc']
skip_github = options['skip_github']
skip_ldap = options['skip_ldap']
skip_ad = options['skip_ad']
skip_saml = options['skip_saml']
skip_radius = options['skip_radius']
skip_tacacs = options['skip_tacacs']
skip_google = options['skip_google']
skip_settings = options['skip_settings']
force = options['force']
basic_auth = options['basic_auth']
management_command_validation_errors = []
# If the management command isn't called with all parameters needed to talk to Gateway, consider
# it a dry-run and exit cleanly
if not gateway_base_url and basic_auth:
management_command_validation_errors.append('- GATEWAY_BASE_URL: Base URL of the AAP Gateway instance')
if (not gateway_user or not gateway_password) and basic_auth:
management_command_validation_errors.append('- GATEWAY_USER: Username for AAP Gateway authentication')
management_command_validation_errors.append('- GATEWAY_PASSWORD: Password for AAP Gateway authentication')
if len(management_command_validation_errors) > 0:
self.stdout.write(self.style.WARNING('Missing required environment variables:'))
for validation_error in management_command_validation_errors:
self.stdout.write(self.style.WARNING(f"{validation_error}"))
self.stdout.write(self.style.WARNING('- GATEWAY_SKIP_VERIFY: Skip SSL certificate verification (optional)'))
sys.exit(0)
resource_api_client = None
response = None
if basic_auth:
self.stdout.write(self.style.SUCCESS('HTTP Basic Auth: true'))
self.stdout.write(self.style.SUCCESS(f'Gateway Base URL: {gateway_base_url}'))
self.stdout.write(self.style.SUCCESS(f'Gateway User: {gateway_user}'))
self.stdout.write(self.style.SUCCESS('Gateway Password: *******************'))
self.stdout.write(self.style.SUCCESS(f'Skip SSL Verification: {gateway_skip_verify}'))
else:
resource_api_client = create_api_client()
resource_api_client.verify_https = not gateway_skip_verify
response = resource_api_client.get_service_metadata()
parsed_url = urlparse(resource_api_client.base_url)
resource_api_client.base_url = urlunparse((parsed_url.scheme, parsed_url.netloc, '/', '', '', ''))
self.stdout.write(self.style.SUCCESS('Gateway Service Token: true'))
self.stdout.write(self.style.SUCCESS(f'Gateway Base URL: {resource_api_client.base_url}'))
self.stdout.write(self.style.SUCCESS(f'Gateway JWT User: {resource_api_client.jwt_user_id}'))
self.stdout.write(self.style.SUCCESS(f'Gateway JWT Expiration: {resource_api_client.jwt_expiration}'))
self.stdout.write(self.style.SUCCESS(f'Skip SSL Verification: {not resource_api_client.verify_https}'))
self.stdout.write(self.style.SUCCESS(f'Connection Validated: {response.status_code == 200}'))
if response.status_code != 200:
self.stdout.write(
self.style.ERROR(
f'Gateway Service Token is unable to connect to Gateway via the base URL {resource_api_client.base_url}. Recieved HTTP response code {response.status_code}'
)
)
sys.exit(1)
# Create Gateway client and run migrations
try:
self.stdout.write(self.style.SUCCESS('\n=== Connecting to Gateway ==='))
pre_gateway_client = None
if basic_auth:
self.stdout.write(self.style.SUCCESS('\n=== With Basic HTTP Auth ==='))
pre_gateway_client = GatewayClient(
base_url=gateway_base_url, username=gateway_user, password=gateway_password, skip_verify=gateway_skip_verify, command=self
)
else:
self.stdout.write(self.style.SUCCESS('\n=== With Service Token ==='))
pre_gateway_client = GatewayClientSVCToken(resource_api_client=resource_api_client, command=self)
with pre_gateway_client as gateway_client:
self.stdout.write(self.style.SUCCESS('Successfully connected to Gateway'))
# Initialize migrators
migrators = []
if not skip_all_authenticators:
if not skip_oidc:
migrators.append(OIDCMigrator(gateway_client, self, force=force))
if not skip_github:
migrators.append(GitHubMigrator(gateway_client, self, force=force))
if not skip_saml:
migrators.append(SAMLMigrator(gateway_client, self, force=force))
if not skip_ad:
migrators.append(AzureADMigrator(gateway_client, self, force=force))
if not skip_ldap:
migrators.append(LDAPMigrator(gateway_client, self, force=force))
if not skip_radius:
migrators.append(RADIUSMigrator(gateway_client, self, force=force))
if not skip_tacacs:
migrators.append(TACACSMigrator(gateway_client, self, force=force))
if not skip_google:
migrators.append(GoogleOAuth2Migrator(gateway_client, self, force=force))
if not migrators:
self.stdout.write(self.style.WARNING('No authentication configurations found to migrate.'))
if not skip_settings:
migrators.append(SettingsMigrator(gateway_client, self, force=force))
else:
self.stdout.write(self.style.WARNING('Settings migration will not execute.'))
# Run migrations
total_results = {
'created': 0,
'updated': 0,
'unchanged': 0,
'failed': 0,
'mappers_created': 0,
'mappers_updated': 0,
'mappers_failed': 0,
'settings_created': 0,
'settings_updated': 0,
'settings_unchanged': 0,
'settings_failed': 0,
}
if not migrators:
self.stdout.write(self.style.WARNING('NO MIGRATIONS WILL EXECUTE.'))
# Exit with success code since this is not an error condition
sys.exit(0)
else:
for migrator in migrators:
self.stdout.write(self.style.SUCCESS(f'\n=== Migrating {migrator.get_authenticator_type()} Configurations ==='))
result = migrator.migrate()
self._print_export_summary(migrator.get_authenticator_type(), result)
# Accumulate results - handle missing keys gracefully
for key in total_results:
total_results[key] += result.get(key, 0)
# Overall summary
self.stdout.write(self.style.SUCCESS('\n=== Migration Summary ==='))
self.stdout.write(f'Total authenticators created: {total_results["created"]}')
self.stdout.write(f'Total authenticators updated: {total_results["updated"]}')
self.stdout.write(f'Total authenticators unchanged: {total_results["unchanged"]}')
self.stdout.write(f'Total authenticators failed: {total_results["failed"]}')
self.stdout.write(f'Total mappers created: {total_results["mappers_created"]}')
self.stdout.write(f'Total mappers updated: {total_results["mappers_updated"]}')
self.stdout.write(f'Total mappers failed: {total_results["mappers_failed"]}')
self.stdout.write(f'Total settings created: {total_results["settings_created"]}')
self.stdout.write(f'Total settings updated: {total_results["settings_updated"]}')
self.stdout.write(f'Total settings unchanged: {total_results["settings_unchanged"]}')
self.stdout.write(f'Total settings failed: {total_results["settings_failed"]}')
# Check for any failures and return appropriate status code
has_failures = total_results["failed"] > 0 or total_results["mappers_failed"] > 0 or total_results["settings_failed"] > 0
if has_failures:
self.stdout.write(self.style.ERROR('\nMigration completed with failures.'))
sys.exit(1)
else:
self.stdout.write(self.style.SUCCESS('\nMigration completed successfully.'))
sys.exit(0)
except GatewayAPIError as e:
self.stdout.write(self.style.ERROR(f'Gateway API Error: {e.message}'))
if e.status_code:
self.stdout.write(self.style.ERROR(f'Status Code: {e.status_code}'))
if e.response_data:
self.stdout.write(self.style.ERROR(f'Response: {e.response_data}'))
sys.exit(1)
except Exception as e:
self.stdout.write(self.style.ERROR(f'Unexpected error during migration: {str(e)}'))
sys.exit(1)
def _print_export_summary(self, config_type, result):
"""Print a summary of the export results."""
self.stdout.write(f'\n--- {config_type} Export Summary ---')
if config_type in ['GitHub', 'OIDC', 'SAML', 'Azure AD', 'LDAP', 'RADIUS', 'TACACS+', 'Google OAuth2']:
self.stdout.write(f'Authenticators created: {result.get("created", 0)}')
self.stdout.write(f'Authenticators updated: {result.get("updated", 0)}')
self.stdout.write(f'Authenticators unchanged: {result.get("unchanged", 0)}')
self.stdout.write(f'Authenticators failed: {result.get("failed", 0)}')
self.stdout.write(f'Mappers created: {result.get("mappers_created", 0)}')
self.stdout.write(f'Mappers updated: {result.get("mappers_updated", 0)}')
self.stdout.write(f'Mappers failed: {result.get("mappers_failed", 0)}')
if config_type == 'Settings':
self.stdout.write(f'Settings created: {result.get("settings_created", 0)}')
self.stdout.write(f'Settings updated: {result.get("settings_updated", 0)}')
self.stdout.write(f'Settings unchanged: {result.get("settings_unchanged", 0)}')
self.stdout.write(f'Settings failed: {result.get("settings_failed", 0)}')

View File

@@ -8,7 +8,7 @@ from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permission
class Migration(migrations.Migration):
dependencies = [
('main', '0191_add_django_permissions'),
('dab_rbac', '0003_alter_dabpermission_codename_and_more'),
('dab_rbac', '__first__'),
]
operations = [

View File

@@ -20,7 +20,7 @@ def update_github_app_kind(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
('main', '0201_create_managed_creds'),
('main', '0203_remove_team_of_teams'),
]
operations = [
migrations.DeleteModel(

View File

@@ -336,16 +336,6 @@ def setup_managed_role_definitions(apps, schema_editor):
to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
)
)
if cls_name == 'team':
managed_role_definitions.append(
get_or_create_managed(
'Controller Team Admin',
f'Has all permissions to a single {cls._meta.verbose_name}',
ct,
indiv_perms,
RoleDefinition,
)
)
if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
org_child_perms = object_perms.copy()
@@ -386,18 +376,6 @@ def setup_managed_role_definitions(apps, schema_editor):
RoleDefinition,
)
)
if action == 'member' and cls_name in ('organization', 'team'):
suffix = to_create['special'].format(cls=cls, action=action.title())
rd_name = f'Controller {suffix}'
managed_role_definitions.append(
get_or_create_managed(
rd_name,
f'Has {action} permissions to a single {cls._meta.verbose_name}',
ct,
perm_list,
RoleDefinition,
)
)
if 'org_admin' in to_create:
managed_role_definitions.append(
@@ -409,15 +387,6 @@ def setup_managed_role_definitions(apps, schema_editor):
RoleDefinition,
)
)
managed_role_definitions.append(
get_or_create_managed(
'Controller Organization Admin',
'Has all permissions to a single organization and all objects inside of it',
org_ct,
org_perms,
RoleDefinition,
)
)
# Special "organization action" roles
audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]

File diff suppressed because it is too large Load Diff

View File

@@ -749,81 +749,6 @@ def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs)
maybe_reverse_sync_unassignment(rd, team, child_role.content_object)
ROLE_DEFINITION_TO_ROLE_FIELD = {
'Organization Member': 'member_role',
'WorkflowJobTemplate Admin': 'admin_role',
'Organization WorkflowJobTemplate Admin': 'workflow_admin_role',
'WorkflowJobTemplate Execute': 'execute_role',
'WorkflowJobTemplate Approve': 'approval_role',
'InstanceGroup Admin': 'admin_role',
'InstanceGroup Use': 'use_role',
'Organization ExecutionEnvironment Admin': 'execution_environment_admin_role',
'Project Admin': 'admin_role',
'Organization Project Admin': 'project_admin_role',
'Project Use': 'use_role',
'Project Update': 'update_role',
'JobTemplate Admin': 'admin_role',
'Organization JobTemplate Admin': 'job_template_admin_role',
'JobTemplate Execute': 'execute_role',
'Inventory Admin': 'admin_role',
'Organization Inventory Admin': 'inventory_admin_role',
'Inventory Use': 'use_role',
'Inventory Adhoc': 'adhoc_role',
'Inventory Update': 'update_role',
'Organization NotificationTemplate Admin': 'notification_admin_role',
'Credential Admin': 'admin_role',
'Organization Credential Admin': 'credential_admin_role',
'Credential Use': 'use_role',
'Team Admin': 'admin_role',
'Team Member': 'member_role',
'Organization Admin': 'admin_role',
'Organization Audit': 'auditor_role',
'Organization Execute': 'execute_role',
'Organization Approval': 'approval_role',
}
def _sync_assignments_to_old_rbac(instance, delete=True):
from awx.main.signals import disable_activity_stream
with disable_activity_stream():
with disable_rbac_sync():
field_name = ROLE_DEFINITION_TO_ROLE_FIELD.get(instance.role_definition.name)
if not field_name:
return
try:
role = getattr(instance.object_role.content_object, field_name)
# in the case RoleUserAssignment is being cascade deleted, then
# object_role might not exist. In which case the object is about to be removed
# anyways so just return
except ObjectDoesNotExist:
return
if isinstance(instance.actor, get_user_model()):
# user
if delete:
role.members.remove(instance.actor)
else:
role.members.add(instance.actor)
else:
# team
if delete:
instance.team.member_role.children.remove(role)
else:
instance.team.member_role.children.add(role)
@receiver(post_delete, sender=RoleUserAssignment)
@receiver(post_delete, sender=RoleTeamAssignment)
def sync_assignments_to_old_rbac_delete(instance, **kwargs):
_sync_assignments_to_old_rbac(instance, delete=True)
@receiver(post_save, sender=RoleUserAssignment)
@receiver(post_save, sender=RoleTeamAssignment)
def sync_user_assignments_to_old_rbac_create(instance, **kwargs):
_sync_assignments_to_old_rbac(instance, delete=False)
ROLE_DEFINITION_TO_ROLE_FIELD = {
'Organization Member': 'member_role',
'Controller Organization Member': 'member_role',

View File

@@ -93,9 +93,6 @@ from awx.main.utils.update_model import update_model
# Django flags
from flags.state import flag_enabled
# Django flags
from flags.state import flag_enabled
logger = logging.getLogger('awx.main.tasks.jobs')

View File

@@ -13,25 +13,6 @@ from datetime import datetime
from distutils.version import LooseVersion as Version
from io import StringIO
# Django
from django.conf import settings
from django.db import connection, transaction, DatabaseError, IntegrityError
from django.db.models.fields.related import ForeignKey
from django.utils.timezone import now, timedelta
from django.utils.encoding import smart_str
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _
from django.utils.translation import gettext_noop
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.query import QuerySet
# Django-CRUM
from crum import impersonate
# Django flags
from flags.state import flag_enabled
# Runner
import ansible_runner.cleanup
import psycopg
@@ -91,13 +72,6 @@ from awx.main.tasks.receptor import administrative_workunit_reaper, get_receptor
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
from awx.main.utils.reload import stop_local_services
from dispatcherd.publish import task
from awx.main.tasks.receptor import get_receptor_ctl, worker_info, worker_cleanup, administrative_workunit_reaper, write_receptor_config
from awx.main.consumers import emit_channel_notification
from awx.main import analytics
from awx.conf import settings_registry
from awx.main.analytics.subsystem_metrics import DispatcherMetrics
from rest_framework.exceptions import PermissionDenied
logger = logging.getLogger('awx.main.tasks.system')

View File

@@ -1,6 +0,0 @@
{
"VMWARE_HOST": "https://foo.invalid",
"VMWARE_PASSWORD": "fooo",
"VMWARE_USER": "fooo",
"VMWARE_VALIDATE_CERTS": "False"
}

View File

@@ -1,4 +0,0 @@
---
{
"demo.query.example": ""
}

View File

@@ -1,17 +1,57 @@
import time
import logging
from dispatcherd.publish import task
from django.db import connection
from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.publish import task
from awx.main.dispatch.publish import task as old_task
from ansible_base.lib.utils.db import advisory_lock
logger = logging.getLogger(__name__)
@task(queue=get_task_queuename)
@old_task(queue=get_task_queuename)
def sleep_task(seconds=10, log=False):
if log:
logger.info('starting sleep_task')
time.sleep(seconds)
if log:
logger.info('finished sleep_task')
@task()
def sleep_break_connection(seconds=0.2):
"""
Interact with the database in an intentionally breaking way.
After this finishes, queries made by this connection are expected to error
with "the connection is closed"
This is obviously a problem for any task that comes afterwards.
So this is used to break things so that the fixes may be demonstrated.
"""
with connection.cursor() as cursor:
cursor.execute(f"SET idle_session_timeout = '{seconds / 2}s';")
logger.info(f'sleeping for {seconds}s > {seconds / 2}s session timeout')
time.sleep(seconds)
for i in range(1, 3):
logger.info(f'\nRunning query number {i}')
try:
with connection.cursor() as cursor:
cursor.execute("SELECT 1;")
logger.info(' query worked, not expected')
except Exception as exc:
logger.info(f' query errored as expected\ntype: {type(exc)}\nstr: {str(exc)}')
logger.info(f'Connection present: {bool(connection.connection)}, reports closed: {getattr(connection.connection, "closed", "not_found")}')
@task()
def advisory_lock_exception():
time.sleep(0.2) # so it can fill up all the workers... hacky for now
with advisory_lock('advisory_lock_exception', lock_session_timeout_milliseconds=20):
raise RuntimeError('this is an intentional error')

View File

@@ -1,344 +0,0 @@
"""Tests for GitHub App Installation access token extraction plugin."""
from typing import TypedDict
import pytest
from pytest_mock import MockerFixture
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKey,
RSAPublicKey,
generate_private_key,
)
from cryptography.hazmat.primitives.serialization import (
Encoding,
NoEncryption,
PrivateFormat,
PublicFormat,
)
from github.Auth import AppInstallationAuth
from github.Consts import DEFAULT_JWT_ALGORITHM
from github.GithubException import (
BadAttributeException,
GithubException,
UnknownObjectException,
)
from jwt import decode as decode_jwt
from awx.main.credential_plugins import github_app
github_app_jwt_client_id_unsupported = pytest.mark.xfail(
raises=(AssertionError, ValueError),
reason='Client ID in JWT is not currently supported by ' 'PyGitHub and is disabled.\n\n' 'Ref: https://github.com/PyGithub/PyGithub/issues/3213',
)
RSA_PUBLIC_EXPONENT = 65_537 # noqa: WPS303
MINIMUM_RSA_KEY_SIZE = 1024 # the lowest value chosen for performance in tests
@pytest.fixture(scope='module')
def rsa_private_key() -> RSAPrivateKey:
"""Generate an RSA private key."""
return generate_private_key(
public_exponent=RSA_PUBLIC_EXPONENT,
key_size=MINIMUM_RSA_KEY_SIZE, # would be 4096 or higher in production
backend=default_backend(),
)
@pytest.fixture(scope='module')
def rsa_public_key(rsa_private_key: RSAPrivateKey) -> RSAPublicKey:
"""Extract a public key out of the private one."""
return rsa_private_key.public_key()
@pytest.fixture(scope='module')
def rsa_private_key_bytes(rsa_private_key: RSAPrivateKey) -> bytes:
r"""Generate an unencrypted PKCS#1 formatted RSA private key.
Encoded as PEM-bytes.
This is what the GitHub-downloaded PEM files contain.
Ref: https://developer.github.com/apps/building-github-apps/\
authenticating-with-github-apps/
"""
return rsa_private_key.private_bytes(
encoding=Encoding.PEM,
format=PrivateFormat.TraditionalOpenSSL, # A.K.A. PKCS#1
encryption_algorithm=NoEncryption(),
)
@pytest.fixture(scope='module')
def rsa_private_key_str(rsa_private_key_bytes: bytes) -> str:
"""Return private key as an instance of string."""
return rsa_private_key_bytes.decode('utf-8')
@pytest.fixture(scope='module')
def rsa_public_key_bytes(rsa_public_key: RSAPublicKey) -> bytes:
"""Return a PKCS#1 formatted RSA public key encoded as PEM."""
return rsa_public_key.public_bytes(
encoding=Encoding.PEM,
format=PublicFormat.PKCS1,
)
class AppInstallIds(TypedDict):
"""Schema for augmented extractor function keyword args."""
app_or_client_id: str
install_id: str
@pytest.mark.parametrize(
('extract_github_app_install_token_args', 'expected_error_msg'),
(
pytest.param(
{
'app_or_client_id': 'invalid',
'install_id': '666',
},
'^Expected GitHub App or Client ID to be an integer or a string ' r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got' " 'invalid'$",
id='gh-app-id-broken-text',
),
pytest.param(
{
'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbb',
'install_id': '666',
},
'^Expected GitHub App or Client ID to be an integer or a string '
r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got'
" 'Iv1.bbbbbbbbbbbbbbb'$",
id='gh-app-id-client-id-not-enough-chars',
),
pytest.param(
{
'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbbx',
'install_id': '666',
},
'^Expected GitHub App or Client ID to be an integer or a string '
r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got'
" 'Iv1.bbbbbbbbbbbbbbbx'$",
id='gh-app-id-client-id-broken-hex',
),
pytest.param(
{
'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbbbb',
'install_id': '666',
},
'^Expected GitHub App or Client ID to be an integer or a string '
r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got'
" 'Iv1.bbbbbbbbbbbbbbbbb'$",
id='gh-app-id-client-id-too-many-chars',
),
pytest.param(
{
'app_or_client_id': 999,
'install_id': 'invalid',
},
'^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$",
id='gh-app-invalid-install-id-with-int-app-id',
),
pytest.param(
{
'app_or_client_id': '999',
'install_id': 'invalid',
},
'^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$",
id='gh-app-invalid-install-id-with-str-digit-app-id',
),
pytest.param(
{
'app_or_client_id': 'Iv1.cccccccccccccccc',
'install_id': 'invalid',
},
'^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$",
id='gh-app-invalid-install-id-with-client-id',
marks=github_app_jwt_client_id_unsupported,
),
),
)
def test_github_app_invalid_args(
extract_github_app_install_token_args: AppInstallIds,
expected_error_msg: str,
) -> None:
"""Test that invalid arguments make token extractor bail early."""
with pytest.raises(ValueError, match=expected_error_msg):
github_app.extract_github_app_install_token(
github_api_url='https://github.com',
private_rsa_key='key',
**extract_github_app_install_token_args,
)
@pytest.mark.parametrize(
(
'github_exception',
'transformed_exception',
'error_msg',
),
(
(
BadAttributeException(
'',
{},
Exception(),
),
RuntimeError,
(
r'^Broken GitHub @ https://github\.com with '
r'app_or_client_id: 123, install_id: 456\. It is a bug, '
'please report it to the '
r"developers\.\n\n\('', \{\}, Exception\(\)\)$"
),
),
(
GithubException(-1),
RuntimeError,
(
'^An unexpected error happened while talking to GitHub API '
r'@ https://github\.com '
r'\(app_or_client_id: 123, install_id: 456\)\. '
r'Is the app or client ID correct\? '
r'And the private RSA key\? '
r'See https://docs\.github\.com/rest/reference/apps'
r'#create-an-installation-access-token-for-an-app\.'
r'\n\n-1$'
),
),
(
UnknownObjectException(-1),
ValueError,
(
'^Failed to retrieve a GitHub installation token from '
r'https://github\.com using '
r'app_or_client_id: 123, install_id: 456\. '
r'Is the app installed\? See '
r'https://docs\.github\.com/rest/reference/apps'
r'#create-an-installation-access-token-for-an-app\.'
r'\n\n-1$'
),
),
),
ids=(
'github-broken',
'unexpected-error',
'no-install',
),
)
def test_github_app_api_errors(
mocker: MockerFixture,
github_exception: Exception,
transformed_exception: type[Exception],
error_msg: str,
) -> None:
"""Test successful GitHub authentication."""
application_id = 123
installation_id = 456
mocker.patch.object(
github_app.Auth.AppInstallationAuth,
'token',
new_callable=mocker.PropertyMock,
side_effect=github_exception,
)
with pytest.raises(transformed_exception, match=error_msg):
github_app.extract_github_app_install_token(
github_api_url='https://github.com',
app_or_client_id=application_id,
install_id=installation_id,
private_rsa_key='key',
)
class _FakeAppInstallationAuth(AppInstallationAuth):
@property
def token(self: '_FakeAppInstallationAuth') -> str:
return 'token-sentinel'
@pytest.mark.parametrize(
'application_id',
(
123,
'123',
pytest.param(
'Iv1.aaaaaaaaaaaaaaaa',
marks=github_app_jwt_client_id_unsupported,
),
),
ids=('app-id-int', 'app-id-str', 'client-id'),
)
@pytest.mark.parametrize(
'installation_id',
(456, '456'),
ids=('install-id-int', 'install-id-str'),
)
# pylint: disable-next=too-many-arguments,too-many-positional-arguments
def test_github_app_github_authentication( # noqa: WPS211
application_id: int | str,
installation_id: int | str,
mocker: MockerFixture,
monkeypatch: pytest.MonkeyPatch,
rsa_private_key_str: str,
rsa_public_key_bytes: bytes,
) -> None:
"""Test successful GitHub authentication."""
monkeypatch.setattr(
github_app.Auth,
'AppInstallationAuth',
_FakeAppInstallationAuth,
)
get_installation_auth_spy = mocker.spy(
github_app.Auth,
'AppInstallationAuth',
)
github_initializer_spy = mocker.spy(github_app, 'Github')
token = github_app.extract_github_app_install_token(
github_api_url='https://github.com',
app_or_client_id=application_id,
install_id=installation_id,
private_rsa_key=rsa_private_key_str,
)
observed_pygithub_obj = github_initializer_spy.spy_return
observed_gh_install_auth_obj = get_installation_auth_spy.spy_return
# pylint: disable-next=protected-access
signed_jwt = observed_gh_install_auth_obj._app_auth.token # noqa: WPS437
assert token == 'token-sentinel'
assert observed_pygithub_obj.requester.base_url == 'https://github.com'
assert observed_gh_install_auth_obj.installation_id == int(installation_id)
assert isinstance(observed_gh_install_auth_obj, _FakeAppInstallationAuth)
# NOTE: The `decode_jwt()` call asserts that no
# NOTE: `jwt.exceptions.InvalidSignatureError()` exception gets raised
# NOTE: which would indicate incorrect RSA key or corrupted payload if
# NOTE: that was to happen. This verifies that JWT is signed with the
# NOTE: private RSA key we passed by using its public counterpart.
decode_jwt(
signed_jwt,
key=rsa_public_key_bytes,
algorithms=[DEFAULT_JWT_ALGORITHM],
options={
'require': ['exp', 'iat', 'iss'],
'strict_aud': False,
'verify_aud': True,
'verify_exp': True,
'verify_signature': True,
'verify_nbf': True,
},
audience=None, # GH App JWT don't set the audience claim
issuer=str(application_id),
leeway=0.001, # noqa: WPS432
)

View File

@@ -1,217 +0,0 @@
import pytest
from unittest import mock
from awx.main.credential_plugins import hashivault, azure_kv
from azure.keyvault.secrets import (
KeyVaultSecret,
SecretClient,
SecretProperties,
)
def test_imported_azure_cloud_sdk_vars():
from awx.main.credential_plugins import azure_kv
assert len(azure_kv.clouds) > 0
assert all([hasattr(c, 'name') for c in azure_kv.clouds])
assert all([hasattr(c, 'suffixes') for c in azure_kv.clouds])
assert all([hasattr(c.suffixes, 'keyvault_dns') for c in azure_kv.clouds])
def test_hashivault_approle_auth():
kwargs = {
'role_id': 'the_role_id',
'secret_id': 'the_secret_id',
}
expected_res = {
'role_id': 'the_role_id',
'secret_id': 'the_secret_id',
}
res = hashivault.approle_auth(**kwargs)
assert res == expected_res
def test_hashivault_kubernetes_auth():
kwargs = {
'kubernetes_role': 'the_kubernetes_role',
}
expected_res = {
'role': 'the_kubernetes_role',
'jwt': 'the_jwt',
}
with mock.patch('pathlib.Path') as path_mock:
mock.mock_open(path_mock.return_value.open, read_data='the_jwt')
res = hashivault.kubernetes_auth(**kwargs)
path_mock.assert_called_with('/var/run/secrets/kubernetes.io/serviceaccount/token')
assert res == expected_res
def test_hashivault_client_cert_auth_explicit_role():
kwargs = {
'client_cert_role': 'test-cert-1',
}
expected_res = {
'name': 'test-cert-1',
}
res = hashivault.client_cert_auth(**kwargs)
assert res == expected_res
def test_hashivault_client_cert_auth_no_role():
kwargs = {}
expected_res = {
'name': None,
}
res = hashivault.client_cert_auth(**kwargs)
assert res == expected_res
def test_hashivault_userpass_auth():
kwargs = {'username': 'the_username', 'password': 'the_password'}
expected_res = {'username': 'the_username', 'password': 'the_password'}
res = hashivault.userpass_auth(**kwargs)
assert res == expected_res
def test_hashivault_handle_auth_token():
kwargs = {
'token': 'the_token',
}
token = hashivault.handle_auth(**kwargs)
assert token == kwargs['token']
def test_hashivault_handle_auth_approle():
kwargs = {
'role_id': 'the_role_id',
'secret_id': 'the_secret_id',
}
with mock.patch.object(hashivault, 'method_auth') as method_mock:
method_mock.return_value = 'the_token'
token = hashivault.handle_auth(**kwargs)
method_mock.assert_called_with(**kwargs, auth_param=kwargs)
assert token == 'the_token'
def test_hashivault_handle_auth_kubernetes():
kwargs = {
'kubernetes_role': 'the_kubernetes_role',
}
with mock.patch.object(hashivault, 'method_auth') as method_mock:
with mock.patch('pathlib.Path') as path_mock:
mock.mock_open(path_mock.return_value.open, read_data='the_jwt')
method_mock.return_value = 'the_token'
token = hashivault.handle_auth(**kwargs)
method_mock.assert_called_with(**kwargs, auth_param={'role': 'the_kubernetes_role', 'jwt': 'the_jwt'})
assert token == 'the_token'
def test_hashivault_handle_auth_client_cert():
kwargs = {
'client_cert_public': "foo",
'client_cert_private': "bar",
'client_cert_role': 'test-cert-1',
}
auth_params = {
'name': 'test-cert-1',
}
with mock.patch.object(hashivault, 'method_auth') as method_mock:
method_mock.return_value = 'the_token'
token = hashivault.handle_auth(**kwargs)
method_mock.assert_called_with(**kwargs, auth_param=auth_params)
assert token == 'the_token'
def test_hashivault_handle_auth_not_enough_args():
with pytest.raises(Exception):
hashivault.handle_auth()
class TestDelineaImports:
"""
These module have a try-except for ImportError which will allow using the older library
but we do not want the awx_devel image to have the older library,
so these tests are designed to fail if these wind up using the fallback import
"""
def test_dsv_import(self):
from awx.main.credential_plugins.dsv import SecretsVault # noqa
# assert this module as opposed to older thycotic.secrets.vault
assert SecretsVault.__module__ == 'delinea.secrets.vault'
def test_tss_import(self):
from awx.main.credential_plugins.tss import DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret # noqa
for cls in (DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret):
# assert this module as opposed to older thycotic.secrets.server
assert cls.__module__ == 'delinea.secrets.server'
class _FakeSecretClient(SecretClient):
def get_secret(
self: '_FakeSecretClient',
name: str,
version: str | None = None,
**kwargs: str,
) -> KeyVaultSecret:
props = SecretProperties(None, None)
return KeyVaultSecret(properties=props, value='test-secret')
def test_azure_kv_invalid_env() -> None:
"""Test running outside of Azure raises error."""
error_msg = (
'You are not operating on an Azure VM, so the Managed Identity '
'feature is unavailable. Please provide the full Client ID, '
'Client Secret, and Tenant ID or run the software on an Azure VM.'
)
with pytest.raises(
RuntimeError,
match=error_msg,
):
azure_kv.azure_keyvault_backend(
url='https://test.vault.azure.net',
client='',
secret='client-secret',
tenant='tenant-id',
secret_field='secret',
secret_version='',
)
@pytest.mark.parametrize(
('client', 'secret', 'tenant'),
(
pytest.param('', '', '', id='managed-identity'),
pytest.param(
'client-id',
'client-secret',
'tenant-id',
id='client-secret-credential',
),
),
)
def test_azure_kv_valid_auth(
monkeypatch: pytest.MonkeyPatch,
client: str,
secret: str,
tenant: str,
) -> None:
"""Test successful Azure authentication via Managed Identity and credentials."""
monkeypatch.setattr(
azure_kv,
'SecretClient',
_FakeSecretClient,
)
keyvault_secret = azure_kv.azure_keyvault_backend(
url='https://test.vault.azure.net',
client=client,
secret=secret,
tenant=tenant,
secret_field='secret',
secret_version='',
)
assert keyvault_secret == 'test-secret'

View File

@@ -1,45 +0,0 @@
import pytest
# AWX
from awx.main.ha import is_ha_environment
from awx.main.models.ha import Instance
from awx.main.dispatch.pool import get_auto_max_workers
# Django
from django.test.utils import override_settings
@pytest.mark.django_db
def test_multiple_instances():
for i in range(2):
Instance.objects.create(hostname=f'foo{i}', node_type='hybrid')
assert is_ha_environment()
@pytest.mark.django_db
def test_db_localhost():
Instance.objects.create(hostname='foo', node_type='hybrid')
Instance.objects.create(hostname='bar', node_type='execution')
assert is_ha_environment() is False
@pytest.mark.django_db
@pytest.mark.parametrize(
'settings',
[
dict(SYSTEM_TASK_ABS_MEM='16Gi', SYSTEM_TASK_ABS_CPU='24', SYSTEM_TASK_FORKS_MEM=400, SYSTEM_TASK_FORKS_CPU=4),
dict(SYSTEM_TASK_ABS_MEM='124Gi', SYSTEM_TASK_ABS_CPU='2', SYSTEM_TASK_FORKS_MEM=None, SYSTEM_TASK_FORKS_CPU=None),
],
ids=['cpu_dominated', 'memory_dominated'],
)
def test_dispatcher_max_workers_reserve(settings, fake_redis):
"""This tests that the dispatcher max_workers matches instance capacity
Assumes capacity_adjustment is 1,
plus reserve worker count
"""
with override_settings(**settings):
i = Instance.objects.create(hostname='test-1', node_type='hybrid')
i.local_health_check()
assert get_auto_max_workers() == i.capacity + 7, (i.cpu, i.memory, i.cpu_capacity, i.mem_capacity)

View File

@@ -1,56 +0,0 @@
import pytest
from awx.main.migrations._db_constraints import _rename_duplicates
from awx.main.models import JobTemplate
@pytest.mark.django_db
def test_rename_job_template_duplicates(organization, project):
ids = []
for i in range(5):
jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project)
ids.append(jt.id) # saved in order of creation
# Hack to first allow duplicate names of JT to test migration
JobTemplate.objects.filter(id__in=ids).update(org_unique=False)
# Set all JTs to the same name
JobTemplate.objects.filter(id__in=ids).update(name='same_name_for_test')
_rename_duplicates(JobTemplate)
first_jt = JobTemplate.objects.get(id=ids[0])
assert first_jt.name == 'same_name_for_test'
for i, pk in enumerate(ids):
if i == 0:
continue
jt = JobTemplate.objects.get(id=pk)
# Name should be set based on creation order
assert jt.name == f'same_name_for_test_dup{i}'
@pytest.mark.django_db
def test_rename_job_template_name_too_long(organization, project):
ids = []
for i in range(3):
jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project)
ids.append(jt.id) # saved in order of creation
JobTemplate.objects.filter(id__in=ids).update(org_unique=False)
chars = 512
# Set all JTs to the same reaaaaaaly long name
JobTemplate.objects.filter(id__in=ids).update(name='A' * chars)
_rename_duplicates(JobTemplate)
first_jt = JobTemplate.objects.get(id=ids[0])
assert first_jt.name == 'A' * chars
for i, pk in enumerate(ids):
if i == 0:
continue
jt = JobTemplate.objects.get(id=pk)
assert jt.name.endswith(f'dup{i}')
assert len(jt.name) <= 512

View File

@@ -2,7 +2,6 @@ import pytest
from django_test_migrations.plan import all_migrations, nodes_to_tuples
from django.utils.timezone import now
from django.utils import timezone
"""
Most tests that live in here can probably be deleted at some point. They are mainly

View File

@@ -1,96 +0,0 @@
import pytest
import os
import tempfile
import shutil
from awx.main.tasks.jobs import RunJob
from awx.main.tasks.system import CleanupImagesAndFiles, execution_node_health_check
from awx.main.models import Instance, Job
@pytest.fixture
def scm_revision_file(tmpdir_factory):
# Returns path to temporary testing revision file
revision_file = tmpdir_factory.mktemp('revisions').join('revision.txt')
with open(str(revision_file), 'w') as f:
f.write('1234567890123456789012345678901234567890')
return os.path.join(revision_file.dirname, 'revision.txt')
@pytest.mark.django_db
@pytest.mark.parametrize('node_type', ('control. hybrid'))
def test_no_worker_info_on_AWX_nodes(node_type):
hostname = 'us-south-3-compute.invalid'
Instance.objects.create(hostname=hostname, node_type=node_type)
assert execution_node_health_check(hostname) is None
@pytest.fixture
def job_folder_factory(request):
def _rf(job_id='1234'):
pdd_path = tempfile.mkdtemp(prefix=f'awx_{job_id}_')
def test_folder_cleanup():
if os.path.exists(pdd_path):
shutil.rmtree(pdd_path)
request.addfinalizer(test_folder_cleanup)
return pdd_path
return _rf
@pytest.fixture
def mock_job_folder(job_folder_factory):
return job_folder_factory()
@pytest.mark.django_db
def test_folder_cleanup_stale_file(mock_job_folder, mock_me):
CleanupImagesAndFiles.run()
assert os.path.exists(mock_job_folder) # grace period should protect folder from deletion
CleanupImagesAndFiles.run(grace_period=0)
assert not os.path.exists(mock_job_folder) # should be deleted
@pytest.mark.django_db
def test_folder_cleanup_running_job(mock_job_folder, me_inst):
job = Job.objects.create(id=1234, controller_node=me_inst.hostname, status='running')
CleanupImagesAndFiles.run(grace_period=0)
assert os.path.exists(mock_job_folder) # running job should prevent folder from getting deleted
job.status = 'failed'
job.save(update_fields=['status'])
CleanupImagesAndFiles.run(grace_period=0)
assert not os.path.exists(mock_job_folder) # job is finished and no grace period, should delete
@pytest.mark.django_db
def test_folder_cleanup_multiple_running_jobs(job_folder_factory, me_inst):
jobs = []
dirs = []
num_jobs = 3
for i in range(num_jobs):
job = Job.objects.create(controller_node=me_inst.hostname, status='running')
dirs.append(job_folder_factory(job.id))
jobs.append(job)
CleanupImagesAndFiles.run(grace_period=0)
assert [os.path.exists(d) for d in dirs] == [True for i in range(num_jobs)]
@pytest.mark.django_db
def test_does_not_run_reaped_job(mocker, mock_me):
job = Job.objects.create(status='failed', job_explanation='This job has been reaped.')
mock_run = mocker.patch('awx.main.tasks.jobs.ansible_runner.interface.run')
try:
RunJob().run(job.id)
except Exception:
pass
job.refresh_from_db()
assert job.status == 'failed'
mock_run.assert_not_called()

View File

@@ -3,6 +3,7 @@ import time
import os
import shutil
import tempfile
import logging
import pytest
@@ -13,11 +14,15 @@ from awx.api.versioning import reverse
# These tests are invoked from the awx/main/tests/live/ subfolder
# so any fixtures from higher-up conftest files must be explicitly included
from awx.main.tests.functional.conftest import * # noqa
from awx.main.tests.conftest import load_all_credentials # noqa: F401; pylint: disable=unused-import
from awx.main.tests import data
from awx.main.models import Project, JobTemplate, Organization, Inventory
logger = logging.getLogger(__name__)
PROJ_DATA = os.path.join(os.path.dirname(data.__file__), 'projects')
@@ -133,30 +138,29 @@ def podman_image_generator():
@pytest.fixture
def run_job_from_playbook(default_org, demo_inv, post, admin):
def _rf(test_name, playbook, local_path=None, scm_url=None, jt_params=None):
project_name = f'{test_name} project'
jt_name = f'{test_name} JT: {playbook}'
old_proj = Project.objects.filter(name=project_name).first()
if old_proj:
old_proj.delete()
old_jt = JobTemplate.objects.filter(name=jt_name).first()
if old_jt:
old_jt.delete()
proj_kwargs = {'name': project_name, 'organization': default_org.id}
def project_factory(post, default_org, admin):
def _rf(scm_url=None, local_path=None):
proj_kwargs = {}
if local_path:
# manual path
project_name = f'Manual roject {local_path}'
proj_kwargs['scm_type'] = ''
proj_kwargs['local_path'] = local_path
elif scm_url:
project_name = f'Project {scm_url}'
proj_kwargs['scm_type'] = 'git'
proj_kwargs['scm_url'] = scm_url
else:
raise RuntimeError('Need to provide scm_url or local_path')
proj_kwargs['name'] = project_name
proj_kwargs['organization'] = default_org.id
old_proj = Project.objects.filter(name=project_name).first()
if old_proj:
logger.info(f'Deleting existing project {project_name}')
old_proj.delete()
result = post(
reverse('api:project_list'),
proj_kwargs,
@@ -164,6 +168,23 @@ def run_job_from_playbook(default_org, demo_inv, post, admin):
expect=201,
)
proj = Project.objects.get(id=result.data['id'])
return proj
return _rf
@pytest.fixture
def run_job_from_playbook(demo_inv, post, admin, project_factory):
def _rf(test_name, playbook, local_path=None, scm_url=None, jt_params=None, proj=None, wait=True):
jt_name = f'{test_name} JT: {playbook}'
if not proj:
proj = project_factory(scm_url=scm_url, local_path=local_path)
old_jt = JobTemplate.objects.filter(name=jt_name).first()
if old_jt:
logger.info(f'Deleting existing JT {jt_name}')
old_jt.delete()
if proj.current_job:
wait_for_job(proj.current_job)
@@ -185,7 +206,9 @@ def run_job_from_playbook(default_org, demo_inv, post, admin):
job = jt.create_unified_job()
job.signal_start()
wait_for_job(job)
assert job.status == 'successful'
if wait:
wait_for_job(job)
assert job.status == 'successful'
return {'job': job, 'job_template': jt, 'project': proj}
return _rf

View File

@@ -1,581 +0,0 @@
import os
import pytest
from unittest.mock import patch, Mock, call, DEFAULT
from io import StringIO
from unittest import TestCase
from awx.main.management.commands.import_auth_config_to_gateway import Command
from awx.main.utils.gateway_client import GatewayAPIError
class TestImportAuthConfigToGatewayCommand(TestCase):
def setUp(self):
self.command = Command()
def options_basic_auth_full_send(self):
return {
'basic_auth': True,
'skip_all_authenticators': False,
'skip_oidc': False,
'skip_github': False,
'skip_ldap': False,
'skip_ad': False,
'skip_saml': False,
'skip_radius': False,
'skip_tacacs': False,
'skip_google': False,
'skip_settings': False,
'force': False,
}
def options_basic_auth_skip_all_individual(self):
return {
'basic_auth': True,
'skip_all_authenticators': False,
'skip_oidc': True,
'skip_github': True,
'skip_ldap': True,
'skip_ad': True,
'skip_saml': True,
'skip_radius': True,
'skip_tacacs': True,
'skip_google': True,
'skip_settings': True,
'force': False,
}
def options_svc_token_full_send(self):
options = self.options_basic_auth_full_send()
options['basic_auth'] = False
return options
def options_svc_token_skip_all(self):
options = self.options_basic_auth_skip_all_individual()
options['basic_auth'] = False
return options
def create_mock_migrator(
self,
mock_migrator_class,
authenticator_type="TestAuth",
created=0,
updated=0,
unchanged=0,
failed=0,
mappers_created=0,
mappers_updated=0,
mappers_failed=0,
settings_created=0,
settings_updated=0,
settings_unchanged=0,
settings_failed=0,
):
"""Helper method to create a mock migrator with specified return values."""
mock_migrator = Mock()
mock_migrator.get_authenticator_type.return_value = authenticator_type
mock_migrator.migrate.return_value = {
'created': created,
'updated': updated,
'unchanged': unchanged,
'failed': failed,
'mappers_created': mappers_created,
'mappers_updated': mappers_updated,
'mappers_failed': mappers_failed,
}
mock_migrator_class.return_value = mock_migrator
return mock_migrator
def test_add_arguments(self):
"""Test that all expected arguments are properly added to the parser."""
parser = Mock()
self.command.add_arguments(parser)
expected_calls = [
call('--basic-auth', action='store_true', help='Use HTTP Basic Authentication between Controller and Gateway'),
call(
'--skip-all-authenticators',
action='store_true',
help='Skip importing all authenticators [GitHub, OIDC, SAML, Azure AD, LDAP, RADIUS, TACACS+, Google OAuth2]',
),
call('--skip-oidc', action='store_true', help='Skip importing generic OIDC authenticators'),
call('--skip-github', action='store_true', help='Skip importing GitHub authenticator'),
call('--skip-ldap', action='store_true', help='Skip importing LDAP authenticators'),
call('--skip-ad', action='store_true', help='Skip importing Azure AD authenticator'),
call('--skip-saml', action='store_true', help='Skip importing SAML authenticator'),
call('--skip-radius', action='store_true', help='Skip importing RADIUS authenticator'),
call('--skip-tacacs', action='store_true', help='Skip importing TACACS+ authenticator'),
call('--skip-google', action='store_true', help='Skip importing Google OAuth2 authenticator'),
call('--skip-settings', action='store_true', help='Skip importing settings'),
call(
'--force',
action='store_true',
help='Force migration even if configurations already exist. Does not apply to skipped authenticators nor skipped settings.',
),
]
parser.add_argument.assert_has_calls(expected_calls, any_order=True)
@patch.dict(os.environ, {}, clear=True)
@patch('sys.stdout', new_callable=StringIO)
def test_handle_missing_env_vars_basic_auth(self, mock_stdout):
"""Test that missing environment variables cause clean exit when using basic auth."""
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**self.options_basic_auth_full_send())
# Should exit with code 0 for successful early validation
assert exc_info.value.code == 0
output = mock_stdout.getvalue()
self.assertIn('Missing required environment variables:', output)
self.assertIn('GATEWAY_BASE_URL', output)
self.assertIn('GATEWAY_USER', output)
self.assertIn('GATEWAY_PASSWORD', output)
@patch.dict(
os.environ,
{'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass', 'GATEWAY_SKIP_VERIFY': 'true'},
)
@patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator')
@patch.multiple(
'awx.main.management.commands.import_auth_config_to_gateway',
GitHubMigrator=DEFAULT,
OIDCMigrator=DEFAULT,
SAMLMigrator=DEFAULT,
AzureADMigrator=DEFAULT,
LDAPMigrator=DEFAULT,
RADIUSMigrator=DEFAULT,
TACACSMigrator=DEFAULT,
GoogleOAuth2Migrator=DEFAULT,
)
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('sys.stdout', new_callable=StringIO)
def test_handle_basic_auth_success(self, mock_stdout, mock_gateway_client, mock_settings_migrator, **mock_migrators):
"""Test successful execution with basic auth."""
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client.return_value.__exit__.return_value = None
for mock_migrator_class in mock_migrators.values():
self.create_mock_migrator(mock_migrator_class, created=1, mappers_created=2)
self.create_mock_migrator(mock_settings_migrator, settings_created=1, settings_updated=0, settings_unchanged=2, settings_failed=0)
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**self.options_basic_auth_full_send())
# Should exit with code 0 for success
assert exc_info.value.code == 0
# Verify gateway client was created with correct parameters
mock_gateway_client.assert_called_once_with(
base_url='https://gateway.example.com', username='testuser', password='testpass', skip_verify=True, command=self.command
)
# Verify all migrators were created
for mock_migrator in mock_migrators.values():
mock_migrator.assert_called_once_with(mock_client_instance, self.command, force=False)
mock_settings_migrator.assert_called_once_with(mock_client_instance, self.command, force=False)
# Verify output contains success messages
output = mock_stdout.getvalue()
self.assertIn('HTTP Basic Auth: true', output)
self.assertIn('Successfully connected to Gateway', output)
self.assertIn('Migration Summary', output)
self.assertIn('authenticators', output)
self.assertIn('mappers', output)
self.assertIn('settings', output)
@patch.dict(os.environ, {'GATEWAY_SKIP_VERIFY': 'false'}, clear=True) # Ensure verify_https=True
@patch('awx.main.management.commands.import_auth_config_to_gateway.create_api_client')
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClientSVCToken')
@patch('awx.main.management.commands.import_auth_config_to_gateway.urlparse')
@patch('awx.main.management.commands.import_auth_config_to_gateway.urlunparse')
@patch('sys.stdout', new_callable=StringIO)
def test_handle_service_token_success(self, mock_stdout, mock_urlunparse, mock_urlparse, mock_gateway_client_svc, mock_create_api_client):
"""Test successful execution with service token."""
# Mock resource API client
mock_resource_client = Mock()
mock_resource_client.base_url = 'https://gateway.example.com/api/v1'
mock_resource_client.jwt_user_id = 'test-user'
mock_resource_client.jwt_expiration = '2024-12-31'
mock_resource_client.verify_https = True
mock_response = Mock()
mock_response.status_code = 200
mock_resource_client.get_service_metadata.return_value = mock_response
mock_create_api_client.return_value = mock_resource_client
# Mock URL parsing
mock_parsed = Mock()
mock_parsed.scheme = 'https'
mock_parsed.netloc = 'gateway.example.com'
mock_urlparse.return_value = mock_parsed
mock_urlunparse.return_value = 'https://gateway.example.com/'
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client_svc.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client_svc.return_value.__exit__.return_value = None
with patch.object(self.command, 'stdout', mock_stdout):
with patch('sys.exit'):
self.command.handle(**self.options_svc_token_skip_all())
# Should call sys.exit(0) for success, but may not due to test setup
# Just verify the command completed without raising an exception
# Verify resource API client was created and configured
mock_create_api_client.assert_called_once()
self.assertTrue(mock_resource_client.verify_https) # Should be True when GATEWAY_SKIP_VERIFY='false'
mock_resource_client.get_service_metadata.assert_called_once()
# Verify service token client was created
mock_gateway_client_svc.assert_called_once_with(resource_api_client=mock_resource_client, command=self.command)
# Verify output contains service token messages
output = mock_stdout.getvalue()
self.assertIn('Gateway Service Token: true', output)
self.assertIn('Connection Validated: True', output)
self.assertIn('No authentication configurations found to migrate.', output)
@patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
@patch.multiple(
'awx.main.management.commands.import_auth_config_to_gateway',
GitHubMigrator=DEFAULT,
OIDCMigrator=DEFAULT,
SAMLMigrator=DEFAULT,
AzureADMigrator=DEFAULT,
LDAPMigrator=DEFAULT,
RADIUSMigrator=DEFAULT,
TACACSMigrator=DEFAULT,
GoogleOAuth2Migrator=DEFAULT,
SettingsMigrator=DEFAULT,
)
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('sys.stdout', new_callable=StringIO)
def test_skip_flags_prevent_authenticator_individual_and_settings_migration(self, mock_stdout, mock_gateway_client, **mock_migrators):
"""Test that skip flags prevent corresponding migrators from being created."""
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client.return_value.__exit__.return_value = None
with patch.object(self.command, 'stdout', mock_stdout):
with patch('sys.exit'):
self.command.handle(**self.options_basic_auth_skip_all_individual())
# Should call sys.exit(0) for success, but may not due to test setup
# Just verify the command completed without raising an exception
# Verify no migrators were created
for mock_migrator in mock_migrators.values():
mock_migrator.assert_not_called()
# Verify warning message about no configurations
output = mock_stdout.getvalue()
self.assertIn('No authentication configurations found to migrate.', output)
self.assertIn('Settings migration will not execute.', output)
self.assertIn('NO MIGRATIONS WILL EXECUTE.', output)
@patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
@patch.multiple(
'awx.main.management.commands.import_auth_config_to_gateway',
GitHubMigrator=DEFAULT,
OIDCMigrator=DEFAULT,
SAMLMigrator=DEFAULT,
AzureADMigrator=DEFAULT,
LDAPMigrator=DEFAULT,
RADIUSMigrator=DEFAULT,
TACACSMigrator=DEFAULT,
GoogleOAuth2Migrator=DEFAULT,
)
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('sys.stdout', new_callable=StringIO)
def test_skip_flags_prevent_authenticator_migration(self, mock_stdout, mock_gateway_client, **mock_migrators):
"""Test that skip flags prevent corresponding migrators from being created."""
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client.return_value.__exit__.return_value = None
options = self.options_basic_auth_full_send()
options['skip_all_authenticators'] = True
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**options)
# Should exit with code 0 for success (no failures)
assert exc_info.value.code == 0
# Verify no migrators were created
for mock_migrator in mock_migrators.values():
mock_migrator.assert_not_called()
# Verify warning message about no configurations
output = mock_stdout.getvalue()
self.assertIn('No authentication configurations found to migrate.', output)
self.assertNotIn('Settings migration will not execute.', output)
self.assertNotIn('NO MIGRATIONS WILL EXECUTE.', output)
@patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('sys.stdout', new_callable=StringIO)
def test_handle_gateway_api_error(self, mock_stdout, mock_gateway_client):
"""Test handling of GatewayAPIError exceptions."""
# Mock gateway client to raise GatewayAPIError
mock_gateway_client.side_effect = GatewayAPIError('Test error message', status_code=400, response_data={'error': 'Bad request'})
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**self.options_basic_auth_full_send())
# Should exit with code 1 for errors
assert exc_info.value.code == 1
# Verify error message output
output = mock_stdout.getvalue()
self.assertIn('Gateway API Error: Test error message', output)
self.assertIn('Status Code: 400', output)
self.assertIn("Response: {'error': 'Bad request'}", output)
@patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('sys.stdout', new_callable=StringIO)
def test_handle_unexpected_error(self, mock_stdout, mock_gateway_client):
"""Test handling of unexpected exceptions."""
# Mock gateway client to raise unexpected error
mock_gateway_client.side_effect = ValueError('Unexpected error')
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**self.options_basic_auth_full_send())
# Should exit with code 1 for errors
assert exc_info.value.code == 1
# Verify error message output
output = mock_stdout.getvalue()
self.assertIn('Unexpected error during migration: Unexpected error', output)
@patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('awx.main.management.commands.import_auth_config_to_gateway.GitHubMigrator')
@patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator')
@patch('sys.stdout', new_callable=StringIO)
def test_force_flag_passed_to_migrators(self, mock_stdout, mock_github, mock_settings_migrator, mock_gateway_client):
"""Test that force flag is properly passed to migrators."""
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client.return_value.__exit__.return_value = None
# Mock migrator
self.create_mock_migrator(mock_github, authenticator_type="GitHub", created=0, mappers_created=2)
self.create_mock_migrator(
mock_settings_migrator, authenticator_type="Settings", settings_created=0, settings_updated=2, settings_unchanged=0, settings_failed=0
)
options = self.options_basic_auth_skip_all_individual()
options['force'] = True
options['skip_github'] = False
options['skip_settings'] = False
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**options)
# Should exit with code 0 for success
assert exc_info.value.code == 0
# Verify migrator was created with force=True
mock_github.assert_called_once_with(mock_client_instance, self.command, force=True)
# Verify settings migrator was created with force=True
mock_settings_migrator.assert_called_once_with(mock_client_instance, self.command, force=True)
@patch('sys.stdout', new_callable=StringIO)
def test_print_export_summary(self, mock_stdout):
"""Test the _print_export_summary method."""
result = {
'created': 2,
'updated': 1,
'unchanged': 3,
'failed': 0,
'mappers_created': 5,
'mappers_updated': 2,
'mappers_failed': 1,
}
with patch.object(self.command, 'stdout', mock_stdout):
self.command._print_export_summary('SAML', result)
output = mock_stdout.getvalue()
self.assertIn('--- SAML Export Summary ---', output)
self.assertIn('Authenticators created: 2', output)
self.assertIn('Authenticators updated: 1', output)
self.assertIn('Authenticators unchanged: 3', output)
self.assertIn('Authenticators failed: 0', output)
self.assertIn('Mappers created: 5', output)
self.assertIn('Mappers updated: 2', output)
self.assertIn('Mappers failed: 1', output)
@patch('sys.stdout', new_callable=StringIO)
def test_print_export_summary_settings(self, mock_stdout):
"""Test the _print_export_summary method."""
result = {
'settings_created': 2,
'settings_updated': 1,
'settings_unchanged': 3,
'settings_failed': 0,
}
with patch.object(self.command, 'stdout', mock_stdout):
self.command._print_export_summary('Settings', result)
output = mock_stdout.getvalue()
self.assertIn('--- Settings Export Summary ---', output)
self.assertIn('Settings created: 2', output)
self.assertIn('Settings updated: 1', output)
self.assertIn('Settings unchanged: 3', output)
self.assertIn('Settings failed: 0', output)
@patch('sys.stdout', new_callable=StringIO)
def test_print_export_summary_missing_keys(self, mock_stdout):
"""Test _print_export_summary handles missing keys gracefully."""
result = {
'created': 1,
'updated': 2,
# Missing other keys
}
with patch.object(self.command, 'stdout', mock_stdout):
self.command._print_export_summary('LDAP', result)
output = mock_stdout.getvalue()
self.assertIn('--- LDAP Export Summary ---', output)
self.assertIn('Authenticators created: 1', output)
self.assertIn('Authenticators updated: 2', output)
self.assertIn('Authenticators unchanged: 0', output) # Default value
self.assertIn('Mappers created: 0', output) # Default value
@patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'})
@patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient')
@patch('awx.main.management.commands.import_auth_config_to_gateway.GitHubMigrator')
@patch('awx.main.management.commands.import_auth_config_to_gateway.OIDCMigrator')
@patch('sys.stdout', new_callable=StringIO)
def test_total_results_accumulation(self, mock_stdout, mock_oidc, mock_github, mock_gateway_client):
"""Test that results from multiple migrators are properly accumulated."""
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client.return_value.__exit__.return_value = None
# Mock migrators with different results
self.create_mock_migrator(mock_github, authenticator_type="GitHub", created=1, mappers_created=2)
self.create_mock_migrator(mock_oidc, authenticator_type="OIDC", created=0, updated=1, unchanged=1, mappers_created=1, mappers_updated=1)
options = self.options_basic_auth_skip_all_individual()
options['skip_oidc'] = False
options['skip_github'] = False
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**options)
# Should exit with code 0 for success
assert exc_info.value.code == 0
# Verify total results are accumulated correctly
output = mock_stdout.getvalue()
self.assertIn('Total authenticators created: 1', output) # 1 + 0
self.assertIn('Total authenticators updated: 1', output) # 0 + 1
self.assertIn('Total authenticators unchanged: 1', output) # 0 + 1
self.assertIn('Total authenticators failed: 0', output) # 0 + 0
self.assertIn('Total mappers created: 3', output) # 2 + 1
self.assertIn('Total mappers updated: 1', output) # 0 + 1
self.assertIn('Total mappers failed: 0', output) # 0 + 0
@patch('sys.stdout', new_callable=StringIO)
def test_environment_variable_parsing(self, mock_stdout):
"""Test that environment variables are parsed correctly."""
test_cases = [
('true', True),
('1', True),
('yes', True),
('on', True),
('TRUE', True),
('false', False),
('0', False),
('no', False),
('off', False),
('', False),
('random', False),
]
for env_value, expected in test_cases:
with patch.dict(
os.environ,
{
'GATEWAY_BASE_URL': 'https://gateway.example.com',
'GATEWAY_USER': 'testuser',
'GATEWAY_PASSWORD': 'testpass',
'GATEWAY_SKIP_VERIFY': env_value,
},
):
with patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') as mock_gateway_client:
# Mock gateway client context manager
mock_client_instance = Mock()
mock_gateway_client.return_value.__enter__.return_value = mock_client_instance
mock_gateway_client.return_value.__exit__.return_value = None
with patch.object(self.command, 'stdout', mock_stdout):
with patch('sys.exit'):
self.command.handle(**self.options_basic_auth_skip_all_individual())
# Verify gateway client was called with correct skip_verify value
mock_gateway_client.assert_called_once_with(
base_url='https://gateway.example.com', username='testuser', password='testpass', skip_verify=expected, command=self.command
)
# Reset for next iteration
mock_gateway_client.reset_mock()
mock_stdout.seek(0)
mock_stdout.truncate(0)
@patch.dict(os.environ, {'GATEWAY_SKIP_VERIFY': 'false'})
@patch('awx.main.management.commands.import_auth_config_to_gateway.create_api_client')
@patch('awx.main.management.commands.import_auth_config_to_gateway.urlparse')
@patch('awx.main.management.commands.import_auth_config_to_gateway.urlunparse')
@patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator')
@patch('sys.stdout', new_callable=StringIO)
def test_service_token_connection_validation_failure(self, mock_stdout, mock_settings_migrator, mock_urlunparse, mock_urlparse, mock_create_api_client):
"""Test that non-200 response from get_service_metadata causes error exit."""
# Mock resource API client with failing response
mock_resource_client = Mock()
mock_resource_client.base_url = 'https://gateway.example.com/api/v1'
mock_resource_client.jwt_user_id = 'test-user'
mock_resource_client.jwt_expiration = '2024-12-31'
mock_resource_client.verify_https = True
mock_response = Mock()
mock_response.status_code = 401 # Simulate unauthenticated error
mock_resource_client.get_service_metadata.return_value = mock_response
mock_create_api_client.return_value = mock_resource_client
# Mock URL parsing (needed for the service token flow)
mock_parsed = Mock()
mock_parsed.scheme = 'https'
mock_parsed.netloc = 'gateway.example.com'
mock_urlparse.return_value = mock_parsed
mock_urlunparse.return_value = 'https://gateway.example.com/'
with patch.object(self.command, 'stdout', mock_stdout):
with pytest.raises(SystemExit) as exc_info:
self.command.handle(**self.options_svc_token_skip_all())
# Should exit with code 1 for connection failure
assert exc_info.value.code == 1
# Verify error message is displayed
output = mock_stdout.getvalue()
self.assertIn(
'Gateway Service Token is unable to connect to Gateway via the base URL https://gateway.example.com/. Recieved HTTP response code 401', output
)
self.assertIn('Connection Validated: False', output)

View File

@@ -871,314 +871,6 @@ class TestJobCredentials(TestJobExecution):
assert f.read() == self.EXAMPLE_PRIVATE_KEY
assert safe_env['ANSIBLE_NET_PASSWORD'] == HIDDEN_PASSWORD
def test_terraform_cloud_credentials(self, job, private_data_dir, mock_me):
terraform = CredentialType.defaults['terraform']()
hcl_config = '''
backend "s3" {
bucket = "s3_sample_bucket"
key = "/tf_state/"
region = "us-east-1"
}
'''
credential = Credential(pk=1, credential_type=terraform, inputs={'configuration': hcl_config})
credential.inputs['configuration'] = encrypt_field(credential, 'configuration')
job.credentials.add(credential)
env = {}
safe_env = {}
credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
local_path = to_host_path(env['TF_BACKEND_CONFIG_FILE'], private_data_dir)
config = open(local_path, 'r').read()
assert config == hcl_config
def test_terraform_gcs_backend_credentials(self, job, private_data_dir, mock_me):
terraform = CredentialType.defaults['terraform']()
hcl_config = '''
backend "gcs" {
bucket = "gce_storage"
}
'''
gce_backend_credentials = '''
{
"type": "service_account",
"project_id": "sample",
"private_key_id": "eeeeeeeeeeeeeeeeeeeeeeeeeee",
"private_key": "-----BEGIN PRIVATE KEY-----\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n-----END PRIVATE KEY-----\n",
"client_email": "sample@sample.iam.gserviceaccount.com",
"client_id": "0123456789",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/cloud-content-robot%40sample.iam.gserviceaccount.com",
}
'''
credential = Credential(pk=1, credential_type=terraform, inputs={'configuration': hcl_config, 'gce_credentials': gce_backend_credentials})
credential.inputs['configuration'] = encrypt_field(credential, 'configuration')
credential.inputs['gce_credentials'] = encrypt_field(credential, 'gce_credentials')
job.credentials.add(credential)
env = {}
safe_env = {}
credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
local_path = to_host_path(env['TF_BACKEND_CONFIG_FILE'], private_data_dir)
config = open(local_path, 'r').read()
assert config == hcl_config
credentials_path = to_host_path(env['GOOGLE_BACKEND_CREDENTIALS'], private_data_dir)
credentials = open(credentials_path, 'r').read()
assert credentials == gce_backend_credentials
def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir, mock_me):
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
with pytest.raises(jinja2.exceptions.UndefinedError):
credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir)
def test_custom_environment_injectors(self, private_data_dir, mock_me):
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token}}'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
env = {}
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
assert env['MY_CLOUD_API_TOKEN'] == 'ABC123'
def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir, mock_me):
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
injectors={'env': {'TURBO_BUTTON': '{{turbo_button}}'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
env = {}
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
assert env['TURBO_BUTTON'] == str(True)
def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job, mock_me):
task = jobs.RunJob()
task.instance = job
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
injectors={'env': {'JOB_ID': 'reserved'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
job.credentials.add(credential)
env = task.build_env(job, private_data_dir)
assert env['JOB_ID'] == str(job.pk)
def test_custom_environment_injectors_with_secret_field(self, private_data_dir, mock_me):
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
injectors={'env': {'MY_CLOUD_PRIVATE_VAR': '{{password}}'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'})
credential.inputs['password'] = encrypt_field(credential, 'password')
env = {}
safe_env = {}
credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['MY_CLOUD_PRIVATE_VAR'] == 'SUPER-SECRET-123'
assert 'SUPER-SECRET-123' not in safe_env.values()
assert safe_env['MY_CLOUD_PRIVATE_VAR'] == HIDDEN_PASSWORD
def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job, mock_me):
task = jobs.RunJob()
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
injectors={'extra_vars': {'api_token': '{{api_token}}'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
job.credentials.add(credential)
args = task.build_args(job, private_data_dir, {})
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["api_token"] == "ABC123"
assert hasattr(extra_vars["api_token"], '__UNSAFE__')
def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir, mock_me):
task = jobs.RunJob()
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed=False,
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
injectors={'extra_vars': {'turbo_button': '{{turbo_button}}'}},
)
credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
job.credentials.add(credential)
args = task.build_args(job, private_data_dir, {})
credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["turbo_button"] == "True"
def test_custom_environment_injectors_with_nested_extra_vars(self, private_data_dir, job, mock_me):
    """Injector templates nested inside a dict value are rendered in place."""
    run_task = jobs.RunJob()
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': [{'id': 'host', 'label': 'Host', 'type': 'string'}]},
        injectors={'extra_vars': {'auth': {'host': '{{host}}'}}},
    )
    cred = Credential(pk=1, credential_type=cloud_type, inputs={'host': 'example.com'})
    job.credentials.add(cred)
    job_args = run_task.build_args(job, private_data_dir, {})
    cred.credential_type.inject_credential(cred, {}, {}, job_args, private_data_dir)
    parsed = parse_extra_vars(job_args, private_data_dir)
    assert parsed["auth"]["host"] == "example.com"
def test_custom_environment_injectors_with_templated_extra_vars_key(self, private_data_dir, job, mock_me):
    """Dictionary *keys* in an extra_vars injector may themselves be Jinja templates."""
    run_task = jobs.RunJob()
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': [{'id': 'environment', 'label': 'Environment', 'type': 'string'}, {'id': 'host', 'label': 'Host', 'type': 'string'}]},
        injectors={'extra_vars': {'{{environment}}_auth': {'host': '{{host}}'}}},
    )
    cred = Credential(pk=1, credential_type=cloud_type, inputs={'environment': 'test', 'host': 'example.com'})
    job.credentials.add(cred)
    job_args = run_task.build_args(job, private_data_dir, {})
    cred.credential_type.inject_credential(cred, {}, {}, job_args, private_data_dir)
    parsed = parse_extra_vars(job_args, private_data_dir)
    # '{{environment}}_auth' rendered with environment='test' -> 'test_auth'.
    assert parsed["test_auth"]["host"] == "example.com"
def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir, mock_me):
    """A full Jinja conditional inside an injector template evaluates against the input."""
    run_task = jobs.RunJob()
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
        injectors={'extra_vars': {'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'}},
    )
    cred = Credential(pk=1, credential_type=cloud_type, inputs={'turbo_button': True})
    job.credentials.add(cred)
    job_args = run_task.build_args(job, private_data_dir, {})
    cred.credential_type.inject_credential(cred, {}, {}, job_args, private_data_dir)
    parsed = parse_extra_vars(job_args, private_data_dir)
    assert parsed["turbo_button"] == "FAST!"
def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir, mock_me):
    """
    extra_vars that contain secret field values should be censored in the DB,
    while the decrypted value still reaches the launched job's extra vars.
    """
    run_task = jobs.RunJob()
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
        injectors={'extra_vars': {'password': '{{password}}'}},
    )
    cred = Credential(pk=1, credential_type=cloud_type, inputs={'password': 'SUPER-SECRET-123'})
    # Store the input encrypted, the way it would be persisted in the DB.
    cred.inputs['password'] = encrypt_field(cred, 'password')
    job.credentials.add(cred)
    job_args = run_task.build_args(job, private_data_dir, {})
    cred.credential_type.inject_credential(cred, {}, {}, job_args, private_data_dir)
    parsed = parse_extra_vars(job_args, private_data_dir)
    # Injection decrypts transparently: the job sees the plaintext value.
    assert parsed["password"] == "SUPER-SECRET-123"
def test_custom_environment_injectors_with_file(self, private_data_dir, mock_me):
    """A file injector writes the rendered template to disk and publishes its
    container-side path through the env injector's {{tower.filename}} variable."""
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
        injectors={'file': {'template': '[mycloud]\n{{api_token}}'}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
    )
    cred = Credential(pk=1, credential_type=cloud_type, inputs={'api_token': 'ABC123'})
    injected_env = {}
    cred.credential_type.inject_credential(cred, injected_env, {}, [], private_data_dir)
    # Env var holds the container path; translate back to a host path to read it.
    ini_path = to_host_path(injected_env['MY_CLOUD_INI_FILE'], private_data_dir)
    with open(ini_path, 'r') as handle:
        assert handle.read() == '[mycloud]\nABC123'
def test_custom_environment_injectors_with_unicode_content(self, private_data_dir, mock_me):
    """Non-ASCII template content survives the round trip through the file injector."""
    expected_text = 'Iñtërnâtiônàlizætiøn'
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': []},
        injectors={'file': {'template': expected_text}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
    )
    cred = Credential(
        pk=1,
        credential_type=cloud_type,
    )
    injected_env = {}
    cred.credential_type.inject_credential(cred, injected_env, {}, [], private_data_dir)
    ini_path = to_host_path(injected_env['MY_CLOUD_INI_FILE'], private_data_dir)
    with open(ini_path, 'r') as handle:
        assert handle.read() == expected_text
def test_custom_environment_injectors_with_files(self, private_data_dir, mock_me):
    """Multiple named file templates each get their own path via {{tower.filename.<name>}}."""
    cloud_type = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=False,
        inputs={'fields': [{'id': 'cert', 'label': 'Certificate', 'type': 'string'}, {'id': 'key', 'label': 'Key', 'type': 'string'}]},
        injectors={
            'file': {'template.cert': '[mycert]\n{{cert}}', 'template.key': '[mykey]\n{{key}}'},
            'env': {'MY_CERT_INI_FILE': '{{tower.filename.cert}}', 'MY_KEY_INI_FILE': '{{tower.filename.key}}'},
        },
    )
    cred = Credential(pk=1, credential_type=cloud_type, inputs={'cert': 'CERT123', 'key': 'KEY123'})
    injected_env = {}
    cred.credential_type.inject_credential(cred, injected_env, {}, [], private_data_dir)
    host_cert = to_host_path(injected_env['MY_CERT_INI_FILE'], private_data_dir)
    host_key = to_host_path(injected_env['MY_KEY_INI_FILE'], private_data_dir)
    with open(host_cert, 'r') as handle:
        assert handle.read() == '[mycert]\nCERT123'
    with open(host_key, 'r') as handle:
        assert handle.read() == '[mykey]\nKEY123'
def test_multi_cloud(self, private_data_dir, mock_me):
gce = CredentialType.defaults['gce']()
gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,124 +0,0 @@
"""
Unit tests for GitHub authenticator migrator functionality.
"""
from unittest.mock import Mock, patch
from awx.sso.utils.github_migrator import GitHubMigrator
class TestGitHubMigrator:
"""Tests for GitHubMigrator class."""
# NOTE(review): the migrator is constructed against fully mocked gateway_client
# and command objects, so these tests exercise only GitHubMigrator's own logic.
def setup_method(self):
"""Set up test fixtures."""
self.gateway_client = Mock()
self.command = Mock()
self.migrator = GitHubMigrator(self.gateway_client, self.command)
def test_create_gateway_authenticator_returns_boolean_causes_crash(self):
"""
Test that verifies create_gateway_authenticator returns proper dictionary
structure instead of boolean when credentials are missing.
This test verifies the fix for the bug.
"""
# Mock the get_controller_config to return a GitHub config with missing credentials
github_config_missing_creds = {
'category': 'github',
'settings': {'SOCIAL_AUTH_GITHUB_KEY': '', 'SOCIAL_AUTH_GITHUB_SECRET': 'test-secret'},  # Missing key
'org_mappers': [],
'team_mappers': [],
'login_redirect_override': None,
}
with patch.object(self.migrator, 'get_controller_config', return_value=[github_config_missing_creds]):
with patch.object(self.migrator, '_write_output'):  # Mock output to avoid noise
# This should NOT crash now that the bug is fixed
result = self.migrator.migrate()
# Verify the migration ran successfully without crashing
assert 'created' in result
assert 'failed' in result
# Should have failed=1 since the config has success=False (missing credentials)
assert result['failed'] == 1
def test_create_gateway_authenticator_returns_boolean_with_unknown_category(self):
"""
Test that verifies create_gateway_authenticator returns proper dictionary
structure instead of boolean when category is unknown.
This test verifies the fix for the bug.
"""
# Mock the get_controller_config to return a GitHub config with unknown category
github_config_unknown_category = {
'category': 'unknown-category',
'settings': {'SOCIAL_AUTH_UNKNOWN_KEY': 'test-key', 'SOCIAL_AUTH_UNKNOWN_SECRET': 'test-secret'},
'org_mappers': [],
'team_mappers': [],
'login_redirect_override': None,
}
with patch.object(self.migrator, 'get_controller_config', return_value=[github_config_unknown_category]):
with patch.object(self.migrator, '_write_output'):  # Mock output to avoid noise
# This should NOT crash now that the bug is fixed
result = self.migrator.migrate()
# Verify the migration ran successfully without crashing
assert 'created' in result
assert 'failed' in result
# Should have failed=1 since the config has success=False (unknown category)
assert result['failed'] == 1
def test_create_gateway_authenticator_direct_boolean_return_missing_creds(self):
"""
Test that directly calls create_gateway_authenticator and verifies it returns
proper dictionary structure instead of boolean for missing credentials.
"""
# Config with missing key (empty string)
config_missing_key = {
'category': 'github',
'settings': {'SOCIAL_AUTH_GITHUB_KEY': '', 'SOCIAL_AUTH_GITHUB_SECRET': 'test-secret'},  # Missing key
'org_mappers': [],
'team_mappers': [],
'login_redirect_override': None,
}
with patch.object(self.migrator, '_write_output'):  # Mock output to avoid noise
result = self.migrator.create_gateway_authenticator(config_missing_key)
# Now the method should return a proper dictionary structure
assert isinstance(result, dict), f"Expected dict, got {type(result)} with value: {result}"
assert 'success' in result, f"Expected 'success' key in result: {result}"
assert 'action' in result, f"Expected 'action' key in result: {result}"
assert 'error' in result, f"Expected 'error' key in result: {result}"
# Verify the expected values
assert result['success'] is False
assert result['action'] == 'skipped'
assert 'Missing OAuth2 credentials' in result['error']
def test_create_gateway_authenticator_direct_boolean_return_unknown_category(self):
"""
Test that directly calls create_gateway_authenticator and verifies it returns
proper dictionary structure instead of boolean for unknown category.
"""
# Config with unknown category
config_unknown_category = {
'category': 'unknown-category',
'settings': {'SOCIAL_AUTH_UNKNOWN_KEY': 'test-key', 'SOCIAL_AUTH_UNKNOWN_SECRET': 'test-secret'},
'org_mappers': [],
'team_mappers': [],
'login_redirect_override': None,
}
with patch.object(self.migrator, '_write_output'):  # Mock output to avoid noise
result = self.migrator.create_gateway_authenticator(config_unknown_category)
# Now the method should return a proper dictionary structure
assert isinstance(result, dict), f"Expected dict, got {type(result)} with value: {result}"
assert 'success' in result, f"Expected 'success' key in result: {result}"
assert 'action' in result, f"Expected 'action' key in result: {result}"
assert 'error' in result, f"Expected 'error' key in result: {result}"
# Verify the expected values
assert result['success'] is False
assert result['action'] == 'skipped'
assert 'Unknown category unknown-category' in result['error']

File diff suppressed because it is too large Load Diff

View File

@@ -1,614 +0,0 @@
"""
Unit tests for role mapping utilities.
"""
import pytest
from awx.main.utils.gateway_mapping import role_map_to_gateway_format
from awx.sso.utils.ldap_migrator import LDAPMigrator
def get_role_mappers(role_map, start_order=1):
    """Return only the mapper list from role_map_to_gateway_format, discarding next_order."""
    mappers, _next_order = role_map_to_gateway_format(role_map, start_order)
    return mappers
def ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1):
    """Exercise LDAPMigrator's private allow/deny-group conversion on a fresh migrator."""
    return LDAPMigrator()._ldap_group_allow_to_gateway_format(result, ldap_group, deny, start_order)
class TestRoleMapToGatewayFormat:
"""Tests for role_map_to_gateway_format function."""
# NOTE(review): the 'expected' dicts below pin the exact Gateway mapper schema
# (name/authenticator/revoke/map_type/team/organization/triggers/order); keep
# them in sync with role_map_to_gateway_format.
def test_none_input(self):
"""Test that None input returns empty list."""
result, next_order = role_map_to_gateway_format(None)
assert result == []
assert next_order == 1  # Default start_order
def test_empty_dict(self):
"""Test that empty dict returns empty list."""
result, next_order = role_map_to_gateway_format({})
assert result == []
assert next_order == 1
def test_is_superuser_single_group(self):
"""Test is_superuser with single group."""
role_map = {"is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"}
result, _ = role_map_to_gateway_format(role_map)
expected = [
{
"name": "is_superuser - role",
"authenticator": -1,
"revoke": True,
"map_type": "is_superuser",
"team": None,
"organization": None,
"triggers": {
"groups": {
"has_or": ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"],
}
},
"order": 1,
}
]
assert result == expected
def test_is_superuser_multiple_groups(self):
"""Test is_superuser with multiple groups."""
role_map = {"is_superuser": ["cn=super_users,dc=example,dc=com", "cn=admins,dc=example,dc=com"]}
result, _ = role_map_to_gateway_format(role_map)
expected = [
{
"name": "is_superuser - role",
"authenticator": -1,
"revoke": True,
"map_type": "is_superuser",
"team": None,
"organization": None,
"triggers": {
"groups": {
"has_or": ["cn=super_users,dc=example,dc=com", "cn=admins,dc=example,dc=com"],
}
},
"order": 1,
}
]
assert result == expected
def test_is_system_auditor_single_group(self):
"""Test is_system_auditor with single group."""
role_map = {"is_system_auditor": "cn=auditors,dc=example,dc=com"}
result, _ = role_map_to_gateway_format(role_map)
expected = [
{
"name": "is_system_auditor - role",
"authenticator": -1,
"revoke": True,
"map_type": "role",
"role": "Platform Auditor",
"team": None,
"organization": None,
"triggers": {
"groups": {
"has_or": ["cn=auditors,dc=example,dc=com"],
}
},
"order": 1,
}
]
assert result == expected
def test_is_system_auditor_multiple_groups(self):
"""Test is_system_auditor with multiple groups."""
role_map = {"is_system_auditor": ["cn=auditors,dc=example,dc=com", "cn=viewers,dc=example,dc=com"]}
result, _ = role_map_to_gateway_format(role_map)
expected = [
{
"name": "is_system_auditor - role",
"authenticator": -1,
"revoke": True,
"map_type": "role",
"role": "Platform Auditor",
"team": None,
"organization": None,
"triggers": {
"groups": {
"has_or": ["cn=auditors,dc=example,dc=com", "cn=viewers,dc=example,dc=com"],
}
},
"order": 1,
}
]
assert result == expected
def test_multiple_roles(self):
"""Test multiple role mappings."""
role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"}
result, _ = role_map_to_gateway_format(role_map)
expected = [
{
"name": "is_superuser - role",
"authenticator": -1,
"revoke": True,
"map_type": "is_superuser",
"team": None,
"organization": None,
"triggers": {
"groups": {
"has_or": ["cn=super_users,dc=example,dc=com"],
}
},
"order": 1,
},
{
"name": "is_system_auditor - role",
"authenticator": -1,
"revoke": True,
"map_type": "role",
"role": "Platform Auditor",
"team": None,
"organization": None,
"triggers": {
"groups": {
"has_or": ["cn=auditors,dc=example,dc=com"],
}
},
"order": 2,
},
]
assert result == expected
def test_unsupported_role_flag(self):
"""Test that unsupported role flags are ignored."""
role_map = {
"is_superuser": "cn=super_users,dc=example,dc=com",
"is_staff": "cn=staff,dc=example,dc=com",  # Unsupported flag
"is_system_auditor": "cn=auditors,dc=example,dc=com",
}
result, _ = role_map_to_gateway_format(role_map)
# Should only have 2 mappers (is_superuser and is_system_auditor)
assert len(result) == 2
assert result[0]["map_type"] == "is_superuser"
assert result[1]["map_type"] == "role"
assert result[1]["role"] == "Platform Auditor"
def test_order_increments_correctly(self):
"""Test that order values increment correctly."""
role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"}
result, _ = role_map_to_gateway_format(role_map)
assert len(result) == 2
assert result[0]["order"] == 1
assert result[1]["order"] == 2
def test_start_order_parameter(self):
"""Test that start_order parameter is respected."""
role_map = {"is_superuser": "cn=super_users,dc=example,dc=com"}
result, next_order = role_map_to_gateway_format(role_map, start_order=5)
assert result[0]["order"] == 5
assert next_order == 6
def test_string_to_list_conversion(self):
"""Test that string groups are converted to lists."""
role_map = {"is_superuser": "single-group"}
result, _ = role_map_to_gateway_format(role_map)
# Should convert string to list for has_or
assert result[0]["triggers"]["groups"]["has_or"] == ["single-group"]
def test_triggers_format_validation(self):
"""Test that trigger formats match Gateway specification."""
role_map = {"is_superuser": ["group1", "group2"]}
result, _ = role_map_to_gateway_format(role_map)
# Validate that triggers follow Gateway format
triggers = result[0]["triggers"]
assert "groups" in triggers
assert "has_or" in triggers["groups"]
assert isinstance(triggers["groups"]["has_or"], list)
assert triggers["groups"]["has_or"] == ["group1", "group2"]
def test_ldap_dn_format(self):
"""Test with realistic LDAP DN format."""
role_map = {
"is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com",
"is_system_auditor": "cn=awx_auditors,OU=administration groups,DC=contoso,DC=com",
}
result, _ = role_map_to_gateway_format(role_map)
assert len(result) == 2
assert result[0]["triggers"]["groups"]["has_or"] == ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"]
assert result[1]["triggers"]["groups"]["has_or"] == ["cn=awx_auditors,OU=administration groups,DC=contoso,DC=com"]
def test_gateway_format_compliance(self):
"""Test that all results comply with Gateway role mapping format."""
role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"}
result, _ = role_map_to_gateway_format(role_map)
for mapping in result:
# Required fields per Gateway spec
assert "name" in mapping
assert "authenticator" in mapping
assert "map_type" in mapping
assert "organization" in mapping
assert "team" in mapping
assert "triggers" in mapping
assert "order" in mapping
assert "revoke" in mapping
# Field types
assert isinstance(mapping["name"], str)
assert isinstance(mapping["authenticator"], int)
assert mapping["map_type"] in ["is_superuser", "role"]
assert mapping["organization"] is None
assert mapping["team"] is None
assert isinstance(mapping["triggers"], dict)
assert isinstance(mapping["order"], int)
assert isinstance(mapping["revoke"], bool)
# Specific field validations based on map_type
if mapping["map_type"] == "is_superuser":
assert "role" not in mapping
elif mapping["map_type"] == "role":
assert "role" in mapping
assert isinstance(mapping["role"], str)
assert mapping["role"] == "Platform Auditor"
# Parametrized tests for role mappings
@pytest.mark.parametrize(
    "role_map,expected_length",
    [
        (None, 0),
        ({}, 0),
        ({"is_superuser": "group1"}, 1),
        ({"is_system_auditor": "group1"}, 1),
        ({"is_superuser": "group1", "is_system_auditor": "group2"}, 2),
        ({"is_staff": "group1"}, 0),  # Unsupported flag
        ({"is_superuser": "group1", "is_staff": "group2", "is_system_auditor": "group3"}, 2),  # Mixed supported/unsupported
    ],
)
def test_role_map_result_lengths(role_map, expected_length):
    """Each role map variant should yield the expected number of Gateway mappers."""
    mappers, _ = role_map_to_gateway_format(role_map)
    assert len(mappers) == expected_length
# Edge case tests
def test_empty_groups_handling():
    """An empty group list still produces one mapper, with an empty has_or trigger."""
    mappers, _ = role_map_to_gateway_format({"is_superuser": []})
    assert len(mappers) == 1
    assert mappers[0]["triggers"]["groups"]["has_or"] == []
def test_mixed_group_types():
    """String and list group values may be mixed within one role map."""
    mappers, _ = role_map_to_gateway_format({"is_superuser": "single-group", "is_system_auditor": ["group1", "group2"]})
    assert len(mappers) == 2
    assert mappers[0]["triggers"]["groups"]["has_or"] == ["single-group"]
    assert mappers[1]["triggers"]["groups"]["has_or"] == ["group1", "group2"]
def test_realistic_ldap_user_flags_by_group():
    """A realistic LDAP USER_FLAGS_BY_GROUP value maps to a single superuser mapper."""
    dn = "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"
    mappers, _ = role_map_to_gateway_format({"is_superuser": dn})
    # This is exactly the use case from the user's example
    assert len(mappers) == 1
    superuser_mapper = mappers[0]
    assert superuser_mapper["map_type"] == "is_superuser"
    assert superuser_mapper["triggers"]["groups"]["has_or"] == [dn]
    assert superuser_mapper["revoke"] is True
    assert superuser_mapper["team"] is None
    assert superuser_mapper["organization"] is None
class TestLdapGroupAllowToGatewayFormat:
"""Tests for ldap_group_allow_to_gateway_format function."""
# NOTE(review): require-group mappers use has_and with revoke=False, deny-group
# mappers use has_or with revoke=True — the asserts below pin that contract.
def test_none_input_with_empty_result(self):
"""Test that None input with empty result returns unchanged result."""
result = []
output_result, next_order = ldap_group_allow_to_gateway_format(result, None, deny=False)
assert output_result == []
assert next_order == 1  # Default start_order
def test_none_input_with_existing_result(self):
"""Test that None input with existing mappers returns unchanged result."""
result = [{"existing": "mapper"}]
output_result, next_order = ldap_group_allow_to_gateway_format(result, None, deny=False, start_order=5)
assert output_result == [{"existing": "mapper"}]
assert next_order == 5  # start_order unchanged
def test_require_group_mapping(self):
"""Test LDAP REQUIRE_GROUP mapping (deny=False)."""
result = []
ldap_group = "cn=allowed_users,dc=example,dc=com"
output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1)
expected = [
{
"name": "LDAP-RequireGroup",
"authenticator": -1,
"map_type": "allow",
"revoke": False,
"triggers": {"groups": {"has_and": ["cn=allowed_users,dc=example,dc=com"]}},
"order": 1,
}
]
assert output_result == expected
assert next_order == 2
def test_deny_group_mapping(self):
"""Test LDAP DENY_GROUP mapping (deny=True)."""
result = []
ldap_group = "cn=blocked_users,dc=example,dc=com"
output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=True, start_order=1)
expected = [
{
"name": "LDAP-DenyGroup",
"authenticator": -1,
"map_type": "allow",
"revoke": True,
"triggers": {"groups": {"has_or": ["cn=blocked_users,dc=example,dc=com"]}},
"order": 1,
}
]
assert output_result == expected
assert next_order == 2
def test_appending_to_existing_result(self):
"""Test appending to existing result list."""
existing_mapper = {
"name": "existing-mapper",
"authenticator": -1,
"map_type": "role",
"order": 1,
}
result = [existing_mapper]
ldap_group = "cn=new_group,dc=example,dc=com"
output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=2)
assert len(output_result) == 2
assert output_result[0] == existing_mapper  # Original mapper unchanged
assert output_result[1]["name"] == "LDAP-RequireGroup"
assert output_result[1]["order"] == 2
assert next_order == 3
def test_custom_start_order(self):
"""Test that custom start_order is respected."""
result = []
ldap_group = "cn=test_group,dc=example,dc=com"
output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=10)
assert output_result[0]["order"] == 10
assert next_order == 11
def test_require_vs_deny_trigger_differences(self):
"""Test the difference between require and deny group triggers."""
ldap_group = "cn=test_group,dc=example,dc=com"
# Test require group (deny=False)
require_result, _ = ldap_group_allow_to_gateway_format([], ldap_group, deny=False)
# Test deny group (deny=True)
deny_result, _ = ldap_group_allow_to_gateway_format([], ldap_group, deny=True)
# Require group should use has_and
assert require_result[0]["triggers"]["groups"]["has_and"] == ["cn=test_group,dc=example,dc=com"]
assert require_result[0]["revoke"] is False
assert require_result[0]["name"] == "LDAP-RequireGroup"
# Deny group should use has_or
assert deny_result[0]["triggers"]["groups"]["has_or"] == ["cn=test_group,dc=example,dc=com"]
assert deny_result[0]["revoke"] is True
assert deny_result[0]["name"] == "LDAP-DenyGroup"
def test_realistic_ldap_dn_format(self):
"""Test with realistic LDAP DN format."""
result = []
# Test with require group
require_group = "cn=awx_users,OU=application groups,DC=contoso,DC=com"
output_result, next_order = ldap_group_allow_to_gateway_format(result, require_group, deny=False, start_order=1)
assert len(output_result) == 1
assert output_result[0]["triggers"]["groups"]["has_and"] == ["cn=awx_users,OU=application groups,DC=contoso,DC=com"]
assert output_result[0]["name"] == "LDAP-RequireGroup"
assert next_order == 2
def test_multiple_sequential_calls(self):
"""Test multiple sequential calls to build complex allow mappers."""
result = []
# Add deny group first
result, next_order = ldap_group_allow_to_gateway_format(result, "cn=blocked,dc=example,dc=com", deny=True, start_order=1)
# Add require group second
result, next_order = ldap_group_allow_to_gateway_format(result, "cn=allowed,dc=example,dc=com", deny=False, start_order=next_order)
assert len(result) == 2
# First mapper should be deny group
assert result[0]["name"] == "LDAP-DenyGroup"
assert result[0]["revoke"] is True
assert result[0]["triggers"]["groups"]["has_or"] == ["cn=blocked,dc=example,dc=com"]
assert result[0]["order"] == 1
# Second mapper should be require group
assert result[1]["name"] == "LDAP-RequireGroup"
assert result[1]["revoke"] is False
assert result[1]["triggers"]["groups"]["has_and"] == ["cn=allowed,dc=example,dc=com"]
assert result[1]["order"] == 2
assert next_order == 3
def test_gateway_format_compliance(self):
"""Test that all results comply with Gateway allow mapping format."""
result = []
# Test both deny and require groups
result, _ = ldap_group_allow_to_gateway_format(result, "cn=denied,dc=example,dc=com", deny=True, start_order=1)
result, _ = ldap_group_allow_to_gateway_format(result, "cn=required,dc=example,dc=com", deny=False, start_order=2)
for mapping in result:
# Required fields per Gateway spec
assert "name" in mapping
assert "authenticator" in mapping
assert "map_type" in mapping
assert "triggers" in mapping
assert "order" in mapping
assert "revoke" in mapping
# Field types
assert isinstance(mapping["name"], str)
assert isinstance(mapping["authenticator"], int)
assert mapping["map_type"] == "allow"
assert isinstance(mapping["triggers"], dict)
assert isinstance(mapping["order"], int)
assert isinstance(mapping["revoke"], bool)
# Trigger format validation
assert "groups" in mapping["triggers"]
groups_trigger = mapping["triggers"]["groups"]
# Should have either has_and or has_or, but not both
has_and = "has_and" in groups_trigger
has_or = "has_or" in groups_trigger
assert has_and != has_or  # XOR - exactly one should be true
if has_and:
assert isinstance(groups_trigger["has_and"], list)
assert len(groups_trigger["has_and"]) == 1
if has_or:
assert isinstance(groups_trigger["has_or"], list)
assert len(groups_trigger["has_or"]) == 1
def test_original_result_not_modified_when_none(self):
"""Test that original result list is not modified when ldap_group is None."""
original_result = [{"original": "mapper"}]
result_copy = original_result.copy()
output_result, _ = ldap_group_allow_to_gateway_format(original_result, None, deny=False)
# Original list should be unchanged
assert original_result == result_copy
# Output should be the same reference
assert output_result is original_result
def test_empty_string_group(self):
"""Test handling of empty string group."""
result = []
output_result, next_order = ldap_group_allow_to_gateway_format(result, "", deny=False, start_order=1)
# Should still create a mapper even with empty string
assert len(output_result) == 1
assert output_result[0]["triggers"]["groups"]["has_and"] == [""]
assert next_order == 2
# Parametrized tests for ldap_group_allow_to_gateway_format
@pytest.mark.parametrize(
    "ldap_group,deny,expected_name,expected_revoke,expected_trigger_type",
    [
        ("cn=test,dc=example,dc=com", True, "LDAP-DenyGroup", True, "has_or"),
        ("cn=test,dc=example,dc=com", False, "LDAP-RequireGroup", False, "has_and"),
        ("cn=users,ou=groups,dc=company,dc=com", True, "LDAP-DenyGroup", True, "has_or"),
        ("cn=users,ou=groups,dc=company,dc=com", False, "LDAP-RequireGroup", False, "has_and"),
    ],
)
def test_ldap_group_parametrized(ldap_group, deny, expected_name, expected_revoke, expected_trigger_type):
    """Deny/require LDAP groups yield the expected mapper name, revoke flag and trigger key."""
    mappers, next_order = ldap_group_allow_to_gateway_format([], ldap_group, deny=deny, start_order=1)
    assert len(mappers) == 1
    entry = mappers[0]
    assert entry["name"] == expected_name
    assert entry["revoke"] == expected_revoke
    assert expected_trigger_type in entry["triggers"]["groups"]
    assert entry["triggers"]["groups"][expected_trigger_type] == [ldap_group]
    assert next_order == 2
def test_realistic_awx_ldap_migration_scenario():
    """Combine DENY_GROUP and REQUIRE_GROUP the way the AWX LDAP migrator does."""
    deny_group = "cn=blocked_users,OU=blocked groups,DC=contoso,DC=com"
    require_group = "cn=awx_users,OU=application groups,DC=contoso,DC=com"
    # Deny group is added first (as in the migrator), then the require group.
    mappers, next_order = ldap_group_allow_to_gateway_format([], deny_group, deny=True, start_order=1)
    mappers, next_order = ldap_group_allow_to_gateway_format(mappers, require_group, deny=False, start_order=next_order)
    # Should have 2 allow mappers
    assert len(mappers) == 2
    denied, required = mappers
    # Verify deny group mapper
    assert denied["name"] == "LDAP-DenyGroup"
    assert denied["map_type"] == "allow"
    assert denied["revoke"] is True
    assert denied["triggers"]["groups"]["has_or"] == [deny_group]
    assert denied["order"] == 1
    # Verify require group mapper
    assert required["name"] == "LDAP-RequireGroup"
    assert required["map_type"] == "allow"
    assert required["revoke"] is False
    assert required["triggers"]["groups"]["has_and"] == [require_group]
    assert required["order"] == 2
    assert next_order == 3

View File

@@ -1,511 +0,0 @@
"""
Gateway API client for AAP Gateway interactions.
This module provides a client class to interact with the AAP Gateway REST API,
specifically for creating authenticators and mapping configurations.
"""
import requests
import logging
from typing import Dict, List, Optional, Any
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
class GatewayAPIError(Exception):
    """Raised when a Gateway API call fails.

    Carries the failing HTTP status code and any decoded response body so
    callers can inspect the failure programmatically.
    """

    def __init__(self, message: str, status_code: Optional[int] = None, response_data: Optional[Dict] = None):
        # Initialize Exception first so str(err) equals the message, as before.
        super().__init__(message)
        self.message = message
        self.status_code = status_code
        self.response_data = response_data
class GatewayClient:
"""Client for AAP Gateway REST API interactions."""
def __init__(self, base_url: str, username: str, password: str, skip_verify: bool = False, skip_session_init: bool = False, command=None):
"""Initialize Gateway client.
Args:
base_url: Base URL of the AAP Gateway instance
username: Username for authentication
password: Password for authentication
skip_verify: Skip SSL certificate verification
skip_session_init: Skip initializing the session. Only set to True if you are using a base class that doesn't need the initialization of the session.
command: The command object. This is used to write output to the console.
"""
# Trailing slash is stripped so urljoin-based endpoint construction composes cleanly.
self.base_url = base_url.rstrip('/')
self.username = username
self.password = password
self.skip_verify = skip_verify
self.command = command
self.session_was_not_initialized = skip_session_init
# Initialize session
if not skip_session_init:
self.session = requests.Session()
# Configure SSL verification
if skip_verify:
self.session.verify = False
# Disable SSL warnings when verification is disabled
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Set default headers
self.session.headers.update(
{
'User-Agent': 'AWX-Gateway-Migration-Client/1.0',
'Accept': 'application/json',
'Content-Type': 'application/json',
}
)
else:
self.session = None
# Authentication state
# NOTE(review): when skip_session_init=True, self.session is None — any method
# that touches self.session assumes a subclass supplies one; confirm callers.
self._authenticated = False
def authenticate(self) -> bool:
    """Authenticate with the Gateway using HTTP Basic Authentication.

    Returns:
        bool: True if authentication successful, False otherwise

    Raises:
        GatewayAPIError: If authentication fails
    """
    try:
        # Set up HTTP Basic Authentication on the shared session
        from requests.auth import HTTPBasicAuth

        self.session.auth = HTTPBasicAuth(self.username, self.password)
        # Test authentication by making a simple request to the API
        test_url = urljoin(self.base_url, '/api/gateway/v1/authenticators/')
        response = self.session.get(test_url)
        if response.status_code in [200, 401]:  # 401 means auth is working but might need permissions
            self._authenticated = True
            logger.info("Successfully authenticated with Gateway using Basic Auth")
            return True
        error_msg = f"Authentication test failed with status {response.status_code}"
        # Decode the body exactly once: a second response.json() call on a
        # non-JSON body would raise JSONDecodeError and mask the
        # GatewayAPIError we actually want to raise.
        error_data = None
        try:
            error_data = response.json()
            error_msg += f": {error_data}"
        except requests.exceptions.JSONDecodeError:
            error_msg += f": {response.text}"
        raise GatewayAPIError(error_msg, response.status_code, error_data)
    except requests.RequestException as e:
        raise GatewayAPIError(f"Network error during authentication: {str(e)}")
def _ensure_authenticated(self):
    """Authenticate on first use; subsequent calls are no-ops."""
    if self._authenticated:
        return
    self.authenticate()
def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None, params: Optional[Dict] = None) -> requests.Response:
"""Make an authenticated request to the Gateway API.
Args:
method: HTTP method (GET, POST, PUT, DELETE, etc.)
endpoint: API endpoint (without base URL)
data: JSON data to send in request body
params: Query parameters
Returns:
requests.Response: The response object
Raises:
GatewayAPIError: If request fails
"""
# Lazily authenticate on the first request
self._ensure_authenticated()
# NOTE(review): urljoin assumes base_url carries no path component of its
# own; if it ever did, the join would replace that path - confirm with callers
url = urljoin(self.base_url, endpoint.lstrip('/'))
try:
response = self.session.request(method=method.upper(), url=url, json=data, params=params)
# Log request details
logger.debug(f"{method.upper()} {url} - Status: {response.status_code}")
return response
except requests.RequestException as e:
raise GatewayAPIError(f"Request failed: {str(e)}")
def _submit_config(self, http_method: str, endpoint: str, config: Dict[str, Any], expected_status: int, action: str, past_tense: str) -> Dict[str, Any]:
    """Send a configuration payload and validate the response.

    Shared implementation for the authenticator / authenticator-map
    create and update calls, which previously duplicated this logic.

    Args:
        http_method: HTTP method to use ('POST' for create, 'PATCH' for update)
        endpoint: API endpoint (without base URL)
        config: JSON-serializable configuration payload
        expected_status: Status code indicating success (201 create, 200 update)
        action: Infinitive phrase for error messages, e.g. 'create authenticator'
        past_tense: Past-tense phrase for the success log, e.g. 'created authenticator'

    Returns:
        dict: Decoded response body of the created/updated object

    Raises:
        GatewayAPIError: If the request fails or returns an unexpected status
    """
    try:
        response = self._make_request(http_method, endpoint, data=config)
        if response.status_code == expected_status:
            result = response.json()
            logger.info(f"Successfully {past_tense}: {result.get('name', 'Unknown')}")
            return result
        error_msg = f"Failed to {action}. Status: {response.status_code}"
        # Decode the error body exactly once: a second response.json() call on
        # a non-JSON body would raise JSONDecodeError and mask the
        # GatewayAPIError we actually want to raise.
        error_data = None
        try:
            error_data = response.json()
            error_msg += f", Error: {error_data}"
        except requests.exceptions.JSONDecodeError:
            error_msg += f", Response: {response.text}"
        raise GatewayAPIError(error_msg, response.status_code, error_data)
    except requests.RequestException as e:
        raise GatewayAPIError(f"Failed to {action}: {str(e)}")

def create_authenticator(self, authenticator_config: Dict[str, Any]) -> Dict[str, Any]:
    """Create a new authenticator in Gateway.

    Args:
        authenticator_config: Authenticator configuration dictionary

    Returns:
        dict: Created authenticator data

    Raises:
        GatewayAPIError: If creation fails
    """
    return self._submit_config('POST', '/api/gateway/v1/authenticators/', authenticator_config, 201, 'create authenticator', 'created authenticator')

def update_authenticator(self, authenticator_id: int, authenticator_config: Dict[str, Any]) -> Dict[str, Any]:
    """Update an existing authenticator in Gateway.

    Args:
        authenticator_id: ID of the authenticator to update
        authenticator_config: Authenticator configuration dictionary

    Returns:
        dict: Updated authenticator data

    Raises:
        GatewayAPIError: If update fails
    """
    endpoint = f'/api/gateway/v1/authenticators/{authenticator_id}/'
    return self._submit_config('PATCH', endpoint, authenticator_config, 200, 'update authenticator', 'updated authenticator')

def create_authenticator_map(self, authenticator_id: int, mapper_config: Dict[str, Any]) -> Dict[str, Any]:
    """Create a new authenticator map in Gateway.

    Args:
        authenticator_id: ID of the authenticator to create map for.
            NOTE(review): this argument is not used to build the request;
            callers appear to embed the id inside mapper_config - confirm.
        mapper_config: Mapper configuration dictionary

    Returns:
        dict: Created mapper data

    Raises:
        GatewayAPIError: If creation fails
    """
    return self._submit_config('POST', '/api/gateway/v1/authenticator_maps/', mapper_config, 201, 'create authenticator map', 'created authenticator map')

def update_authenticator_map(self, mapper_id: int, mapper_config: Dict[str, Any]) -> Dict[str, Any]:
    """Update an existing authenticator map in Gateway.

    Args:
        mapper_id: ID of the authenticator map to update
        mapper_config: Mapper configuration dictionary

    Returns:
        dict: Updated mapper data

    Raises:
        GatewayAPIError: If update fails
    """
    endpoint = f'/api/gateway/v1/authenticator_maps/{mapper_id}/'
    return self._submit_config('PATCH', endpoint, mapper_config, 200, 'update authenticator map', 'updated authenticator map')
def get_authenticators(self, params: Optional[Dict] = None) -> List[Dict[str, Any]]:
    """Fetch the authenticators configured in Gateway.

    Args:
        params: Optional query parameters (e.g. server-side filters)

    Returns:
        list: Authenticator configuration dicts

    Raises:
        GatewayAPIError: If the request fails
    """
    try:
        response = self._make_request('GET', '/api/gateway/v1/authenticators/', params=params)
        if response.status_code != 200:
            raise GatewayAPIError(f"Failed to get authenticators. Status: {response.status_code}", response.status_code)
        payload = response.json()
        # DRF-style paginated responses wrap the list in a 'results' key
        if isinstance(payload, dict) and 'results' in payload:
            return payload['results']
        return payload
    except requests.RequestException as e:
        raise GatewayAPIError(f"Failed to get authenticators: {str(e)}")
def get_authenticator_by_slug(self, slug: str) -> Optional[Dict[str, Any]]:
    """Look up a single authenticator by its (unique) slug.

    Args:
        slug: The authenticator slug to search for

    Returns:
        dict: The authenticator data if found, None otherwise

    Raises:
        GatewayAPIError: If the request fails
    """
    try:
        # Filter server-side rather than paging through every authenticator
        matches = self.get_authenticators(params={'slug': slug})
        # Slugs are unique, so the first hit (if any) is the one we want
        return matches[0] if matches else None
    except GatewayAPIError:
        # Propagate Gateway API errors unchanged
        raise
    except Exception as e:
        raise GatewayAPIError(f"Failed to get authenticator by slug: {str(e)}")
def get_authenticator_maps(self, authenticator_id: int) -> List[Dict[str, Any]]:
    """Fetch the maps attached to one authenticator.

    Args:
        authenticator_id: ID of the authenticator

    Returns:
        list: Authenticator map dicts

    Raises:
        GatewayAPIError: If the request fails
    """
    maps_endpoint = f'/api/gateway/v1/authenticators/{authenticator_id}/authenticator_maps/'
    try:
        response = self._make_request('GET', maps_endpoint)
        if response.status_code != 200:
            raise GatewayAPIError(f"Failed to get authenticator maps. Status: {response.status_code}", response.status_code)
        payload = response.json()
        # DRF-style paginated responses wrap the list in a 'results' key
        if isinstance(payload, dict) and 'results' in payload:
            return payload['results']
        return payload
    except requests.RequestException as e:
        raise GatewayAPIError(f"Failed to get authenticator maps: {str(e)}")
def create_github_authenticator(
    self, name: str, client_id: str, client_secret: str, enabled: bool = True, create_objects: bool = False, remove_users: bool = False
) -> Dict[str, Any]:
    """Create a GitHub OAuth authenticator in Gateway.

    Args:
        name: Name for the authenticator
        client_id: GitHub OAuth App Client ID
        client_secret: GitHub OAuth App Client Secret
        enabled: Whether authenticator should be enabled
        create_objects: Whether to create users/orgs/teams automatically
        remove_users: Whether to remove users when they lose access

    Returns:
        dict: Created authenticator data
    """
    github_config = {
        "name": name,
        "type": "ansible_base.authentication.authenticator_plugins.github",
        "enabled": enabled,
        "create_objects": create_objects,
        "remove_users": remove_users,
        # OAuth app credentials live under 'configuration' on the Gateway side
        "configuration": {"KEY": client_id, "SECRET": client_secret},
    }
    return self.create_authenticator(github_config)
def update_gateway_setting(self, setting_name: str, setting_value: Any) -> Dict[str, Any]:
    """Update a Gateway setting via the settings API.

    Args:
        setting_name: Name of the setting to update
        setting_value: Value to set for the setting

    Returns:
        dict: Upon successful update, well formed responses are returned, otherwise the original payload is returned.

    Raises:
        GatewayAPIError: If update fails, anything other than a 200 or 204 response code.
    """
    endpoint = '/api/gateway/v1/settings/all/'
    # Create the JSON payload with the setting name and value
    payload = {setting_name: setting_value}
    try:
        response = self._make_request('PUT', endpoint, data=payload)
        if response.status_code in [200, 204]:
            logger.info(f"Successfully updated Gateway setting: {setting_name}")
            # Return the response data if available, otherwise return the payload
            if response.content:
                try:
                    return response.json()
                except requests.exceptions.JSONDecodeError:
                    return payload
            return payload
        error_msg = f"Failed to update Gateway setting. Status: {response.status_code}"
        # Decode the error body exactly once: a second response.json() call on
        # a non-JSON body would raise JSONDecodeError and mask the
        # GatewayAPIError we actually want to raise.
        error_data = None
        try:
            error_data = response.json()
            error_msg += f", Error: {error_data}"
        except requests.exceptions.JSONDecodeError:
            error_msg += f", Response: {response.text}"
        raise GatewayAPIError(error_msg, response.status_code, error_data)
    except requests.RequestException as e:
        raise GatewayAPIError(f"Failed to update Gateway setting: {str(e)}")
def get_gateway_setting(self, setting_name: str) -> Any:
"""Get a Gateway setting value via the settings API.
Args:
setting_name: Name of the setting to retrieve
Returns:
Any: The value of the setting, or None if not found
Raises:
GatewayAPIError: If request fails
"""
# The 'all' endpoint returns every setting in a single mapping
endpoint = '/api/gateway/v1/settings/all/'
try:
response = self._make_request('GET', endpoint)
if response.status_code == 200:
settings_data = response.json()
logger.info("Successfully retrieved Gateway settings")
# Return the specific setting value or None if not found
return settings_data.get(setting_name)
else:
error_msg = f"Failed to get Gateway settings from '{endpoint}' for '{setting_name}'. Status: {response.status_code}"
# Default to the raw text body in case the payload is not valid JSON
error_data = response.text
try:
error_data = response.json()
error_msg += f", Error: {error_data}"
except requests.exceptions.JSONDecodeError:
error_msg += f", Response: {response.text}"
raise GatewayAPIError(error_msg, response.status_code, error_data)
except requests.RequestException as e:
raise GatewayAPIError(f"Failed to get Gateway settings from '{endpoint}' for '{setting_name}'. Unexpected Exception - Error: {str(e)}")
def get_base_url(self) -> str:
    """Return the Gateway base URL this client was configured with."""
    return self.base_url
def close(self):
    """Release the underlying HTTP session, if one was created."""
    if self.session is not None:
        self.session.close()
def __enter__(self):
    """Support ``with GatewayClient(...) as client:`` usage."""
    return self

def __exit__(self, exc_type, exc_val, exc_tb):
    """Close the session when the ``with`` block exits."""
    self.close()
def _write_output(self, message, style=None):
    """Echo a message through the management command's stdout, if attached.

    Args:
        message: Text to write
        style: One of 'success', 'warning', 'error', or None for plain output
    """
    if not self.command:
        return
    # Map the style name onto the Django management-command color palette
    if style == 'success':
        styled = self.command.style.SUCCESS(message)
    elif style == 'warning':
        styled = self.command.style.WARNING(message)
    elif style == 'error':
        styled = self.command.style.ERROR(message)
    else:
        styled = message
    self.command.stdout.write(styled)

View File

@@ -1,77 +0,0 @@
"""
Gateway API client for AAP Gateway interactions with Service Tokens.
This module provides a client class to interact with the AAP Gateway REST API,
specifically for creating authenticators and mapping configurations.
"""
import requests
import logging
from typing import Dict, Optional
from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError
logger = logging.getLogger(__name__)
class GatewayClientSVCToken(GatewayClient):
    """Gateway REST client that authenticates with JWT service tokens.

    Transport is delegated to a resource API client instead of managing a
    requests.Session of its own.
    """

    def __init__(self, resource_api_client=None, command=None):
        """Initialize the service-token Gateway client.

        Args:
            resource_api_client: Resource API Client for Gateway leveraging service tokens
            command: Optional management command used for console output
        """
        super().__init__(
            base_url=resource_api_client.base_url,
            username=resource_api_client.jwt_user_id,
            password="required-in-GatewayClient-authenticate()-but-unused-by-GatewayClientSVCToken",
            skip_verify=(not resource_api_client.verify_https),
            skip_session_init=True,
            command=command,
        )
        self.resource_api_client = resource_api_client
        # Service tokens need no explicit login step, so start authenticated
        self._authenticated = True

    def authenticate(self) -> bool:
        """Overload the base class method to always return True.

        Returns:
            bool: True always
        """
        return True

    def _ensure_authenticated(self):
        """Refresh the JWT service token before each request."""
        self.resource_api_client.refresh_jwt()

    def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None, params: Optional[Dict] = None) -> requests.Response:
        """Issue a service-token-authenticated request to the Gateway API.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE, etc.)
            endpoint: API endpoint (without base URL)
            data: JSON data to send in request body
            params: Query parameters

        Returns:
            requests.Response: The response object

        Raises:
            GatewayAPIError: If request fails
        """
        self._ensure_authenticated()
        try:
            response = self.resource_api_client._make_request(method=method, path=endpoint, data=data, params=params)
            # Log request details
            logger.debug(f"{method.upper()} {self.base_url}{endpoint} - Status: {response.status_code}")
            return response
        except requests.RequestException as e:
            raise GatewayAPIError(f"Request failed: {str(e)}")

View File

@@ -1,361 +0,0 @@
"""
Gateway mapping conversion utilities.
This module contains functions to convert AWX authentication mappings
(organization and team mappings) to AAP Gateway format.
"""
import re
from typing import cast, Any, Literal, Pattern, Union
# Conservative email check used to tell email addresses apart from usernames
email_regex = re.compile(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$")
def truncate_name(name: str, max_length: int = 128) -> str:
    """Clip ``name`` to at most ``max_length`` characters."""
    return name if len(name) <= max_length else name[:max_length]
def build_truncated_name(org_name: str, entity_name: str, trigger_name: str, max_component_length: int = 40) -> str:
    """Join org/entity/trigger into a display name, clipping each component.

    Each component is truncated independently to ``max_component_length``
    (slicing, equivalent to truncate_name) so no single long value can
    crowd out the others.
    """
    org, entity, trigger = (part[:max_component_length] for part in (org_name, entity_name, trigger_name))
    return f"{org} - {entity} {trigger}"
def pattern_to_slash_format(pattern: Any) -> str:
    """Render a compiled regex as ``/pattern/flags``.

    Only the i, m, s, x flags are emitted (in that fixed order); any other
    input is passed through ``str()`` unchanged.
    """
    if not isinstance(pattern, re.Pattern):
        return str(pattern)
    flag_letters = ((re.IGNORECASE, "i"), (re.MULTILINE, "m"), (re.DOTALL, "s"), (re.VERBOSE, "x"))
    flags_str = "".join(letter for flag, letter in flag_letters if pattern.flags & flag)
    return f"/{pattern.pattern}/{flags_str}"
def process_ldap_user_list(
    groups: Union[None, str, bool, list[Union[None, str, bool]]],
) -> list[dict[str, Any]]:
    """Translate an LDAP users value into Gateway trigger dicts.

    Returns an empty list when the value is None (meaning: do not manage
    membership from LDAP) or an empty list; otherwise a single trigger:
    always/never for a lone boolean, or a group-membership match.
    """
    normalized = groups if isinstance(groups, list) else [groups]
    if normalized in ([None], []):
        # None means "leave membership alone"; [] means no triggers at all
        return []
    if normalized == [True]:
        return [{"name": "Always Allow", "trigger": {"always": {}}}]
    if normalized == [False]:
        return [{"name": "Never Allow", "trigger": {"never": {}}}]
    return [{"name": "Match User Groups", "trigger": {"groups": {"has_or": normalized}}}]
def process_sso_user_list(
users: Union[str, bool, Pattern[str], list[Union[str, bool, Pattern[str]]]], email_attr: str = 'email', username_attr: str = 'username'
) -> dict[str, Union[str, dict[str, dict[str, Union[str, list[str]]]]]]:
"""Process SSO user list and return a single consolidated trigger instead of multiple separate ones.
Args:
users: A scalar or list of usernames, emails, booleans, or compiled regexes
email_attr: The attribute name representing the email
username_attr: The attribute name representing the username
Returns:
dict: One trigger dict with a deterministic 'name' and a 'trigger' body
"""
# Normalize scalars to a one-element list so the logic below is uniform
if not isinstance(users, list):
users = [users]
# Type cast to help mypy understand the type after conversion
user_list: list[Union[str, bool, Pattern[str]]] = cast(list[Union[str, bool, Pattern[str]]], users)
# Lone booleans (or their string forms) short-circuit to never/always triggers
if user_list == ["false"] or user_list == [False]:
return {"name": "Never Allow", "trigger": {"never": {}}}
elif user_list == ["true"] or user_list == [True]:
return {"name": "Always Allow", "trigger": {"always": {}}}
else:
# Group users by type
emails = []
usernames = []
regexes_username = []
regexes_email = []
for user_or_email in user_list:
if isinstance(user_or_email, re.Pattern):
# A regex applies to both the username and the email attribute
pattern_str = pattern_to_slash_format(user_or_email)
regexes_username.append(pattern_str)
regexes_email.append(pattern_str)
elif isinstance(user_or_email, str):
if email_regex.match(user_or_email):
emails.append(user_or_email)
else:
usernames.append(user_or_email)
else:
# Convert other objects to string and treat as both
str_val = str(user_or_email)
usernames.append(str_val)
emails.append(str_val)
# Build consolidated trigger
attributes = {"join_condition": "or"}
if emails:
if len(emails) == 1:
attributes[email_attr] = {"equals": emails[0]}
else:
attributes[email_attr] = {"in": emails}
if usernames:
if len(usernames) == 1:
attributes[username_attr] = {"equals": usernames[0]}
else:
attributes[username_attr] = {"in": usernames}
# For regex patterns, we need to create separate matches conditions since there's no matches_or
# NOTE(review): with exactly one pattern, pattern_key equals the plain attr name, so the
# 'matches' key is merged into any existing equals/in dict for that attr - confirm intended
for i, pattern in enumerate(regexes_username):
pattern_key = f"{username_attr}_pattern_{i}" if len(regexes_username) > 1 else username_attr
if pattern_key not in attributes:
attributes[pattern_key] = {}
attributes[pattern_key]["matches"] = pattern
for i, pattern in enumerate(regexes_email):
pattern_key = f"{email_attr}_pattern_{i}" if len(regexes_email) > 1 else email_attr
if pattern_key not in attributes:
attributes[pattern_key] = {}
attributes[pattern_key]["matches"] = pattern
# Create a deterministic, concise name based on trigger types and counts
name_parts = []
if emails:
name_parts.append(f"E:{len(emails)}")
if usernames:
name_parts.append(f"U:{len(usernames)}")
if regexes_username:
name_parts.append(f"UP:{len(regexes_username)}")
if regexes_email:
name_parts.append(f"EP:{len(regexes_email)}")
name = " ".join(name_parts) if name_parts else "Mixed Rules"
return {"name": name, "trigger": {"attributes": attributes}}
def team_map_to_gateway_format(team_map, start_order=1, email_attr: str = 'email', username_attr: str = 'username', auth_type: Literal['sso', 'ldap'] = 'sso'):
"""Convert AWX team mapping to Gateway authenticator format.
Args:
team_map: The SOCIAL_AUTH_*_TEAM_MAP setting value
start_order: Starting order value for the mappers
email_attr: The attribute representing the email
username_attr: The attribute representing the username
auth_type: Whether the source mapping came from an SSO or LDAP backend
Returns:
tuple: (List of Gateway-compatible team mappers, next_order)
"""
if team_map is None:
return [], start_order
result = []
order = start_order
for team_name in team_map.keys():
team = team_map[team_name]
# TODO: Confirm that if we have None with remove we still won't remove
if team['users'] is None:
continue
# Get the organization name
organization_name = team.get('organization', 'Unknown')
# Check for remove flag
revoke = team.get('remove', False)
if auth_type == 'ldap':
# LDAP values may expand to several triggers - one mapper per trigger
triggers = process_ldap_user_list(team['users'])
for trigger in triggers:
result.append(
{
"name": build_truncated_name(organization_name, team_name, trigger['name']),
"map_type": "team",
"order": order,
"authenticator": -1,  # Will be updated when creating the mapper
"triggers": trigger['trigger'],
"organization": organization_name,
"team": team_name,
"role": "Team Member",  # Gateway team member role
"revoke": revoke,
}
)
order += 1
if auth_type == 'sso':
# SSO values consolidate into a single trigger, so one mapper per team
trigger = process_sso_user_list(team['users'], email_attr=email_attr, username_attr=username_attr)
result.append(
{
"name": build_truncated_name(organization_name, team_name, trigger['name']),
"map_type": "team",
"order": order,
"authenticator": -1,  # Will be updated when creating the mapper
"triggers": trigger['trigger'],
"organization": organization_name,
"team": team_name,
"role": "Team Member",  # Gateway team member role
"revoke": revoke,
}
)
order += 1
return result, order
def org_map_to_gateway_format(org_map, start_order=1, email_attr: str = 'email', username_attr: str = 'username', auth_type: Literal['sso', 'ldap'] = 'sso'):
"""Convert AWX organization mapping to Gateway authenticator format.
Args:
org_map: The SOCIAL_AUTH_*_ORGANIZATION_MAP setting value
start_order: Starting order value for the mappers
email_attr: The attribute representing the email
username_attr: The attribute representing the username
auth_type: Whether the source mapping came from an SSO or LDAP backend
Returns:
tuple: (List of Gateway-compatible organization mappers, next_order)
"""
if org_map is None:
return [], start_order
result = []
order = start_order
for organization_name in org_map.keys():
organization = org_map[organization_name]
# Each org can map both its admins and its plain users independently
for user_type in ['admins', 'users']:
if organization.get(user_type, None) is None:
# TODO: Confirm that if we have None with remove we still won't remove
continue
# Get the permission type
permission_type = user_type.title()
# Map AWX admin/users to appropriate Gateway organization roles
role = "Organization Admin" if user_type == "admins" else "Organization Member"
# Check for remove flags
revoke = False
if organization.get(f"remove_{user_type}"):
revoke = True
if auth_type == 'ldap':
# LDAP values may expand to several triggers - one mapper per trigger
triggers = process_ldap_user_list(organization[user_type])
for trigger in triggers:
result.append(
{
"name": build_truncated_name(organization_name, permission_type, trigger['name']),
"map_type": "organization",
"order": order,
"authenticator": -1,  # Will be updated when creating the mapper
"triggers": trigger['trigger'],
"organization": organization_name,
"team": None,  # Organization-level mapping, not team-specific
"role": role,
"revoke": revoke,
}
)
order += 1
if auth_type == 'sso':
# SSO values consolidate into a single trigger/mapper
trigger = process_sso_user_list(organization[user_type], email_attr=email_attr, username_attr=username_attr)
result.append(
{
"name": build_truncated_name(organization_name, permission_type, trigger['name']),
"map_type": "organization",
"order": order,
"authenticator": -1,  # Will be updated when creating the mapper
"triggers": trigger['trigger'],
"organization": organization_name,
"team": None,  # Organization-level mapping, not team-specific
"role": role,
"revoke": revoke,
}
)
order += 1
return result, order
def role_map_to_gateway_format(role_map, start_order=1):
    """Convert AWX role mapping to Gateway authenticator format.

    Args:
        role_map: An LDAP or SAML role mapping (flag name -> group or list of groups)
        start_order: Starting order value for the mappers

    Returns:
        tuple: (List of Gateway-compatible role mappers, next_order)
    """
    if role_map is None:
        return [], start_order
    result = []
    order = start_order
    for flag, groups in role_map.items():
        # Accept a single group DN as shorthand for a one-element list
        if isinstance(groups, str):
            groups = [groups]
        if flag == 'is_superuser':
            # Gateway has a special map_type for superusers
            result.append(
                {
                    "name": f"{flag} - role",
                    "authenticator": -1,  # Will be updated when creating the mapper
                    "revoke": True,
                    "map_type": flag,
                    "team": None,
                    "organization": None,
                    "triggers": {
                        "groups": {
                            "has_or": groups,
                        }
                    },
                    "order": order,
                }
            )
            # Increment after every emitted mapper so orders stay unique
            # (previously only the auditor branch advanced the counter)
            order += 1
        elif flag == 'is_system_auditor':
            # roles other than superuser must be represented as a generic role mapper
            result.append(
                {
                    "name": f"{flag} - role",
                    "authenticator": -1,  # Will be updated when creating the mapper
                    "revoke": True,
                    "map_type": "role",
                    "role": "Platform Auditor",
                    "team": None,
                    "organization": None,
                    "triggers": {
                        "groups": {
                            "has_or": groups,
                        }
                    },
                    "order": order,
                }
            )
            order += 1
    return result, order

View File

@@ -73,4 +73,5 @@ AWX_DISABLE_TASK_MANAGERS = False
def set_dev_flags(settings):
    """Force-enable development feature flags on top of any configured FLAGS.

    Note: mutates the FLAGS dict obtained from ``settings`` in place and
    returns it wrapped in a settings-style mapping.
    """
    flags = settings.get("FLAGS", {})
    for feature in ('FEATURE_INDIRECT_NODE_COUNTING_ENABLED', 'FEATURE_DISPATCHERD_ENABLED'):
        flags[feature] = [{'condition': 'boolean', 'value': True}]
    return {'FLAGS': flags}

View File

@@ -23,8 +23,13 @@ ALLOWED_HOSTS = []
# only used for deprecated fields and management commands for them
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
# Switch to a writable location for the dispatcher sockfile location
DISPATCHERD_DEBUGGING_SOCKFILE = os.path.realpath('/var/run/tower/dispatcherd.sock')
# Very important that this is editable (not read_only) in the API
# NOTE(review): entries look like container mount specs (src:dest:options);
# the trailing ':O' presumably requests an overlay mount - confirm
AWX_ISOLATION_SHOW_PATHS = [
'/etc/pki/ca-trust:/etc/pki/ca-trust:O',
'/usr/share/pki:/usr/share/pki:O',
]
# Keep the module namespace clean so 'os' does not leak as a setting
del os

View File

@@ -1,469 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
# Python
from collections import OrderedDict
import logging
import uuid
import ldap
# Django
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings as django_settings
from django.core.signals import setting_changed
from django.utils.encoding import force_str
from django.http import HttpResponse
# django-auth-ldap
from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings
from django_auth_ldap.backend import LDAPBackend as BaseLDAPBackend
from django_auth_ldap.backend import populate_user
from django.core.exceptions import ImproperlyConfigured
# radiusauth
from radiusauth.backends import RADIUSBackend as BaseRADIUSBackend
# tacacs+ auth
import tacacs_plus
# social
from social_core.backends.saml import OID_USERID
from social_core.backends.saml import SAMLAuth as BaseSAMLAuth
from social_core.backends.saml import SAMLIdentityProvider as BaseSAMLIdentityProvider
# Ansible Tower
from awx.sso.models import UserEnterpriseAuth
from awx.sso.common import create_org_and_teams, reconcile_users_org_team_mappings
logger = logging.getLogger('awx.sso.backends')
class LDAPSettings(BaseLDAPSettings):
"""LDAP settings with AWX-specific defaults and TLS option ordering fixes."""
# Extend upstream defaults with AWX-specific mapping settings
defaults = dict(list(BaseLDAPSettings.defaults.items()) + list({'ORGANIZATION_MAP': {}, 'TEAM_MAP': {}, 'GROUP_TYPE_PARAMS': {}}.items()))
def __init__(self, prefix='AUTH_LDAP_', defaults={}):
super(LDAPSettings, self).__init__(prefix, defaults)
# If a DB-backed setting is specified that wipes out the
# OPT_NETWORK_TIMEOUT, fall back to a sane default
if ldap.OPT_NETWORK_TIMEOUT not in getattr(self, 'CONNECTION_OPTIONS', {}):
options = getattr(self, 'CONNECTION_OPTIONS', {})
options[ldap.OPT_NETWORK_TIMEOUT] = 30
self.CONNECTION_OPTIONS = options
# when specifying `.set_option()` calls for TLS in python-ldap, the
# *order* in which you invoke them *matters*, particularly in Python3,
# where dictionary insertion order is persisted
#
# specifically, it is *critical* that `ldap.OPT_X_TLS_NEWCTX` be set *last*
# this manual sorting puts `OPT_X_TLS_NEWCTX` *after* other TLS-related
# options
#
# see: https://github.com/python-ldap/python-ldap/issues/55
newctx_option = self.CONNECTION_OPTIONS.pop(ldap.OPT_X_TLS_NEWCTX, None)
self.CONNECTION_OPTIONS = OrderedDict(self.CONNECTION_OPTIONS)
if newctx_option is not None:
self.CONNECTION_OPTIONS[ldap.OPT_X_TLS_NEWCTX] = newctx_option
class LDAPBackend(BaseLDAPBackend):
"""
Custom LDAP backend for AWX.
"""
settings_prefix = 'AUTH_LDAP_'
def __init__(self, *args, **kwargs):
# Unique dispatch uid so each backend instance gets its own signal hookup
self._dispatch_uid = uuid.uuid4()
super(LDAPBackend, self).__init__(*args, **kwargs)
setting_changed.connect(self._on_setting_changed, dispatch_uid=self._dispatch_uid)
def _on_setting_changed(self, sender, **kwargs):
# If any AUTH_LDAP_* setting changes, force settings to be reloaded for
# this backend instance.
if kwargs.get('setting', '').startswith(self.settings_prefix):
self._settings = None
def _get_settings(self):
# Lazily (re)build the settings object after invalidation
if self._settings is None:
self._settings = LDAPSettings(self.settings_prefix)
return self._settings
def _set_settings(self, settings):
self._settings = settings
settings = property(_get_settings, _set_settings)
def authenticate(self, request, username, password):
if self.settings.START_TLS and ldap.OPT_X_TLS_REQUIRE_CERT in self.settings.CONNECTION_OPTIONS:
# with python-ldap, if you want to set connection-specific TLS
# parameters, you must also specify OPT_X_TLS_NEWCTX = 0
# see: https://stackoverflow.com/a/29722445
# see: https://stackoverflow.com/a/38136255
self.settings.CONNECTION_OPTIONS[ldap.OPT_X_TLS_NEWCTX] = 0
# No server configured means LDAP auth is effectively disabled
if not self.settings.SERVER_URI:
return None
try:
# Refuse to LDAP-authenticate an existing local (non-LDAP) user
user = User.objects.get(username=username)
if user and (not user.profile or not user.profile.ldap_dn):
return None
except User.DoesNotExist:
pass
try:
for setting_name, type_ in [('GROUP_SEARCH', 'LDAPSearch'), ('GROUP_TYPE', 'LDAPGroupType')]:
if getattr(self.settings, setting_name) is None:
raise ImproperlyConfigured("{} must be an {} instance.".format(setting_name, type_))
ldap_user = super(LDAPBackend, self).authenticate(request, username, password)
# If we have an LDAP user and that user we found has an ldap_user internal object and that object has a bound connection
# Then we can try and force an unbind to close the sticky connection
if ldap_user and ldap_user.ldap_user and ldap_user.ldap_user._connection_bound:
logger.debug("Forcing LDAP connection to close")
try:
ldap_user.ldap_user._connection.unbind_s()
ldap_user.ldap_user._connection_bound = False
except Exception:
logger.exception(f"Got unexpected LDAP exception when forcing LDAP disconnect for user {ldap_user}, login will still proceed")
return ldap_user
except Exception:
# Any failure is treated as an authentication miss, not a hard error
logger.exception("Encountered an error authenticating to LDAP")
return None
def get_user(self, user_id):
if not self.settings.SERVER_URI:
return None
return super(LDAPBackend, self).get_user(user_id)
# Disable any LDAP based authorization / permissions checking.
def has_perm(self, user, perm, obj=None):
return False
def has_module_perms(self, user, app_label):
return False
def get_all_permissions(self, user, obj=None):
return set()
def get_group_permissions(self, user, obj=None):
return set()
# Each numbered backend reads its own settings namespace (AUTH_LDAP_<n>_*),
# allowing up to five LDAP servers to be configured independently.
class LDAPBackend1(LDAPBackend):
settings_prefix = 'AUTH_LDAP_1_'
class LDAPBackend2(LDAPBackend):
settings_prefix = 'AUTH_LDAP_2_'
class LDAPBackend3(LDAPBackend):
settings_prefix = 'AUTH_LDAP_3_'
class LDAPBackend4(LDAPBackend):
settings_prefix = 'AUTH_LDAP_4_'
class LDAPBackend5(LDAPBackend):
settings_prefix = 'AUTH_LDAP_5_'
def _decorate_enterprise_user(user, provider):
    """Mark a user as enterprise-managed for the given provider.

    Clears any usable local password (enterprise users authenticate
    externally), persists the user, and ensures a UserEnterpriseAuth
    record exists for the provider.
    """
    user.set_unusable_password()
    user.save()
    record, _created = UserEnterpriseAuth.objects.get_or_create(user=user, provider=provider)
    return record
def _get_or_set_enterprise_user(username, password, provider):
    """Fetch the named user, creating an enterprise user on first login.

    Returns the user when it was just created or is already associated with
    ``provider``; otherwise logs a warning and returns None (``password`` is
    accepted for signature compatibility but not consulted here).
    """
    try:
        user = User.objects.prefetch_related('enterprise_auth').get(username=username)
    except User.DoesNotExist:
        # First login through this provider: create a passwordless enterprise user.
        new_user = User(username=username)
        auth_record = _decorate_enterprise_user(new_user, provider)
        logger.debug("Created enterprise user %s via %s backend." % (username, auth_record.get_provider_display()))
        return new_user
    if user.is_in_enterprise_category(provider):
        return user
    # Existing non-enterprise (or other-provider) account: refuse to take it over.
    logger.warning("Enterprise user %s already defined in Tower." % username)
class RADIUSBackend(BaseRADIUSBackend):
    """
    Custom RADIUS backend to verify license status.

    Active only when RADIUS_SERVER is configured, and only resolves accounts
    without a usable local password (i.e. enterprise users).
    """

    def authenticate(self, request, username, password):
        """Delegate to django-radius, but only when a RADIUS server is configured."""
        if not django_settings.RADIUS_SERVER:
            return None
        return super(RADIUSBackend, self).authenticate(request, username, password)

    def get_user(self, user_id):
        """Return the user only if RADIUS is configured and the account is passwordless."""
        if not django_settings.RADIUS_SERVER:
            return None
        user = super(RADIUSBackend, self).get_user(user_id)
        if not user.has_usable_password():
            return user
        # Accounts with a local password are not served by this backend.
        # (Previously an implicit ``None`` fall-through; made explicit.)
        return None

    def get_django_user(self, username, password=None, groups=None, is_staff=False, is_superuser=False):
        """Map a RADIUS identity onto a local enterprise user record.

        ``groups``, ``is_staff`` and ``is_superuser`` are accepted for
        signature compatibility with django-radius but are not used here.
        BUGFIX: the default for ``groups`` was a shared mutable list (``[]``);
        since the argument is unused, switching to ``None`` is behavior-neutral.
        """
        return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius')
class TACACSPlusBackend(object):
    """
    Custom TACACS+ auth backend for AWX.

    Active only when TACACSPLUS_HOST is configured.
    """

    def authenticate(self, request, username, password):
        """Authenticate against the configured TACACS+ server.

        Returns the matching enterprise user on success, otherwise None.
        """
        if not django_settings.TACACSPLUS_HOST:
            return None
        try:
            # Upstream TACACS+ client does not accept non-string, so convert if needed.
            tacacs_client = tacacs_plus.TACACSClient(
                django_settings.TACACSPLUS_HOST,
                django_settings.TACACSPLUS_PORT,
                django_settings.TACACSPLUS_SECRET,
                timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,
            )
            auth_kwargs = {'authen_type': tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL]}
            # BUGFIX: forwarding the client address is controlled by the
            # TACACSPLUS_REM_ADDR setting — not by TACACSPLUS_AUTH_PROTOCOL,
            # which is always set (it is indexed above) and so made the old
            # check an always-on no-op toggle.
            if django_settings.TACACSPLUS_REM_ADDR:
                client_ip = self._get_client_ip(request)
                if client_ip:
                    auth_kwargs['rem_addr'] = client_ip
            auth = tacacs_client.authenticate(username, password, **auth_kwargs)
        except Exception as e:
            logger.exception("TACACS+ Authentication Error: %s" % str(e))
            return None
        if auth.valid:
            return _get_or_set_enterprise_user(username, password, 'tacacs+')
        # Invalid credentials: fall through to other backends.
        return None

    def get_user(self, user_id):
        """Resolve a user by primary key; disabled unless TACACS+ is configured."""
        if not django_settings.TACACSPLUS_HOST:
            return None
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None

    def _get_client_ip(self, request):
        """Best-effort client IP from the request, honoring X-Forwarded-For."""
        if not request or not hasattr(request, 'META'):
            return None
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            # First entry is the originating client when proxies append addresses.
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        return ip
class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
    """
    Custom Identity Provider that normalizes SAML attributes to what we expect.
    """

    def get_user_permanent_id(self, attributes):
        """Return the permanent user id, unwrapping a single-element list if needed."""
        uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)]
        return uid if isinstance(uid, str) else uid[0]

    def get_attr(self, attributes, conf_key, default_attribute):
        """
        Get the attribute 'default_attribute' out of the attributes,
        unless self.conf[conf_key] overrides the default by specifying
        another attribute to use.
        """
        key = self.conf.get(conf_key, default_attribute)
        if key in attributes:
            value = attributes[key]
        else:
            value = None
        # In certain implementations (like https://pagure.io/ipsilon) this value is a string, not a list
        if isinstance(value, (list, tuple)):
            value = value[0]
        # Warn when a core user detail could not be mapped so admins can fix the IdP config.
        if value is None and conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email'):
            logger.warning(
                "Could not map user detail '%s' from SAML attribute '%s'; update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
                conf_key[5:],
                key,
                self.name,
                conf_key,
            )
        if value is None:
            return None
        return str(value)
class SAMLAuth(BaseSAMLAuth):
    """
    Custom SAMLAuth backend to verify license status
    """

    # Settings that must all be truthy for SAML auth to be considered configured.
    # (Previously this list was duplicated verbatim in authenticate() and get_user().)
    _REQUIRED_SAML_SETTINGS = (
        'SOCIAL_AUTH_SAML_SP_ENTITY_ID',
        'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
        'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
        'SOCIAL_AUTH_SAML_ORG_INFO',
        'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
        'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
    )

    @classmethod
    def _saml_configured(cls):
        """True when every required SAML setting has a truthy value."""
        return all(getattr(django_settings, name) for name in cls._REQUIRED_SAML_SETTINGS)

    def get_idp(self, idp_name):
        """Build our customized identity-provider wrapper for ``idp_name``."""
        idp_config = self.setting('ENABLED_IDPS')[idp_name]
        return TowerSAMLIdentityProvider(idp_name, **idp_config)

    def authenticate(self, request, *args, **kwargs):
        """Run the social-auth SAML pipeline, restricted to enterprise users.

        Returns an HttpResponse pass-through, a user, or None.
        """
        if not self._saml_configured():
            return None
        pipeline_result = super(SAMLAuth, self).authenticate(request, *args, **kwargs)
        if isinstance(pipeline_result, HttpResponse):
            return pipeline_result
        else:
            user = pipeline_result
        # Comes from https://github.com/omab/python-social-auth/blob/v0.2.21/social/backends/base.py#L91
        if getattr(user, 'is_new', False):
            enterprise_auth = _decorate_enterprise_user(user, 'saml')
            logger.debug("Created enterprise user %s from %s backend." % (user.username, enterprise_auth.get_provider_display()))
        elif user and not user.is_in_enterprise_category('saml'):
            # Existing non-SAML account: refuse to authenticate it via SAML.
            return None
        if user:
            logger.debug("Enterprise user %s already created in Tower." % user.username)
        return user

    def get_user(self, user_id):
        """Resolve a user by primary key; disabled unless SAML is fully configured."""
        if not self._saml_configured():
            return None
        return super(SAMLAuth, self).get_user(user_id)
def _update_m2m_from_groups(ldap_user, opts, remove=True):
"""
Hepler function to evaluate the LDAP team/org options to determine if LDAP user should
be a member of the team/org based on their ldap group dns.
Returns:
True - User should be added
False - User should be removed
None - Users membership should not be changed
"""
if opts is None:
return None
elif not opts:
pass
elif isinstance(opts, bool) and opts is True:
return True
else:
if isinstance(opts, str):
opts = [opts]
# If any of the users groups matches any of the list options
for group_dn in opts:
if not isinstance(group_dn, str):
continue
if ldap_user._get_groups().is_member_of(group_dn):
return True
if remove:
return False
return None
@receiver(populate_user, dispatch_uid='populate-ldap-user')
def on_populate_user(sender, **kwargs):
    """
    Handle signal from LDAP backend to populate the user object. Update user
    organization/team memberships according to their LDAP groups.
    """
    user = kwargs['user']
    ldap_user = kwargs['ldap_user']
    backend = ldap_user.backend

    # Boolean to determine if we should force an user update
    # to avoid duplicate SQL update statements
    force_user_update = False

    # Prefetch user's groups to prevent LDAP queries for each org/team when
    # checking membership.
    ldap_user._get_groups().get_group_dns()

    # If the LDAP user has a first or last name > $maxlen chars, truncate it
    for field in ('first_name', 'last_name'):
        max_len = User._meta.get_field(field).max_length
        field_len = len(getattr(user, field))
        if field_len > max_len:
            setattr(user, field, getattr(user, field)[:max_len])
            force_user_update = True
            logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

    org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
    team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
    orgs_list = list(org_map.keys())
    # Map of team name -> organization name, used when pre-creating teams below.
    team_map = {}
    for team_name, team_opts in team_map_settings.items():
        if not team_opts.get('organization', None):
            # You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition its an error
            logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
            continue
        team_map[team_name] = team_opts['organization']

    create_org_and_teams(orgs_list, team_map, 'LDAP')

    # Compute in memory what the state is of the different LDAP orgs
    org_roles_and_ldap_attributes = {'admin_role': 'admins', 'auditor_role': 'auditors', 'member_role': 'users'}
    desired_org_states = {}
    for org_name, org_opts in org_map.items():
        # Per-org 'remove' default can be overridden per-role via 'remove_<attr>'.
        remove = bool(org_opts.get('remove', True))
        desired_org_states[org_name] = {}
        for org_role_name in org_roles_and_ldap_attributes.keys():
            ldap_name = org_roles_and_ldap_attributes[org_role_name]
            opts = org_opts.get(ldap_name, None)
            remove = bool(org_opts.get('remove_{}'.format(ldap_name), remove))
            desired_org_states[org_name][org_role_name] = _update_m2m_from_groups(ldap_user, opts, remove)

        # If everything returned None (because there was no configuration) we can remove this org from our map
        # This will prevent us from loading the org in the next query
        if all(desired_org_states[org_name][org_role_name] is None for org_role_name in org_roles_and_ldap_attributes.keys()):
            del desired_org_states[org_name]

    # Compute in memory what the state is of the different LDAP teams
    desired_team_states = {}
    for team_name, team_opts in team_map_settings.items():
        if 'organization' not in team_opts:
            continue
        users_opts = team_opts.get('users', None)
        remove = bool(team_opts.get('remove', True))
        state = _update_m2m_from_groups(ldap_user, users_opts, remove)
        if state is not None:
            organization = team_opts['organization']
            if organization not in desired_team_states:
                desired_team_states[organization] = {}
            desired_team_states[organization][team_name] = {'member_role': state}

    # Check if user.profile is available, otherwise force user.save()
    try:
        _ = user.profile
    except ValueError:
        force_user_update = True
    finally:
        if force_user_update:
            user.save()

    # Update user profile to store LDAP DN.
    profile = user.profile
    if profile.ldap_dn != ldap_user.dn:
        profile.ldap_dn = ldap_user.dn
        profile.save()

    reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, 'LDAP')

View File

@@ -1,83 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
# Python
import urllib.parse
# Django
from django.conf import settings
from django.utils.functional import LazyObject
from django.shortcuts import redirect
# Python Social Auth
from social_core.exceptions import SocialAuthBaseException
from social_core.utils import social_logger
from social_django import utils
from social_django.middleware import SocialAuthExceptionMiddleware
class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
    """Middleware that wires social-auth into /sso/ requests and surfaces
    social-auth errors back to the UI via the session."""

    def __call__(self, request):
        return self.process_request(request)

    def process_request(self, request):
        # Request/response entry point: prime social-auth state for /sso paths
        # and clear stale error state on every non-SSO request.
        if request.path.startswith('/sso'):
            # See upgrade blocker note in requirements/README.md
            utils.BACKENDS = settings.AUTHENTICATION_BACKENDS
        token_key = request.COOKIES.get('token', '')
        token_key = urllib.parse.quote(urllib.parse.unquote(token_key).strip('"'))

        if not hasattr(request, 'successful_authenticator'):
            request.successful_authenticator = None

        if not request.path.startswith('/sso/') and 'migrations_notran' not in request.path:
            if request.user and request.user.is_authenticated:
                # The rest of the code base relies heavily on type/inheritance checks,
                # LazyObject sent from Django auth middleware can be buggy if not
                # converted back to its original object.
                if isinstance(request.user, LazyObject) and request.user._wrapped:
                    request.user = request.user._wrapped
            # Outside the SSO flow, drop any leftover social-auth error state.
            request.session.pop('social_auth_error', None)
            request.session.pop('social_auth_last_backend', None)
        return self.get_response(request)

    def process_view(self, request, callback, callback_args, callback_kwargs):
        # Remember which backend a login attempt used so errors can be attributed.
        if request.path.startswith('/sso/login/'):
            request.session['social_auth_last_backend'] = callback_kwargs['backend']

    def process_exception(self, request, exception):
        # Convert social-auth exceptions into a session-stored error + redirect.
        strategy = getattr(request, 'social_strategy', None)
        if strategy is None or self.raise_exception(request, exception):
            return
        if isinstance(exception, SocialAuthBaseException) or request.path.startswith('/sso/'):
            backend = getattr(request, 'backend', None)
            backend_name = getattr(backend, 'name', 'unknown-backend')

            message = self.get_message(request, exception)

            # Prefer the backend recorded at login time when it differs.
            if request.session.get('social_auth_last_backend') != backend_name:
                backend_name = request.session.get('social_auth_last_backend')
                message = request.GET.get('error_description', message)

            full_backend_name = backend_name
            try:
                # SAML requests carry the IdP name in RelayState; append it if present.
                idp_name = strategy.request_data()['RelayState']
                full_backend_name = '%s:%s' % (backend_name, idp_name)
            except KeyError:
                pass

            social_logger.error(message)

            url = self.get_redirect_uri(request, exception)
            request.session['social_auth_error'] = (full_backend_name, message)
            return redirect(url)

    def get_message(self, request, exception):
        # Normalize exception text into a sentence ending with punctuation.
        msg = str(exception)
        if msg and msg[-1] not in '.?!':
            msg = msg + '.'
        return msg

    def get_redirect_uri(self, request, exception):
        # Redirect target: the pending 'next' URL, else the configured error page.
        strategy = getattr(request, 'social_strategy', None)
        return strategy.session_get('next', '') or strategy.setting('LOGIN_ERROR_URL')

View File

@@ -1,150 +0,0 @@
import pytest
from django.contrib.auth.models import User
from awx.sso.backends import TACACSPlusBackend
from awx.sso.models import UserEnterpriseAuth
@pytest.fixture
def tacacsplus_backend():
    # Fresh backend instance per test; no server connection is made at construction.
    return TACACSPlusBackend()
@pytest.fixture
def existing_normal_user():
    # Idempotently fetch-or-create a regular (password-bearing) user "alice".
    try:
        user = User.objects.get(username="alice")
    except User.DoesNotExist:
        user = User(username="alice", password="password")
        user.save()
    return user
@pytest.fixture
def existing_tacacsplus_user():
    # Idempotently fetch-or-create a passwordless enterprise user "foo"
    # bound to the tacacs+ provider.
    try:
        user = User.objects.get(username="foo")
    except User.DoesNotExist:
        user = User(username="foo")
        user.set_unusable_password()
        user.save()
        enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+')
        enterprise_auth.save()
    return user
@pytest.fixture
def test_radius_config(settings):
    # Minimal RADIUS configuration applied via the pytest-django settings fixture.
    settings.RADIUS_SERVER = '127.0.0.1'
    settings.RADIUS_PORT = 1812
    settings.RADIUS_SECRET = 'secret'
@pytest.fixture
def basic_saml_config(settings):
    # Full SAML configuration: one IdP, team/org attribute maps, and user flags
    # with explicit value lists (contrast with saml_config_user_flags_no_value).
    settings.SAML_SECURITY_CONFIG = {
        "wantNameId": True,
        "signMetadata": False,
        "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
        "nameIdEncrypted": False,
        "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
        "authnRequestsSigned": False,
        "logoutRequestSigned": False,
        "wantNameIdEncrypted": False,
        "logoutResponseSigned": False,
        "wantAssertionsSigned": True,
        "requestedAuthnContext": False,
        "wantAssertionsEncrypted": False,
    }
    settings.SOCIAL_AUTH_SAML_ENABLED_IDPS = {
        "example": {
            "attr_email": "email",
            "attr_first_name": "first_name",
            "attr_last_name": "last_name",
            "attr_user_permanent_id": "username",
            "attr_username": "username",
            "entity_id": "https://www.example.com/realms/sample",
            "url": "https://www.example.com/realms/sample/protocol/saml",
            # 151-char dummy cert body; tests assert on its 64/64/23 line split.
            "x509cert": "A" * 64 + "B" * 64 + "C" * 23,
        }
    }
    settings.SOCIAL_AUTH_SAML_TEAM_ATTR = {
        "remove": False,
        "saml_attr": "group_name",
        "team_org_map": [
            {"team": "internal:unix:domain:admins", "team_alias": "Administrators", "organization": "Default"},
            {"team": "East Coast", "organization": "North America"},
            {"team": "developers", "organization": "North America"},
            {"team": "developers", "organization": "South America"},
        ],
    }
    settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR = {
        "is_superuser_role": ["wilma"],
        "is_superuser_attr": "friends",
        "is_superuser_value": ["barney", "fred"],
        "remove_superusers": False,
        "is_system_auditor_role": ["fred"],
        "is_system_auditor_attr": "auditor",
        "is_system_auditor_value": ["bamm-bamm"],
    }
    settings.SOCIAL_AUTH_SAML_ORGANIZATION_ATTR = {"saml_attr": "member-of", "remove": True, "saml_admin_attr": "admin-of", "remove_admins": False}
@pytest.fixture
def test_tacacs_config(settings):
    # Full TACACS+ configuration, including REM_ADDR (client-IP forwarding).
    settings.TACACSPLUS_HOST = "tacacshost"
    settings.TACACSPLUS_PORT = 49
    settings.TACACSPLUS_SECRET = "secret"
    settings.TACACSPLUS_SESSION_TIMEOUT = 10
    settings.TACACSPLUS_AUTH_PROTOCOL = "pap"
    settings.TACACSPLUS_REM_ADDR = True
@pytest.fixture
def saml_config_user_flags_no_value(settings):
    # Same SAML setup as basic_saml_config, but the user-flag map only names a
    # role and an attribute — no value lists — exercising the "no value" path.
    settings.SAML_SECURITY_CONFIG = {
        "wantNameId": True,
        "signMetadata": False,
        "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
        "nameIdEncrypted": False,
        "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
        "authnRequestsSigned": False,
        "logoutRequestSigned": False,
        "wantNameIdEncrypted": False,
        "logoutResponseSigned": False,
        "wantAssertionsSigned": True,
        "requestedAuthnContext": False,
        "wantAssertionsEncrypted": False,
    }
    settings.SOCIAL_AUTH_SAML_ENABLED_IDPS = {
        "example": {
            "attr_email": "email",
            "attr_first_name": "first_name",
            "attr_last_name": "last_name",
            "attr_user_permanent_id": "username",
            "attr_username": "username",
            "entity_id": "https://www.example.com/realms/sample",
            "url": "https://www.example.com/realms/sample/protocol/saml",
            "x509cert": "A" * 64 + "B" * 64 + "C" * 23,
        }
    }
    settings.SOCIAL_AUTH_SAML_TEAM_ATTR = {
        "remove": False,
        "saml_attr": "group_name",
        "team_org_map": [
            {"team": "internal:unix:domain:admins", "team_alias": "Administrators", "organization": "Default"},
            {"team": "East Coast", "organization": "North America"},
            {"team": "developers", "organization": "North America"},
            {"team": "developers", "organization": "South America"},
        ],
    }
    settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR = {
        "is_superuser_role": ["wilma"],
        "is_superuser_attr": "friends",
    }

View File

@@ -1,104 +0,0 @@
import pytest
from unittest.mock import MagicMock
from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator
@pytest.fixture
def test_google_config(settings):
    # Representative Google OAuth2 configuration, including maps that the
    # migrator is expected to translate rather than copy verbatim.
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = "test_key"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = "test_secret"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL = "https://tower.example.com/sso/complete/google-oauth2/"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP = {"My Org": {"users": True}}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP = {"My Team": {"organization": "My Org", "users": True}}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = ["profile", "email"]
@pytest.mark.django_db
def test_get_controller_config(test_google_config):
    """Exported Google config contains the auth settings but not the org/team maps."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = GoogleOAuth2Migrator(gateway_client, command_obj)
    result = obj.get_controller_config()
    assert len(result) == 1
    config = result[0]
    assert config['category'] == 'Google OAuth2'
    settings = config['settings']
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'] == 'test_key'
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'] == 'test_secret'
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL'] == "https://tower.example.com/sso/complete/google-oauth2/"
    assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'] == ["profile", "email"]
    # Assert that other settings are not present in the returned config
    assert 'SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP' not in settings
    assert 'SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP' not in settings
@pytest.mark.django_db
def test_create_gateway_authenticator(mocker, test_google_config):
    """Gateway payload is built correctly (disabled by default, maps translated)."""
    mocker.patch('django.conf.settings.LOGGING', {})
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = GoogleOAuth2Migrator(gateway_client, command_obj)
    # Stub out the actual submission; we only inspect what would be sent.
    mock_submit = MagicMock(return_value=True)
    obj.submit_authenticator = mock_submit
    configs = obj.get_controller_config()
    result = obj.create_gateway_authenticator(configs[0])
    assert result is True
    mock_submit.assert_called_once()
    # Assert payload sent to gateway
    payload = mock_submit.call_args[0][0]
    assert payload['name'] == 'google'
    assert payload['slug'] == 'aap-google-oauth2-google-oauth2'
    assert payload['type'] == 'ansible_base.authentication.authenticator_plugins.google_oauth2'
    assert payload['enabled'] is False
    assert payload['create_objects'] is True
    assert payload['remove_users'] is False
    # Assert configuration details
    configuration = payload['configuration']
    assert configuration['KEY'] == 'test_key'
    assert configuration['SECRET'] == 'test_secret'
    assert configuration['CALLBACK_URL'] == 'https://tower.example.com/sso/complete/google-oauth2/'
    assert configuration['SCOPE'] == ['profile', 'email']
    # Assert mappers
    assert len(payload['mappers']) == 2
    assert payload['mappers'][0]['map_type'] == 'organization'
    assert payload['mappers'][1]['map_type'] == 'team'
    # Assert ignore_keys
    ignore_keys = mock_submit.call_args[0][1]
    assert ignore_keys == ["ACCESS_TOKEN_METHOD", "REVOKE_TOKEN_METHOD"]
@pytest.mark.django_db
def test_create_gateway_authenticator_no_optional_values(mocker, settings):
    """Unset optional settings are omitted from the payload and listed as ignored keys."""
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = "test_key"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = "test_secret"
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP = {}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP = {}
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = None
    settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL = None
    mocker.patch('django.conf.settings.LOGGING', {})
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = GoogleOAuth2Migrator(gateway_client, command_obj)
    mock_submit = MagicMock(return_value=True)
    obj.submit_authenticator = mock_submit
    configs = obj.get_controller_config()
    obj.create_gateway_authenticator(configs[0])
    payload = mock_submit.call_args[0][0]
    assert 'CALLBACK_URL' not in payload['configuration']
    assert 'SCOPE' not in payload['configuration']
    ignore_keys = mock_submit.call_args[0][1]
    assert 'CALLBACK_URL' in ignore_keys
    assert 'SCOPE' in ignore_keys

View File

@@ -1,17 +0,0 @@
import pytest
from unittest.mock import MagicMock
from awx.sso.utils.radius_migrator import RADIUSMigrator
@pytest.mark.django_db
def test_get_controller_config(test_radius_config):
    """RADIUS export contains exactly the server/port/secret triple."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = RADIUSMigrator(gateway_client, command_obj)
    result = obj.get_controller_config()
    config = result[0]['settings']['configuration']
    assert config['SERVER'] == '127.0.0.1'
    assert config['PORT'] == 1812
    assert config['SECRET'] == 'secret'
    assert len(config) == 3

View File

@@ -1,272 +0,0 @@
import pytest
from unittest.mock import MagicMock, patch
from awx.sso.utils.saml_migrator import SAMLMigrator
@pytest.mark.django_db
def test_get_controller_config(basic_saml_config):
    """Bare x509 cert body gets wrapped in PEM markers and 64-char lines."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)
    result = obj.get_controller_config()
    lines = result[0]['settings']['configuration']['IDP_X509_CERT'].splitlines()
    assert lines[0] == '-----BEGIN CERTIFICATE-----'
    assert lines[1] == "A" * 64
    assert lines[2] == "B" * 64
    assert lines[3] == "C" * 23
    assert lines[-1] == '-----END CERTIFICATE-----'
@pytest.mark.django_db
def test_get_controller_config_with_mapper(saml_config_user_flags_no_value):
    """Team maps and value-less superuser flags translate to ordered gateway mappers."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)
    result = obj.get_controller_config()
    # Four team maps from SOCIAL_AUTH_SAML_TEAM_ATTR, then two is_superuser
    # mappers (role-based, then attribute-based with an empty trigger because
    # no value list was configured).
    expected_maps = [
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'Default',
            'team': 'Administrators',
            'name': 'Team-Administrators-Default',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['internal:unix:domain:admins']}, 'join_condition': 'or'}},
            'order': 1,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'East Coast',
            'name': 'Team-East Coast-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['East Coast']}, 'join_condition': 'or'}},
            'order': 2,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'developers',
            'name': 'Team-developers-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 3,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'South America',
            'team': 'developers',
            'name': 'Team-developers-South America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 4,
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 5,
            'authenticator': -1,
            'triggers': {'attributes': {'Role': {'in': ['wilma']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser-attr',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 6,
            'authenticator': -1,
            'triggers': {'attributes': {'friends': {}, 'join_condition': 'or'}},
        },
    ]
    assert result[0]['team_mappers'] == expected_maps
    # Every referenced SAML attribute must also be exported via EXTRA_DATA.
    extra_data = result[0]['settings']['configuration']['EXTRA_DATA']
    assert ['Role', 'Role'] in extra_data
    assert ['friends', 'friends'] in extra_data
    assert ['group_name', 'group_name'] in extra_data
@pytest.mark.django_db
def test_get_controller_config_with_roles(basic_saml_config):
    """Full flag/org attribute config produces superuser, auditor and org mappers."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)
    result = obj.get_controller_config()
    # Order: 4 team maps, then role-based superuser/auditor, then
    # attribute-based superuser/auditor, then org member/admin mappers.
    expected_maps = [
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'Default',
            'team': 'Administrators',
            'name': 'Team-Administrators-Default',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['internal:unix:domain:admins']}, 'join_condition': 'or'}},
            'order': 1,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'East Coast',
            'name': 'Team-East Coast-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['East Coast']}, 'join_condition': 'or'}},
            'order': 2,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'North America',
            'team': 'developers',
            'name': 'Team-developers-North America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 3,
        },
        {
            'map_type': 'team',
            'role': 'Team Member',
            'organization': 'South America',
            'team': 'developers',
            'name': 'Team-developers-South America',
            'revoke': False,
            'authenticator': -1,
            'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}},
            'order': 4,
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser',
            'organization': None,
            'team': None,
            'revoke': False,
            'order': 5,
            'authenticator': -1,
            'triggers': {'attributes': {'Role': {'in': ['wilma']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'role',
            'role': 'Platform Auditor',
            'name': 'Role-Platform Auditor',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 6,
            'authenticator': -1,
            'triggers': {'attributes': {'Role': {'in': ['fred']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'is_superuser',
            'role': None,
            'name': 'Role-is_superuser-attr',
            'organization': None,
            'team': None,
            'revoke': False,
            'order': 7,
            'authenticator': -1,
            'triggers': {'attributes': {'friends': {'in': ['barney', 'fred']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'role',
            'role': 'Platform Auditor',
            'name': 'Role-Platform Auditor-attr',
            'organization': None,
            'team': None,
            'revoke': True,
            'order': 8,
            'authenticator': -1,
            'triggers': {'attributes': {'auditor': {'in': ['bamm-bamm']}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'organization',
            'role': 'Organization Member',
            'name': 'Role-Organization Member-attr',
            'organization': "{% for_attr_value('member-of') %}",
            'team': None,
            'revoke': True,
            'order': 9,
            'authenticator': -1,
            'triggers': {'attributes': {'member-of': {}, 'join_condition': 'or'}},
        },
        {
            'map_type': 'organization',
            'role': 'Organization Admin',
            'name': 'Role-Organization Admin-attr',
            'organization': "{% for_attr_value('admin-of') %}",
            'team': None,
            'revoke': False,
            'order': 10,
            'authenticator': -1,
            'triggers': {'attributes': {'admin-of': {}, 'join_condition': 'or'}},
        },
    ]
    assert result[0]['team_mappers'] == expected_maps
    # EXTRA_DATA must list each referenced attribute exactly once.
    extra_data = result[0]['settings']['configuration']['EXTRA_DATA']
    extra_data_items = [
        ['member-of', 'member-of'],
        ['admin-of', 'admin-of'],
        ['Role', 'Role'],
        ['friends', 'friends'],
        ['group_name', 'group_name'],
    ]
    for item in extra_data_items:
        assert item in extra_data
        assert extra_data.count(item) == 1
@pytest.mark.django_db
def test_get_controller_config_enabled_false(basic_saml_config):
    """SAML controller export marks settings.enabled False by default."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)
    result = obj.get_controller_config()
    assert isinstance(result, list) and len(result) >= 1
    assert result[0]['settings']['enabled'] is False
@pytest.mark.django_db
def test_create_gateway_authenticator_submits_disabled(basic_saml_config):
    """Submitted Gateway authenticator config must have enabled=False and correct ignore keys."""
    gateway_client = MagicMock()
    command_obj = MagicMock()
    obj = SAMLMigrator(gateway_client, command_obj)
    config = obj.get_controller_config()[0]
    # Capture the submission instead of performing it.
    with patch.object(
        obj,
        'submit_authenticator',
        return_value={'success': True, 'action': 'created', 'error': None},
    ) as submit_mock:
        obj.create_gateway_authenticator(config)
    # Extract submitted args: gateway_config, ignore_keys, original_config
    submitted_gateway_config = submit_mock.call_args[0][0]
    ignore_keys = submit_mock.call_args[0][1]
    assert submitted_gateway_config['enabled'] is False
    assert 'CALLBACK_URL' in ignore_keys
    assert 'SP_PRIVATE_KEY' in ignore_keys

View File

@@ -1,384 +0,0 @@
"""
Unit tests for SettingsMigrator class.
"""
import pytest
from unittest.mock import Mock, patch
from awx.sso.utils.settings_migrator import SettingsMigrator
class TestSettingsMigrator:
"""Tests for SettingsMigrator class."""
    def setup_method(self):
        """Set up test fixtures: a migrator wired to mock gateway client and command."""
        self.gateway_client = Mock()
        self.command = Mock()
        self.migrator = SettingsMigrator(self.gateway_client, self.command)
    def test_get_authenticator_type(self):
        """Test that get_authenticator_type returns 'Settings'."""
        assert self.migrator.get_authenticator_type() == "Settings"
    @pytest.mark.parametrize(
        "input_name,expected_output",
        [
            ('CUSTOM_LOGIN_INFO', 'custom_login_info'),
            ('CUSTOM_LOGO', 'custom_logo'),
            # Unknown names pass through unchanged.
            ('UNKNOWN_SETTING', 'UNKNOWN_SETTING'),
            ('ANOTHER_UNKNOWN', 'ANOTHER_UNKNOWN'),
        ],
    )
    def test_convert_setting_name(self, input_name, expected_output):
        """Test setting name conversion."""
        result = self.migrator._convert_setting_name(input_name)
        assert result == expected_output
    @pytest.mark.parametrize(
        "transformer_method,test_values",
        [
            ('_transform_social_auth_username_is_full_email', [True, False]),
            ('_transform_allow_oauth2_for_external_users', [True, False]),
        ],
    )
    def test_boolean_transformers(self, transformer_method, test_values):
        """Test that boolean transformers return values as-is."""
        transformer = getattr(self.migrator, transformer_method)
        for value in test_values:
            # Identity check: transformers must not coerce or copy the value.
            assert transformer(value) is value
    @pytest.mark.parametrize(
        "settings_values,expected_count",
        [
            # Test case: all settings are None
            (
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': None,
                    'MANAGE_ORGANIZATION_AUTH': None,
                },
                0,
            ),
            # Test case: all settings are empty strings
            (
                {
                    'SESSION_COOKIE_AGE': "",
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': "",
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': "",
                    'LOGIN_REDIRECT_OVERRIDE': "",
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': "",
                    'MANAGE_ORGANIZATION_AUTH': "",
                },
                0,
            ),
            # Test case: only new settings have values
            (
                {
                    'SESSION_COOKIE_AGE': None,
                    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                    'LOGIN_REDIRECT_OVERRIDE': None,
                    'ORG_ADMINS_CAN_SEE_ALL_USERS': True,
                    'MANAGE_ORGANIZATION_AUTH': False,
                },
                2,
            ),
        ],
    )
    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_various_scenarios(self, mock_settings, settings_values, expected_count):
        """Test get_controller_config with various setting combinations."""
        # Apply the settings values to the mock
        for setting_name, setting_value in settings_values.items():
            setattr(mock_settings, setting_name, setting_value)
        result = self.migrator.get_controller_config()
        assert len(result) == expected_count
        # Verify structure if we have results
        if result:
            for config in result:
                assert config['category'] == 'global-settings'
                assert 'setting_name' in config
                assert 'setting_value' in config
                assert config['org_mappers'] == []
                assert config['team_mappers'] == []
                assert config['role_mappers'] == []
                assert config['allow_mappers'] == []
    @patch('awx.sso.utils.settings_migrator.settings')
    def test_get_controller_config_with_all_settings(self, mock_settings):
        """Test get_controller_config with all settings configured."""
        # Mock all settings with valid values
        mock_settings.SESSION_COOKIE_AGE = 3600
        mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
        mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = False
        mock_settings.LOGIN_REDIRECT_OVERRIDE = "https://example.com/login"
        mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = True
        mock_settings.MANAGE_ORGANIZATION_AUTH = False
        # Mock the login redirect override to not be set by migrator
        with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False):
            result = self.migrator.get_controller_config()
        assert len(result) == 6
        # Check that all expected settings are present
        setting_names = [config['setting_name'] for config in result]
        expected_settings = [
            'SESSION_COOKIE_AGE',
            'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL',
            'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
            'LOGIN_REDIRECT_OVERRIDE',
            'ORG_ADMINS_CAN_SEE_ALL_USERS',
            'MANAGE_ORGANIZATION_AUTH',
        ]
        for setting in expected_settings:
            assert setting in setting_names
        # Verify structure of returned configs
        for config in result:
            assert config['category'] == 'global-settings'
            assert 'setting_name' in config
            assert 'setting_value' in config
            assert config['org_mappers'] == []
            assert config['team_mappers'] == []
            assert config['role_mappers'] == []
            assert config['allow_mappers'] == []
@patch('awx.sso.utils.settings_migrator.settings')
def test_get_controller_config_with_new_settings_only(self, mock_settings):
    """Test get_controller_config with only the new settings configured."""
    # Leave the legacy settings unset; configure only the two new ones.
    mock_settings.SESSION_COOKIE_AGE = None
    mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = None
    mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None
    mock_settings.LOGIN_REDIRECT_OVERRIDE = None
    mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = True
    mock_settings.MANAGE_ORGANIZATION_AUTH = False
    result = self.migrator.get_controller_config()
    assert len(result) == 2
    # Index the export by setting name, then verify both new settings made
    # it through with their configured values intact.
    by_name = {entry['setting_name']: entry['setting_value'] for entry in result}
    assert 'ORG_ADMINS_CAN_SEE_ALL_USERS' in by_name
    assert 'MANAGE_ORGANIZATION_AUTH' in by_name
    assert by_name['ORG_ADMINS_CAN_SEE_ALL_USERS'] is True
    assert by_name['MANAGE_ORGANIZATION_AUTH'] is False
@patch('awx.sso.utils.settings_migrator.settings')
def test_get_controller_config_with_login_redirect_override_from_migrator(self, mock_settings):
    """Test get_controller_config when LOGIN_REDIRECT_OVERRIDE is set by migrator.

    When an authenticator migrator has already computed a new redirect URL,
    the export must carry that URL instead of the original setting value.
    """
    # Only LOGIN_REDIRECT_OVERRIDE is configured on the AWX side.
    mock_settings.SESSION_COOKIE_AGE = None
    mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = None
    mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None
    mock_settings.LOGIN_REDIRECT_OVERRIDE = "https://original.com/login"
    mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = None
    mock_settings.MANAGE_ORGANIZATION_AUTH = None
    # Simulate a prior migrator having rewritten the redirect URL.
    with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', True):
        with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_new_url', 'https://new.com/login'):
            result = self.migrator.get_controller_config()
    assert len(result) == 1
    assert result[0]['setting_name'] == 'LOGIN_REDIRECT_OVERRIDE'
    assert result[0]['setting_value'] == 'https://new.com/login'  # Should use the migrator URL
@pytest.mark.parametrize(
    "config,current_value,expected_action,should_update",
    [
        # Test case: Gateway holds a different value -> setting is updated
        ({'setting_name': 'ORG_ADMINS_CAN_SEE_ALL_USERS', 'setting_value': True}, False, 'updated', True),
        # Test case: Gateway already holds the desired value -> skipped
        ({'setting_name': 'MANAGE_ORGANIZATION_AUTH', 'setting_value': False}, False, 'skipped', False),
        # Test case: another setting needs update
        ({'setting_name': 'SESSION_COOKIE_AGE', 'setting_value': 7200}, 3600, 'updated', True),
        # Test case: another setting is unchanged
        ({'setting_name': 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', 'setting_value': True}, True, 'skipped', False),
    ],
)
def test_create_gateway_authenticator_success_scenarios(self, config, current_value, expected_action, should_update):
    """Test create_gateway_authenticator success scenarios.

    The migrator must always read the current Gateway value and only write
    when the desired value differs from it.
    """
    # Gateway reports `current_value` for the setting; writes succeed.
    self.gateway_client.get_gateway_setting.return_value = current_value
    self.gateway_client.update_gateway_setting.return_value = None
    result = self.migrator.create_gateway_authenticator(config)
    assert result['success'] is True
    assert result['action'] == expected_action
    assert result['error'] is None
    # Verify gateway client calls: one read always; a write only when needed.
    expected_setting_name = config['setting_name']
    self.gateway_client.get_gateway_setting.assert_called_once_with(expected_setting_name)
    if should_update:
        self.gateway_client.update_gateway_setting.assert_called_once_with(expected_setting_name, config['setting_value'])
    else:
        self.gateway_client.update_gateway_setting.assert_not_called()
    # Defensive reset: each parametrized case normally gets a fresh mock from
    # setup, but resetting here keeps the call-count assertions safe if the
    # mock instance is ever shared across runs — TODO confirm setup behavior.
    self.gateway_client.reset_mock()
def test_create_gateway_authenticator_with_setting_name_conversion(self):
    """Test create_gateway_authenticator with setting name that needs conversion."""
    # Gateway stores this setting under a lower-cased name and currently
    # holds a stale value, so an update is required.
    self.gateway_client.get_gateway_setting.return_value = 'Old info'
    self.gateway_client.update_gateway_setting.return_value = None
    result = self.migrator.create_gateway_authenticator(
        {'setting_name': 'CUSTOM_LOGIN_INFO', 'setting_value': 'Some custom info'}
    )
    assert result['success'] is True
    assert result['action'] == 'updated'
    # Both the read and the write must use the converted (lower-case) name.
    self.gateway_client.get_gateway_setting.assert_called_once_with('custom_login_info')
    self.gateway_client.update_gateway_setting.assert_called_once_with('custom_login_info', 'Some custom info')
def test_create_gateway_authenticator_failure(self):
    """Test create_gateway_authenticator when gateway update fails."""
    # The read succeeds but the write blows up with a generic exception.
    self.gateway_client.get_gateway_setting.return_value = 3600
    self.gateway_client.update_gateway_setting.side_effect = Exception("Gateway error")
    result = self.migrator.create_gateway_authenticator(
        {'setting_name': 'SESSION_COOKIE_AGE', 'setting_value': 7200}
    )
    # The failure must be reported in the result, not raised.
    assert result['success'] is False
    assert result['action'] == 'failed'
    assert result['error'] == 'Gateway error'
@pytest.mark.parametrize(
    "scenario,settings_config,gateway_responses,update_side_effects,expected_counts",
    [
        # Scenario 1: No settings configured
        (
            "no_settings",
            {
                'SESSION_COOKIE_AGE': None,
                'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                'LOGIN_REDIRECT_OVERRIDE': None,
                'ORG_ADMINS_CAN_SEE_ALL_USERS': None,
                'MANAGE_ORGANIZATION_AUTH': None,
            },
            [],  # No gateway calls expected
            [],  # No update calls expected
            {'settings_created': 0, 'settings_updated': 0, 'settings_unchanged': 0, 'settings_failed': 0},
        ),
        # Scenario 2: All updates successful
        (
            "successful_updates",
            {
                'SESSION_COOKIE_AGE': None,
                'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                'LOGIN_REDIRECT_OVERRIDE': None,
                'ORG_ADMINS_CAN_SEE_ALL_USERS': True,
                'MANAGE_ORGANIZATION_AUTH': False,
            },
            [False, True],  # Different values to trigger updates
            [None, None],  # Successful updates
            {'settings_created': 0, 'settings_updated': 2, 'settings_unchanged': 0, 'settings_failed': 0},
        ),
        # Scenario 3: One unchanged, one updated
        (
            "mixed_results",
            {
                'SESSION_COOKIE_AGE': None,
                'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None,
                'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None,
                'LOGIN_REDIRECT_OVERRIDE': None,
                'ORG_ADMINS_CAN_SEE_ALL_USERS': True,
                'MANAGE_ORGANIZATION_AUTH': False,
            },
            [True, True],  # Gateway returns: ORG_ADMINS_CAN_SEE_ALL_USERS=True (unchanged), MANAGE_ORGANIZATION_AUTH=True (needs update)
            [ValueError("Update failed")],  # Only one update call (for MANAGE_ORGANIZATION_AUTH), and it fails
            {'settings_created': 0, 'settings_updated': 0, 'settings_unchanged': 1, 'settings_failed': 1},
        ),
    ],
)
@patch('awx.sso.utils.settings_migrator.settings')
def test_migrate_scenarios(self, mock_settings, scenario, settings_config, gateway_responses, update_side_effects, expected_counts):
    """Test migrate method with various scenarios.

    Each scenario drives the patched Django settings plus the mocked gateway
    client responses, then checks the migrate() summary counters.
    """
    # Apply settings configuration
    for setting_name, setting_value in settings_config.items():
        setattr(mock_settings, setting_name, setting_value)
    # Mock gateway client responses (only when the scenario expects calls)
    if gateway_responses:
        self.gateway_client.get_gateway_setting.side_effect = gateway_responses
    if update_side_effects:
        self.gateway_client.update_gateway_setting.side_effect = update_side_effects
    # Mock the login redirect override to not be set by migrator for these tests
    with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False):
        result = self.migrator.migrate()
    # Verify expected counts
    for key, expected_value in expected_counts.items():
        assert result[key] == expected_value, f"Scenario {scenario}: Expected {key}={expected_value}, got {result[key]}"
    # All authenticator/mapper counts should be 0 since settings don't have them
    authenticator_mapper_keys = ['created', 'updated', 'unchanged', 'failed', 'mappers_created', 'mappers_updated', 'mappers_failed']
    for key in authenticator_mapper_keys:
        assert result[key] == 0, f"Scenario {scenario}: Expected {key}=0, got {result[key]}"
def test_setting_transformers_defined(self):
    """Test that setting transformers are properly defined."""
    # Exactly these two boolean settings should have value transformers.
    assert set(self.migrator.setting_transformers) == {
        'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL',
        'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
    }
@pytest.mark.parametrize(
    "transformer_return_value,expected_result_count",
    [
        (None, 0),  # Transformer returns None - should be excluded
        ("", 0),  # Transformer returns empty string - should be excluded
        (True, 1),  # Transformer returns valid value - should be included
    ],
)
@patch('awx.sso.utils.settings_migrator.settings')
def test_get_controller_config_transformer_edge_cases(self, mock_settings, transformer_return_value, expected_result_count):
    """Test get_controller_config when transformer returns various edge case values.

    A transformer yielding None/"" must exclude the setting from the export;
    any real value keeps it in.
    """
    # Only SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL is configured; it has a
    # registered transformer whose return value we control below.
    mock_settings.SESSION_COOKIE_AGE = None
    mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
    mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None
    mock_settings.LOGIN_REDIRECT_OVERRIDE = None
    mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = None
    mock_settings.MANAGE_ORGANIZATION_AUTH = None
    # Swap the transformer in the registry itself — patching the method would
    # not affect the reference already stored in the dictionary.  A sentinel
    # makes restoration correct even if the key were absent or falsy; the
    # previous `if original_transformer:` restore leaked the stub in that case.
    sentinel = object()
    original_transformer = self.migrator.setting_transformers.get('SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', sentinel)
    self.migrator.setting_transformers['SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL'] = lambda x: transformer_return_value
    try:
        # Mock the login redirect override to not be set by migrator
        with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False):
            result = self.migrator.get_controller_config()
    finally:
        # Always restore the registry to its exact pre-test state.
        if original_transformer is sentinel:
            self.migrator.setting_transformers.pop('SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', None)
        else:
            self.migrator.setting_transformers['SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL'] = original_transformer
    assert len(result) == expected_result_count

View File

@@ -1,37 +0,0 @@
import pytest
from unittest.mock import MagicMock
from awx.sso.utils.tacacs_migrator import TACACSMigrator
@pytest.mark.django_db
def test_get_controller_config(test_tacacs_config):
    """A fully configured TACACS+ setup exports exactly one authenticator."""
    migrator = TACACSMigrator(MagicMock(), MagicMock())
    result = migrator.get_controller_config()
    assert len(result) == 1
    exported = result[0]
    assert exported['category'] == 'TACACSPLUS'
    settings_data = exported['settings']
    assert settings_data['name'] == 'default'
    assert settings_data['type'] == 'ansible_base.authentication.authenticator_plugins.tacacs'
    # The connection parameters come from the test_tacacs_config fixture.
    configuration = settings_data['configuration']
    assert configuration['HOST'] == 'tacacshost'
    assert configuration['PORT'] == 49
    assert configuration['SECRET'] == 'secret'
    assert configuration['SESSION_TIMEOUT'] == 10
    assert configuration['AUTH_PROTOCOL'] == 'pap'
    assert configuration['REM_ADDR'] is True
@pytest.mark.django_db
def test_get_controller_config_no_host(settings):
    """Without a TACACS+ host configured there is nothing to export."""
    settings.TACACSPLUS_HOST = ""
    migrator = TACACSMigrator(MagicMock(), MagicMock())
    result = migrator.get_controller_config()
    assert len(result) == 0

View File

@@ -1,17 +0,0 @@
"""Convenience re-exports for the SSO authenticator migrators."""
from awx.sso.utils.azure_ad_migrator import AzureADMigrator
from awx.sso.utils.github_migrator import GitHubMigrator
from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator
from awx.sso.utils.ldap_migrator import LDAPMigrator
from awx.sso.utils.oidc_migrator import OIDCMigrator
from awx.sso.utils.radius_migrator import RADIUSMigrator
from awx.sso.utils.saml_migrator import SAMLMigrator

# Public API of this package: one migrator class per supported auth backend.
__all__ = [
    'AzureADMigrator',
    'GitHubMigrator',
    'GoogleOAuth2Migrator',
    'LDAPMigrator',
    'OIDCMigrator',
    'RADIUSMigrator',
    'SAMLMigrator',
]

View File

@@ -1,97 +0,0 @@
"""
Azure AD authenticator migrator.
This module handles the migration of Azure AD authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
class AzureADMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of Azure AD authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "Azure AD"

    def get_controller_config(self):
        """
        Export Azure AD authenticators. An Azure AD authenticator is only exported if
        KEY and SECRET are configured.

        Returns:
            list: List of configured Azure AD authentication providers with their settings
        """
        key_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', None)
        secret_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET', None)
        # Skip this category if the Azure AD key and/or secret are not configured
        if not key_value or not secret_value:
            return []
        # If we have both key and secret, collect all settings
        org_map_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP', None)
        team_map_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP', None)
        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
        # Convert the Azure AD org and team mappings from AWX to the Gateway format.
        # Start with order 1 and maintain sequence across both org and team mappers
        org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
        team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)
        category = 'AzureAD'
        # Generate authenticator name and slug
        authenticator_name = "Azure AD"
        authenticator_slug = self._generate_authenticator_slug("azure_ad", category)
        return [
            {
                'category': category,
                'settings': {
                    "name": authenticator_name,
                    "slug": authenticator_slug,
                    "type": "ansible_base.authentication.authenticator_plugins.azuread",
                    # Exported disabled; not switched on in Gateway automatically
                    "enabled": False,
                    "create_objects": True,
                    "remove_users": False,
                    "configuration": {
                        "KEY": key_value,
                        "SECRET": secret_value,
                        "GROUPS_CLAIM": "groups",
                    },
                },
                'org_mappers': org_mappers,
                'team_mappers': team_mappers,
                'login_redirect_override': login_redirect_override,
            }
        ]

    def create_gateway_authenticator(self, config):
        """Create (or update) an Azure AD authenticator in Gateway.

        Args:
            config: One entry produced by get_controller_config().

        Returns:
            dict: Result from submit_authenticator() carrying 'success',
            'action' and 'error'.
        """
        category = config["category"]
        gateway_config = config["settings"]
        self._write_output(f"\n--- Processing {category} authenticator ---")
        self._write_output(f"Name: {gateway_config['name']}")
        self._write_output(f"Slug: {gateway_config['slug']}")
        self._write_output(f"Type: {gateway_config['type']}")
        # Keys excluded from the existing-vs-new comparison:
        # CALLBACK_URL - automatically created by Gateway
        # GROUPS_CLAIM - Not an AWX feature
        ignore_keys = ["CALLBACK_URL", "GROUPS_CLAIM"]
        # Submit the authenticator (create or update as needed)
        result = self.submit_authenticator(gateway_config, ignore_keys, config)
        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
        valid_login_urls = ['/sso/login/azuread-oauth2']
        self.handle_login_override(config, valid_login_urls)
        return result

View File

@@ -1,679 +0,0 @@
"""
Base authenticator migrator class.
This module defines the contract that all specific authenticator migrators must follow.
"""
from urllib.parse import urlparse, parse_qs, urlencode
from django.conf import settings
from awx.main.utils.gateway_client import GatewayAPIError
class BaseAuthenticatorMigrator:
"""
Base class for all authenticator migrators.
Defines the contract that all specific authenticator migrators must follow.
"""
KEYS_TO_PRESERVE = ['idp']
# Class-level flag to track if LOGIN_REDIRECT_OVERRIDE was set by any migrator
login_redirect_override_set_by_migrator = False
# Class-level variable to store the new LOGIN_REDIRECT_OVERRIDE URL computed by migrators
login_redirect_override_new_url = None
def __init__(self, gateway_client=None, command=None, force=False):
    """
    Initialize the authenticator migrator.

    Args:
        gateway_client: GatewayClient instance for API calls
        command: Optional Django management command instance (for styled output)
        force: If True, force migration even if configurations already exist;
            in particular, encrypted fields are then included in comparisons
            (see _authenticator_configs_match) instead of being ignored
    """
    self.gateway_client = gateway_client
    self.command = command
    self.force = force
    # Configuration keys Gateway stores encrypted.  Unless `force` is set,
    # these are excluded from config comparison so secrets are not rewritten
    # on every run.
    self.encrypted_fields = [
        # LDAP Fields
        'BIND_PASSWORD',
        # The following authenticators all use the same key to store encrypted information:
        # Generic OIDC
        # RADIUS
        # TACACS+
        # GitHub OAuth2
        # Azure AD OAuth2
        # Google OAuth2
        'SECRET',
        # SAML Fields
        'SP_PRIVATE_KEY',
    ]
def migrate(self):
    """
    Main entry point - orchestrates the migration process.

    Fetches the AWX-side configuration, creates/updates each authenticator
    in Gateway, then processes mappers for every authenticator that was
    created, updated, or found unchanged.

    Returns:
        dict: Summary with authenticator counts ('created', 'updated',
        'unchanged', 'failed'), mapper counts ('mappers_created',
        'mappers_updated', 'mappers_failed') and settings counts
        ('settings_created', 'settings_updated', 'settings_unchanged',
        'settings_failed' — always 0 for authenticator migrators).
    """
    # Get configuration from AWX/Controller
    configs = self.get_controller_config()
    if not configs:
        self._write_output(f'No {self.get_authenticator_type()} authenticators found to migrate.', 'warning')
        # Include the settings_* keys here too: the normal return below has
        # them, and omitting them made this early return raise KeyError for
        # callers that aggregate all counters.
        return {
            'created': 0,
            'updated': 0,
            'unchanged': 0,
            'failed': 0,
            'mappers_created': 0,
            'mappers_updated': 0,
            'mappers_failed': 0,
            'settings_created': 0,
            'settings_updated': 0,
            'settings_unchanged': 0,
            'settings_failed': 0,
        }
    self._write_output(f'Found {len(configs)} {self.get_authenticator_type()} authentication configuration(s).', 'success')
    # Process each authenticator configuration, bucketing by outcome
    created_authenticators = []
    updated_authenticators = []
    unchanged_authenticators = []
    failed_authenticators = []
    for config in configs:
        result = self.create_gateway_authenticator(config)
        if result['success']:
            if result['action'] == 'created':
                created_authenticators.append(config)
            elif result['action'] == 'updated':
                updated_authenticators.append(config)
            elif result['action'] == 'skipped':
                unchanged_authenticators.append(config)
        else:
            failed_authenticators.append(config)
    # Process mappers for successfully created/updated/unchanged authenticators
    mappers_created = 0
    mappers_updated = 0
    mappers_failed = 0
    successful_authenticators = created_authenticators + updated_authenticators + unchanged_authenticators
    if successful_authenticators:
        self._write_output('\n=== Processing Authenticator Mappers ===', 'success')
        for config in successful_authenticators:
            mapper_result = self._process_gateway_mappers(config)
            mappers_created += mapper_result['created']
            mappers_updated += mapper_result['updated']
            mappers_failed += mapper_result['failed']
    # Authenticators don't have settings, so settings counts are always 0
    return {
        'created': len(created_authenticators),
        'updated': len(updated_authenticators),
        'unchanged': len(unchanged_authenticators),
        'failed': len(failed_authenticators),
        'mappers_created': mappers_created,
        'mappers_updated': mappers_updated,
        'mappers_failed': mappers_failed,
        'settings_created': 0,
        'settings_updated': 0,
        'settings_unchanged': 0,
        'settings_failed': 0,
    }
def get_controller_config(self):
    """
    Gather configuration from AWX/Controller.

    Returns:
        list: List of configuration dictionaries, one per authenticator
        found in AWX settings; empty when nothing is configured.

    Raises:
        NotImplementedError: always, on the base class.
    """
    raise NotImplementedError("Subclasses must implement get_controller_config()")
def create_gateway_authenticator(self, config):
    """
    Create authenticator in Gateway.

    Args:
        config: Configuration dictionary from get_controller_config()

    Returns:
        dict: Result with 'success' (bool), 'action' (e.g. 'created',
        'updated', 'skipped', or a failure marker) and 'error' (str or
        None) — this is how migrate() consumes it.

    Raises:
        NotImplementedError: always, on the base class.
    """
    raise NotImplementedError("Subclasses must implement create_gateway_authenticator()")
def get_authenticator_type(self):
    """
    Get the human-readable authenticator type name.

    Returns:
        str: Authenticator type name for logging (e.g. "Azure AD").

    Raises:
        NotImplementedError: always, on the base class.
    """
    raise NotImplementedError("Subclasses must implement get_authenticator_type()")
def _generate_authenticator_slug(self, auth_type, category):
"""Generate a deterministic slug for an authenticator."""
return f"aap-{auth_type}-{category}".lower()
def submit_authenticator(self, gateway_config, ignore_keys=None, config=None):
    """
    Submit an authenticator to Gateway - either create new or update existing.

    Args:
        gateway_config: Complete Gateway authenticator configuration
        ignore_keys: List of configuration keys to ignore during comparison
        config: Optional AWX config dict; on success the resulting
            'gateway_authenticator_id' and 'gateway_authenticator' are
            written into it for later mapper processing

    Returns:
        dict: Result with 'success' (bool), 'action' ('created', 'updated',
        'skipped', 'update_failed', 'failed' or None), 'error' (str or None)
    """
    # Replace the previous mutable defaults (`ignore_keys=[]`, `config={}`):
    # `config` is mutated below, so a shared default dict leaked results
    # across calls that omitted the argument.
    if ignore_keys is None:
        ignore_keys = []
    if config is None:
        config = {}
    authenticator_slug = gateway_config.get('slug')
    if not authenticator_slug:
        self._write_output('Gateway config missing slug, cannot submit authenticator', 'error')
        return {'success': False, 'action': None, 'error': 'Missing slug'}
    try:
        # Check if authenticator already exists by slug
        existing_authenticator = self.gateway_client.get_authenticator_by_slug(authenticator_slug)
        if existing_authenticator:
            # Authenticator exists, check if configuration matches
            authenticator_id = existing_authenticator.get('id')
            configs_match, differences = self._authenticator_configs_match(existing_authenticator, gateway_config, ignore_keys)
            if configs_match:
                self._write_output(f'⚠ Authenticator already exists with matching configuration (ID: {authenticator_id})', 'warning')
                # Store the existing result for mapper creation
                config['gateway_authenticator_id'] = authenticator_id
                config['gateway_authenticator'] = existing_authenticator
                return {'success': True, 'action': 'skipped', 'error': None}
            else:
                self._write_output(f'⚠ Authenticator exists but configuration differs (ID: {authenticator_id})', 'warning')
                self._write_output(' Configuration comparison:')
                # Log differences between the existing and the new configuration in case of an update
                for difference in differences:
                    self._write_output(f' {difference}')
                # Update the existing authenticator
                self._write_output(' Updating authenticator with new configuration...')
                try:
                    # Don't include the slug in the update since it shouldn't change
                    update_config = gateway_config.copy()
                    if 'slug' in update_config:
                        del update_config['slug']
                    result = self.gateway_client.update_authenticator(authenticator_id, update_config)
                    self._write_output(f'✓ Successfully updated authenticator with ID: {authenticator_id}', 'success')
                    # Store the updated result for mapper creation
                    config['gateway_authenticator_id'] = authenticator_id
                    config['gateway_authenticator'] = result
                    return {'success': True, 'action': 'updated', 'error': None}
                except GatewayAPIError as e:
                    self._write_output(f'✗ Failed to update authenticator: {e.message}', 'error')
                    if e.response_data:
                        self._write_output(f' Details: {e.response_data}', 'error')
                    return {'success': False, 'action': 'update_failed', 'error': e.message}
        else:
            # Authenticator doesn't exist, create it
            self._write_output('Creating new authenticator...')
            # Create the authenticator
            result = self.gateway_client.create_authenticator(gateway_config)
            self._write_output(f'✓ Successfully created authenticator with ID: {result.get("id")}', 'success')
            # Store the result for potential mapper creation later
            config['gateway_authenticator_id'] = result.get('id')
            config['gateway_authenticator'] = result
            return {'success': True, 'action': 'created', 'error': None}
    except GatewayAPIError as e:
        self._write_output(f'✗ Failed to submit authenticator: {e.message}', 'error')
        if e.response_data:
            self._write_output(f' Details: {e.response_data}', 'error')
        return {'success': False, 'action': 'failed', 'error': e.message}
    except Exception as e:
        self._write_output(f'✗ Unexpected error submitting authenticator: {str(e)}', 'error')
        return {'success': False, 'action': 'failed', 'error': str(e)}
def _authenticator_configs_match(self, existing_auth, new_config, ignore_keys=[]):
"""
Compare existing authenticator configuration with new configuration.
Args:
existing_auth: Existing authenticator data from Gateway
new_config: New authenticator configuration to be created
ignore_keys: List of configuration keys to ignore during comparison
(e.g., ['CALLBACK_URL'] for auto-generated fields)
Returns:
bool: True if configurations match, False otherwise
"""
# Add encrypted fields to ignore_keys if force flag is not set
# This prevents secrets from being updated unless explicitly forced
effective_ignore_keys = ignore_keys.copy()
if not self.force:
effective_ignore_keys.extend(self.encrypted_fields)
# Keep track of the differences between the existing and the new configuration
# Logging them makes debugging much easier
differences = []
if existing_auth.get('name') != new_config.get('name'):
differences.append(f' name: existing="{existing_auth.get("name")}" vs new="{new_config.get("name")}"')
elif existing_auth.get('type') != new_config.get('type'):
differences.append(f' type: existing="{existing_auth.get("type")}" vs new="{new_config.get("type")}"')
elif existing_auth.get('enabled') != new_config.get('enabled'):
differences.append(f' enabled: existing="{existing_auth.get("enabled")}" vs new="{new_config.get("enabled")}"')
elif existing_auth.get('create_objects') != new_config.get('create_objects'):
differences.append(f' create_objects: existing="{existing_auth.get("create_objects")}" vs new="{new_config.get("create_objects")}"')
elif existing_auth.get('remove_users') != new_config.get('remove_users'):
differences.append(f' create_objects: existing="{existing_auth.get("remove_users")}" vs new="{new_config.get("remove_users")}"')
# Compare configuration section
existing_config = existing_auth.get('configuration', {})
new_config_section = new_config.get('configuration', {})
# Helper function to check if a key should be ignored
def should_ignore_key(config_key):
return config_key in effective_ignore_keys
# Check if all keys in new config exist in existing config with same values
for key, value in new_config_section.items():
if should_ignore_key(key):
continue
if key not in existing_config:
differences.append(f' {key}: existing=<missing> vs new="{value}"')
elif existing_config[key] != value:
differences.append(f' {key}: existing="{existing_config.get(key)}" vs new="{value}"')
# Check if existing config has extra keys that new config doesn't have
# (this might indicate configuration drift), but ignore keys in ignore_keys
for key in existing_config:
if should_ignore_key(key):
continue
if key not in new_config_section:
differences.append(f' {key}: existing="{existing_config.get(key)}" vs new=<missing>')
return len(differences) == 0, differences
def _compare_mapper_lists(self, existing_mappers, new_mappers, ignore_keys=None):
"""
Compare existing and new mapper lists to determine which need updates vs creation.
Args:
existing_mappers: List of existing mapper configurations from Gateway
new_mappers: List of new mapper configurations to be created/updated
ignore_keys: List of keys to ignore during comparison (e.g., auto-generated fields)
Returns:
tuple: (mappers_to_update, mappers_to_create)
mappers_to_update: List of tuples (existing_mapper, new_mapper) for updates
mappers_to_create: List of new_mapper configs that don't match any existing
"""
if ignore_keys is None:
ignore_keys = []
mappers_to_update = []
mappers_to_create = []
for new_mapper in new_mappers:
matched_existing = None
# Try to find a matching existing mapper
for existing_mapper in existing_mappers:
if self._mappers_match_structurally(existing_mapper, new_mapper):
matched_existing = existing_mapper
break
if matched_existing:
# Check if the configuration actually differs (ignoring auto-generated fields)
if not self._mapper_configs_match(matched_existing, new_mapper, ignore_keys):
mappers_to_update.append((matched_existing, new_mapper))
# If configs match exactly, no action needed (mapper is up to date)
else:
# No matching existing mapper found, needs to be created
mappers_to_create.append(new_mapper)
return mappers_to_update, mappers_to_create
def _mappers_match_structurally(self, existing_mapper, new_mapper):
"""
Check if two mappers match structurally (same organization, team, map_type, role).
This identifies if they represent the same logical mapping.
Args:
existing_mapper: Existing mapper configuration from Gateway
new_mapper: New mapper configuration
Returns:
bool: True if mappers represent the same logical mapping
"""
# Compare key structural fields that identify the same logical mapper
structural_fields = ['name']
for field in structural_fields:
if existing_mapper.get(field) != new_mapper.get(field):
return False
return True
def _mapper_configs_match(self, existing_mapper, new_mapper, ignore_keys=None):
"""
Compare mapper configurations to check if they are identical.
Args:
existing_mapper: Existing mapper configuration from Gateway
new_mapper: New mapper configuration
ignore_keys: List of keys to ignore during comparison
Returns:
bool: True if configurations match, False otherwise
"""
if ignore_keys is None:
ignore_keys = []
# Helper function to check if a key should be ignored
def should_ignore_key(config_key):
return config_key in ignore_keys
# Compare all mapper fields except ignored ones
all_keys = set(existing_mapper.keys()) | set(new_mapper.keys())
for key in all_keys:
if should_ignore_key(key):
continue
existing_value = existing_mapper.get(key)
new_value = new_mapper.get(key)
if existing_value != new_value:
return False
return True
def _process_gateway_mappers(self, config):
    """Process authenticator mappers in Gateway from AWX config - create or update as needed.

    Args:
        config: AWX config dict; must carry 'gateway_authenticator_id'
            (stored by submit_authenticator) plus the optional mapper lists
            'org_mappers', 'team_mappers', 'role_mappers', 'allow_mappers'.

    Returns:
        dict: Counts with keys 'created', 'updated' and 'failed'.
    """
    authenticator_id = config.get('gateway_authenticator_id')
    if not authenticator_id:
        self._write_output(f'No authenticator ID found for {config["category"]}, skipping mappers', 'error')
        return {'created': 0, 'updated': 0, 'failed': 0}
    category = config['category']
    # Mapper lists are optional; missing keys default to empty lists.
    org_mappers = config.get('org_mappers', [])
    team_mappers = config.get('team_mappers', [])
    role_mappers = config.get('role_mappers', [])
    allow_mappers = config.get('allow_mappers', [])
    all_new_mappers = org_mappers + team_mappers + role_mappers + allow_mappers
    if len(all_new_mappers) == 0:
        self._write_output(f'No mappers to process for {category} authenticator')
        return {'created': 0, 'updated': 0, 'failed': 0}
    self._write_output(f'\n--- Processing mappers for {category} authenticator (ID: {authenticator_id}) ---')
    self._write_output(f'Organization mappers: {len(org_mappers)}')
    self._write_output(f'Team mappers: {len(team_mappers)}')
    self._write_output(f'Role mappers: {len(role_mappers)}')
    self._write_output(f'Allow mappers: {len(allow_mappers)}')
    # Get existing mappers from Gateway; if that fails, every new mapper
    # counts as failed since we cannot safely create or update anything.
    try:
        existing_mappers = self.gateway_client.get_authenticator_maps(authenticator_id)
    except GatewayAPIError as e:
        self._write_output(f'Failed to retrieve existing mappers: {e.message}', 'error')
        return {'created': 0, 'updated': 0, 'failed': len(all_new_mappers)}
    # Define mapper-specific ignore keys (can be overridden by subclasses)
    ignore_keys = self._get_mapper_ignore_keys()
    # Compare existing vs new mappers
    mappers_to_update, mappers_to_create = self._compare_mapper_lists(existing_mappers, all_new_mappers, ignore_keys)
    self._write_output(f'Mappers to create: {len(mappers_to_create)}')
    self._write_output(f'Mappers to update: {len(mappers_to_update)}')
    created_count = 0
    updated_count = 0
    failed_count = 0
    # Process updates
    for existing_mapper, new_mapper in mappers_to_update:
        if self._update_single_mapper(existing_mapper, new_mapper):
            updated_count += 1
        else:
            failed_count += 1
    # Process creations
    for new_mapper in mappers_to_create:
        mapper_type = new_mapper.get('map_type', 'unknown')
        if self._create_single_mapper(authenticator_id, new_mapper, mapper_type):
            created_count += 1
        else:
            failed_count += 1
    # Summary
    self._write_output(f'Mappers created: {created_count}, updated: {updated_count}, failed: {failed_count}')
    return {'created': created_count, 'updated': updated_count, 'failed': failed_count}
def _get_mapper_ignore_keys(self):
"""
Get list of mapper keys to ignore during comparison.
Can be overridden by subclasses for mapper-specific ignore keys.
Returns:
list: List of keys to ignore (e.g., auto-generated fields)
"""
return ['id', 'authenticator', 'created', 'modified', 'summary_fields', 'modified_by', 'created_by', 'related', 'url']
def _update_single_mapper(self, existing_mapper, new_mapper):
    """Update a single mapper in Gateway.

    Args:
        existing_mapper: Existing mapper data from Gateway (must contain 'id')
        new_mapper: New mapper configuration to update to

    Returns:
        bool: True if mapper was updated successfully, False otherwise
    """
    try:
        mapper_id = existing_mapper.get('id')
        if not mapper_id:
            # Without the Gateway primary key there is nothing to address the update at.
            self._write_output(' ✗ Existing mapper missing ID, cannot update', 'error')
            return False
        # Prepare update config - don't include fields that shouldn't be updated
        update_config = new_mapper.copy()
        # Remove fields that shouldn't be updated (read-only or auto-generated)
        fields_to_remove = ['id', 'authenticator', 'created', 'modified']
        for field in fields_to_remove:
            update_config.pop(field, None)
        # Update the mapper
        self.gateway_client.update_authenticator_map(mapper_id, update_config)
        mapper_name = new_mapper.get('name', 'Unknown')
        self._write_output(f' ✓ Updated mapper: {mapper_name}', 'success')
        return True
    except GatewayAPIError as e:
        # API-level failure: surface the message plus any response payload for debugging.
        mapper_name = new_mapper.get('name', 'Unknown')
        self._write_output(f' ✗ Failed to update mapper "{mapper_name}": {e.message}', 'error')
        if e.response_data:
            self._write_output(f' Details: {e.response_data}', 'error')
        return False
    except Exception as e:
        # Catch-all so one broken mapper does not abort the whole migration run.
        mapper_name = new_mapper.get('name', 'Unknown')
        self._write_output(f' ✗ Unexpected error updating mapper "{mapper_name}": {str(e)}', 'error')
        return False
def _create_single_mapper(self, authenticator_id, mapper_config, mapper_type):
    """Create a single mapper in Gateway.

    Args:
        authenticator_id: ID of the Gateway authenticator the mapper belongs to
        mapper_config: Mapper configuration dict to create
        mapper_type: Human-readable mapper type used only in log output

    Returns:
        bool: True if the mapper was created successfully, False otherwise
    """
    try:
        # Update the mapper config with the correct authenticator ID
        mapper_config = mapper_config.copy()  # Don't modify the original
        mapper_config['authenticator'] = authenticator_id
        # Create the mapper
        self.gateway_client.create_authenticator_map(authenticator_id, mapper_config)
        mapper_name = mapper_config.get('name', 'Unknown')
        self._write_output(f' ✓ Created {mapper_type} mapper: {mapper_name}', 'success')
        return True
    except GatewayAPIError as e:
        # API-level failure: log message plus any response payload for debugging.
        mapper_name = mapper_config.get('name', 'Unknown')
        self._write_output(f' ✗ Failed to create {mapper_type} mapper "{mapper_name}": {e.message}', 'error')
        if e.response_data:
            self._write_output(f' Details: {e.response_data}', 'error')
        return False
    except Exception as e:
        # Catch-all so one broken mapper does not abort the whole migration run.
        mapper_name = mapper_config.get('name', 'Unknown')
        self._write_output(f' ✗ Unexpected error creating {mapper_type} mapper "{mapper_name}": {str(e)}', 'error')
        return False
def get_social_org_map(self, authenticator_setting_name=None):
    """
    Get social auth organization map with fallback to global setting.

    Args:
        authenticator_setting_name: Name of the authenticator-specific organization map setting
                                    (e.g., 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP')

    Returns:
        dict: Organization mapping configuration, with fallback to global setting
    """
    # Prefer the authenticator-specific map when one is named and non-empty.
    if authenticator_setting_name:
        specific_map = getattr(settings, authenticator_setting_name, None)
        if specific_map:
            return specific_map
    # Otherwise fall back to the global organization map.
    return getattr(settings, 'SOCIAL_AUTH_ORGANIZATION_MAP', {})
def get_social_team_map(self, authenticator_setting_name=None):
    """
    Get social auth team map with fallback to global setting.

    Args:
        authenticator_setting_name: Name of the authenticator-specific team map setting
                                    (e.g., 'SOCIAL_AUTH_GITHUB_TEAM_MAP')

    Returns:
        dict: Team mapping configuration, with fallback to global setting
    """
    # Prefer the authenticator-specific map when one is named and non-empty.
    if authenticator_setting_name:
        specific_map = getattr(settings, authenticator_setting_name, None)
        if specific_map:
            return specific_map
    # Otherwise fall back to the global team map.
    return getattr(settings, 'SOCIAL_AUTH_TEAM_MAP', {})
def handle_login_override(self, config, valid_login_urls):
    """
    Handle LOGIN_REDIRECT_OVERRIDE setting for this authenticator.

    This method checks if the login_redirect_override from the config matches
    any of the provided valid_login_urls. If it matches, it updates the
    LOGIN_REDIRECT_OVERRIDE setting in Gateway with the new authenticator's
    URL and sets the class flag to indicate it was handled.

    Args:
        config: Configuration dictionary containing:
            - login_redirect_override: The current LOGIN_REDIRECT_OVERRIDE value
            - gateway_authenticator: The created/updated authenticator info
        valid_login_urls: List of URL patterns to match against
    """
    # Check if another migrator has already handled login redirect override.
    # The flag lives on the base class so it is shared by every migrator subclass.
    if BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator:
        raise RuntimeError("LOGIN_REDIRECT_OVERRIDE has already been handled by another migrator")
    login_redirect_override = config.get('login_redirect_override')
    if not login_redirect_override:
        return
    # Check if the login_redirect_override matches any of the provided valid URLs
    url_matches = False
    parsed_redirect = urlparse(login_redirect_override)
    # Stored on self so _updated_query_string can merge these params later.
    self.redirect_query_dict = parse_qs(parsed_redirect.query, keep_blank_values=True) if parsed_redirect.query else {}
    for valid_url in valid_login_urls:
        parsed_valid = urlparse(valid_url)
        # Compare path: redirect path should match or contain the valid path at proper boundaries
        if parsed_redirect.path == parsed_valid.path:
            path_matches = True
        elif parsed_redirect.path.startswith(parsed_valid.path):
            # Ensure the match is at a path boundary (followed by '/' or end of string)
            # NOTE(review): urlparse().path never contains '?', so the '?' case here
            # looks unreachable — confirm the intent before simplifying.
            next_char_pos = len(parsed_valid.path)
            if next_char_pos >= len(parsed_redirect.path) or parsed_redirect.path[next_char_pos] in ['/', '?']:
                path_matches = True
            else:
                path_matches = False
        else:
            path_matches = False
        # Compare query: if valid URL has query params, they should be present in redirect URL
        query_matches = True
        if parsed_valid.query:
            # Parse query parameters for both URLs
            valid_params = parse_qs(parsed_valid.query, keep_blank_values=True)
            # All valid URL query params must be present in redirect URL with same values
            query_matches = all(param in self.redirect_query_dict and self.redirect_query_dict[param] == values for param, values in valid_params.items())
        if path_matches and query_matches:
            url_matches = True
            break
    if not url_matches:
        return
    # Extract the created authenticator from config
    gateway_authenticator = config.get('gateway_authenticator')
    if not gateway_authenticator:
        return
    sso_login_url = gateway_authenticator.get('sso_login_url')
    if not sso_login_url:
        return
    # Compute the new LOGIN_REDIRECT_OVERRIDE URL with the Gateway URL:
    # keep the redirect's structure but swap in Gateway's scheme/host and the
    # authenticator's SSO path, merging the query strings.
    gateway_base_url = self.gateway_client.get_base_url()
    parsed_sso = urlparse(sso_login_url)
    parsed_gw = urlparse(gateway_base_url)
    updated_query = self._updated_query_string(parsed_sso)
    complete_url = parsed_redirect._replace(scheme=parsed_gw.scheme, path=parsed_sso.path, netloc=parsed_gw.netloc, query=updated_query).geturl()
    self._write_output(f'LOGIN_REDIRECT_OVERRIDE will be updated to: {complete_url}')
    # Store the new URL in class variable for settings migrator to use
    BaseAuthenticatorMigrator.login_redirect_override_new_url = complete_url
    # Set the class-level flag to indicate LOGIN_REDIRECT_OVERRIDE was handled by a migrator
    BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator = True
def _updated_query_string(self, parsed_sso):
if parsed_sso.query:
parsed_sso_dict = parse_qs(parsed_sso.query, keep_blank_values=True)
else:
parsed_sso_dict = {}
result = {}
for k, v in self.redirect_query_dict.items():
if k in self.KEYS_TO_PRESERVE and k in parsed_sso_dict:
v = parsed_sso_dict[k]
if isinstance(v, list) and len(v) == 1:
result[k] = v[0]
else:
result[k] = v
return urlencode(result, doseq=True) if result else ""
def _write_output(self, message, style=None):
"""Write output message if command is available."""
if self.command:
if style == 'success':
self.command.stdout.write(self.command.style.SUCCESS(message))
elif style == 'warning':
self.command.stdout.write(self.command.style.WARNING(message))
elif style == 'error':
self.command.stdout.write(self.command.style.ERROR(message))
else:
self.command.stdout.write(message)

View File

@@ -1,217 +0,0 @@
"""
GitHub authenticator migrator.
This module handles the migration of GitHub authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.conf import settings_registry
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
import re
class GitHubMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of GitHub authenticators from AWX to Gateway.

    Covers all six AWX GitHub variants: github, github-org, github-team,
    github-enterprise, github-enterprise-org and github-enterprise-team.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "GitHub"
def get_controller_config(self):
    """
    Export all GitHub authenticators. A GitHub authenticator is only exported if both,
    id and secret, are defined. Otherwise it will be skipped.

    Returns:
        list: List of configured GitHub authentication providers with their settings
    """
    github_categories = ['github', 'github-org', 'github-team', 'github-enterprise', 'github-enterprise-org', 'github-enterprise-team']
    login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
    found_configs = []
    for category in github_categories:
        try:
            category_settings = settings_registry.get_registered_settings(category_slug=category)
            if category_settings:
                config_data = {}
                key_setting = None
                secret_setting = None
                # Ensure category_settings is iterable and contains strings
                if isinstance(category_settings, re.Pattern) or not hasattr(category_settings, '__iter__') or isinstance(category_settings, str):
                    continue
                # First pass: locate the *_KEY / *_SECRET setting names for this category.
                for setting_name in category_settings:
                    # Skip if setting_name is not a string (e.g., regex pattern)
                    if not isinstance(setting_name, str):
                        continue
                    if setting_name.endswith('_KEY'):
                        key_setting = setting_name
                    elif setting_name.endswith('_SECRET'):
                        secret_setting = setting_name
                # Skip this category if KEY or SECRET is missing or empty
                if not key_setting or not secret_setting:
                    continue
                key_value = getattr(settings, key_setting, None)
                secret_value = getattr(settings, secret_setting, None)
                # Skip this category if OIDC Key and/or Secret are not configured
                if not key_value or not secret_value:
                    continue
                # Second pass: collect every setting value for the category and note
                # the org/team map setting names for special processing below.
                org_map_setting_name = None
                team_map_setting_name = None
                for setting_name in category_settings:
                    # Skip if setting_name is not a string (e.g., regex pattern)
                    if not isinstance(setting_name, str):
                        continue
                    value = getattr(settings, setting_name, None)
                    config_data[setting_name] = value
                    # Capture org and team map setting names for special processing
                    if setting_name.endswith('_ORGANIZATION_MAP'):
                        org_map_setting_name = setting_name
                    elif setting_name.endswith('_TEAM_MAP'):
                        team_map_setting_name = setting_name
                # Get org and team mappings using the fallback helpers on the base class.
                org_map_value = self.get_social_org_map(org_map_setting_name) if org_map_setting_name else {}
                team_map_value = self.get_social_team_map(team_map_setting_name) if team_map_setting_name else {}
                # Convert GitHub org and team mappings from AWX to the Gateway format.
                # Start with order 1 and maintain sequence across both org and team mappers.
                org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
                team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)
                found_configs.append(
                    {
                        'category': category,
                        'settings': config_data,
                        'org_mappers': org_mappers,
                        'team_mappers': team_mappers,
                        'login_redirect_override': login_redirect_override,
                    }
                )
        except Exception as e:
            # Re-raised as a plain Exception with category context; the original
            # traceback is not chained (no 'from e') — NOTE(review): consider chaining.
            raise Exception(f'Could not retrieve {category} settings: {str(e)}')
    return found_configs
def create_gateway_authenticator(self, config):
    """Create a GitHub/OIDC authenticator in Gateway.

    Args:
        config: One entry produced by get_controller_config() with keys
                'category' and 'settings'.

    Returns:
        dict: Result from submit_authenticator(), or a skip result dict when
              credentials or the category mapping are missing.
    """
    category = config['category']
    # NOTE(review): this local deliberately shadows the module-level django
    # 'settings' import for the remainder of the method.
    settings = config['settings']
    # Extract the OAuth2 credentials
    key_value = None
    secret_value = None
    for setting_name, value in settings.items():
        if setting_name.endswith('_KEY') and value:
            key_value = value
        elif setting_name.endswith('_SECRET') and value:
            secret_value = value
    if not key_value or not secret_value:
        self._write_output(f'Skipping {category}: missing OAuth2 credentials', 'warning')
        return {'success': False, 'action': 'skipped', 'error': 'Missing OAuth2 credentials'}
    # Generate authenticator name and slug
    authenticator_name = category
    authenticator_slug = self._generate_authenticator_slug('github', category)
    # Map AWX category to Gateway authenticator type
    type_mapping = {
        'github': 'ansible_base.authentication.authenticator_plugins.github',
        'github-org': 'ansible_base.authentication.authenticator_plugins.github_org',
        'github-team': 'ansible_base.authentication.authenticator_plugins.github_team',
        'github-enterprise': 'ansible_base.authentication.authenticator_plugins.github_enterprise',
        'github-enterprise-org': 'ansible_base.authentication.authenticator_plugins.github_enterprise_org',
        'github-enterprise-team': 'ansible_base.authentication.authenticator_plugins.github_enterprise_team',
    }
    authenticator_type = type_mapping.get(category)
    if not authenticator_type:
        self._write_output(f'Unknown category {category}, skipping', 'warning')
        return {'success': False, 'action': 'skipped', 'error': f'Unknown category {category}'}
    self._write_output(f'\n--- Processing {category} authenticator ---')
    self._write_output(f'Name: {authenticator_name}')
    self._write_output(f'Slug: {authenticator_slug}')
    self._write_output(f'Type: {authenticator_type}')
    self._write_output(f'Client ID: {key_value}')
    # Never echo the real secret to the console.
    self._write_output(f'Client Secret: {"*" * 8}')
    # Build Gateway authenticator configuration
    gateway_config = {
        "name": authenticator_name,
        "slug": authenticator_slug,
        "type": authenticator_type,
        "enabled": False,
        "create_objects": True,  # Allow Gateway to create users/orgs/teams
        "remove_users": False,  # Don't remove users by default
        "configuration": {"KEY": key_value, "SECRET": secret_value},
    }
    # Add any additional configuration based on AWX settings
    additional_config = self._build_additional_config(category, settings)
    gateway_config["configuration"].update(additional_config)
    # GitHub authenticators have auto-generated fields that should be ignored during comparison
    # CALLBACK_URL - automatically created by Gateway
    # SCOPE - relevant for mappers with team/org requirement, allows to read the org or team
    # SECRET - the secret is encrypted in Gateway, we have no way of comparing the decrypted value
    ignore_keys = ['CALLBACK_URL', 'SCOPE']
    # Submit the authenticator (create or update as needed)
    result = self.submit_authenticator(gateway_config, ignore_keys, config)
    # Handle LOGIN_REDIRECT_OVERRIDE if applicable
    valid_login_urls = [f'/sso/login/{category}', f'/sso/login/{category}/']
    self.handle_login_override(config, valid_login_urls)
    return result
def _build_additional_config(self, category, settings):
"""Build additional configuration for specific authenticator types."""
additional_config = {}
# Add scope configuration if present
for setting_name, value in settings.items():
if setting_name.endswith('_SCOPE') and value:
additional_config['SCOPE'] = value
break
# Add GitHub Enterprise URL if present
if 'enterprise' in category:
for setting_name, value in settings.items():
if setting_name.endswith('_API_URL') and value:
additional_config['API_URL'] = value
elif setting_name.endswith('_URL') and value:
additional_config['URL'] = value
# Add organization name for org-specific authenticators
if 'org' in category:
for setting_name, value in settings.items():
if setting_name.endswith('_NAME') and value:
additional_config['NAME'] = value
break
# Add team ID for team-specific authenticators
if 'team' in category:
for setting_name, value in settings.items():
if setting_name.endswith('_ID') and value:
additional_config['ID'] = value
break
return additional_config

View File

@@ -1,102 +0,0 @@
"""
Google OAuth2 authenticator migrator.
This module handles the migration of Google OAuth2 authenticators from AWX to Gateway.
"""
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
class GoogleOAuth2Migrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of Google OAuth2 authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "Google OAuth2"
def get_controller_config(self):
    """
    Export Google OAuth2 authenticators. A Google OAuth2 authenticator is only exported if
    KEY and SECRET are configured.

    Returns:
        list: List of configured Google OAuth2 authentication providers with their settings
    """
    from django.conf import settings

    # NOTE(review): only KEY is checked here although the docstring says KEY and
    # SECRET; the direct attribute reads below assume the other
    # SOCIAL_AUTH_GOOGLE_OAUTH2_* settings are always defined — confirm.
    if not getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None):
        return []
    config_data = {
        'SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL,
        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY,
        'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET,
        'SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE,
    }
    login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
    return [
        {
            "category": self.get_authenticator_type(),
            "settings": config_data,
            "login_redirect_override": login_redirect_override,
        }
    ]
def _build_mappers(self):
    """Build the combined org + team mapper list for the Google authenticator.

    Org mappers come first (starting at order 1); team mappers continue the
    order sequence so the overall mapper ordering stays deterministic.
    """
    org_map = self.get_social_org_map('SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP')
    team_map = self.get_social_team_map('SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP')
    org_mappers, next_order = org_map_to_gateway_format(org_map, 1)
    team_mappers, _ = team_map_to_gateway_format(team_map, next_order)
    return org_mappers + team_mappers
def create_gateway_authenticator(self, config):
    """Create a Google OAuth2 authenticator in Gateway.

    Args:
        config: One entry produced by get_controller_config() with keys
                'category' and 'settings'.

    Returns:
        dict: Result from submit_authenticator().
    """
    category = config["category"]
    config_settings = config['settings']
    authenticator_slug = self._generate_authenticator_slug('google-oauth2', category.replace(" ", "-"))
    self._write_output(f"\n--- Processing {category} authenticator ---")
    gateway_config = {
        "name": "google",
        "slug": authenticator_slug,
        "type": "ansible_base.authentication.authenticator_plugins.google_oauth2",
        "enabled": False,
        "create_objects": True,  # Allow Gateway to create users/orgs/teams
        "remove_users": False,  # Don't remove users by default
        "configuration": {
            "KEY": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'),
            "SECRET": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'),
            "REDIRECT_STATE": True,
        },
        "mappers": self._build_mappers(),
    }
    # Fields Gateway manages/derives itself; skip them when diffing configs.
    ignore_keys = ["ACCESS_TOKEN_METHOD", "REVOKE_TOKEN_METHOD"]
    # Optional settings: include them when set, otherwise ignore them during
    # comparison so a Gateway-side default does not register as a difference.
    optional = {
        "CALLBACK_URL": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL'),
        "SCOPE": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'),
    }
    for key, value in optional.items():
        if value:
            gateway_config["configuration"][key] = value
        else:
            ignore_keys.append(key)
    result = self.submit_authenticator(gateway_config, ignore_keys, config)
    # Handle LOGIN_REDIRECT_OVERRIDE if applicable
    valid_login_urls = ['/sso/login/google-oauth2']
    self.handle_login_override(config, valid_login_urls)
    return result

View File

@@ -1,368 +0,0 @@
"""
LDAP authenticator migrator.
This module handles the migration of LDAP authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format, role_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
import ldap
class LDAPMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of LDAP authenticators from AWX to Gateway.
    """

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "LDAP"
def get_controller_config(self):
    """
    Export all LDAP authenticators. An LDAP authenticator is only exported if
    SERVER_URI is configured. Otherwise it will be skipped.

    Returns:
        list: List of configured LDAP authentication providers with their settings
    """
    # AWX supports up to 6 LDAP configurations: AUTH_LDAP (default) and AUTH_LDAP_1 through AUTH_LDAP_5
    ldap_instances = [None, 1, 2, 3, 4, 5]  # None represents the default AUTH_LDAP_ configuration
    found_configs = []
    for instance in ldap_instances:
        # Build the prefix for this LDAP instance
        prefix = f"AUTH_LDAP_{instance}_" if instance is not None else "AUTH_LDAP_"
        # The authenticator category is always "ldap"
        category = "ldap"
        try:
            # Get all LDAP settings for this instance
            config_data = self._get_ldap_instance_config(prefix)
        except Exception as e:
            # NOTE(review): traceback is not chained (no 'from e') — consider chaining.
            raise Exception(f'Could not retrieve {category} settings: {str(e)}')
        # Skip if SERVER_URI is not configured (required for LDAP to function)
        if not config_data.get('SERVER_URI'):
            continue
        # Convert organization, team, and role mappings to Gateway format
        org_map_value = config_data.get('ORGANIZATION_MAP', {})
        team_map_value = config_data.get('TEAM_MAP', {})
        role_map_value = config_data.get('USER_FLAGS_BY_GROUP', {})
        require_group_value = config_data.get('REQUIRE_GROUP', {})
        deny_group_value = config_data.get('DENY_GROUP', {})
        allow_mappers = []
        # Start with order 1 and maintain sequence across allow, org, team, and role mappers.
        # Deny first, then require, so the deny rule sorts ahead of the require rule.
        allow_mappers, next_order = self._ldap_group_allow_to_gateway_format(allow_mappers, deny_group_value, deny=True, start_order=1)
        allow_mappers, next_order = self._ldap_group_allow_to_gateway_format(allow_mappers, require_group_value, deny=False, start_order=next_order)
        org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=next_order, auth_type='ldap')
        team_mappers, next_order = team_map_to_gateway_format(team_map_value, start_order=next_order, auth_type='ldap')
        role_mappers, _ = role_map_to_gateway_format(role_map_value, start_order=next_order)
        found_configs.append(
            {
                'category': category,
                'settings': config_data,
                'org_mappers': org_mappers,
                'team_mappers': team_mappers,
                'role_mappers': role_mappers,
                'allow_mappers': allow_mappers,
            }
        )
    return found_configs
def _get_ldap_instance_config(self, prefix):
    """
    Get all LDAP configuration settings for a specific instance.

    Args:
        prefix: The setting prefix (e.g., 'AUTH_LDAP_' or 'AUTH_LDAP_1_')

    Returns:
        dict: Dictionary of LDAP configuration settings, with non-primitive
              setting values normalized to JSON-friendly shapes
    """
    # Define all LDAP setting keys
    ldap_keys = [
        'SERVER_URI',  # Required: LDAP server URI(s)
        'BIND_DN',  # Optional: Bind DN for authentication
        'BIND_PASSWORD',  # Optional: Bind password
        'START_TLS',  # Optional: Enable TLS
        'CONNECTION_OPTIONS',  # Optional: LDAP connection options
        'USER_SEARCH',  # Optional: User search configuration
        'USER_DN_TEMPLATE',  # Optional: User DN template
        'USER_ATTR_MAP',  # Optional: User attribute mapping
        'GROUP_SEARCH',  # Optional: Group search configuration
        'GROUP_TYPE',  # Optional: Group type class
        'GROUP_TYPE_PARAMS',  # Optional: Group type parameters
        'REQUIRE_GROUP',  # Optional: Required group DN
        'DENY_GROUP',  # Optional: Denied group DN
        'USER_FLAGS_BY_GROUP',  # Optional: User flags mapping
        'ORGANIZATION_MAP',  # Optional: Organization mapping
        'TEAM_MAP',  # Optional: Team mapping
    ]
    config_data = {}
    for key in ldap_keys:
        setting_name = f"{prefix}{key}"
        value = getattr(settings, setting_name, None)
        # Handle special field types that need conversion
        if key == 'GROUP_TYPE' and value:
            # Convert GROUP_TYPE class to string representation
            config_data[key] = type(value).__name__
        elif key == 'SERVER_URI' and value:
            # Convert SERVER_URI to list format if it's a comma-separated string
            config_data[key] = [uri.strip() for uri in value.split(',')]
        elif key in ['USER_SEARCH', 'GROUP_SEARCH'] and value:
            # Convert LDAPSearch objects to list format [base_dn, scope, filter]
            if hasattr(value, 'base_dn') and hasattr(value, 'filterstr'):
                # Get the actual scope instead of hardcoding SCOPE_SUBTREE
                scope = getattr(value, 'scope', ldap.SCOPE_SUBTREE)  # 2 is SCOPE_SUBTREE default
                scope_name = {ldap.SCOPE_BASE: 'SCOPE_BASE', ldap.SCOPE_ONELEVEL: 'SCOPE_ONELEVEL', ldap.SCOPE_SUBTREE: 'SCOPE_SUBTREE'}.get(
                    scope, 'SCOPE_SUBTREE'
                )
                config_data[key] = [value.base_dn, scope_name, value.filterstr]
            else:
                # Not an LDAPSearch-like object; pass it through unchanged.
                config_data[key] = value
        elif key in ['USER_ATTR_MAP', 'GROUP_TYPE_PARAMS', 'USER_FLAGS_BY_GROUP', 'ORGANIZATION_MAP', 'TEAM_MAP']:
            # Ensure dict fields are properly handled (never None)
            config_data[key] = value if value is not None else {}
        elif key == 'CONNECTION_OPTIONS' and value:
            # CONNECTION_OPTIONS is a dict of LDAP options
            config_data[key] = value if value is not None else {}
        else:
            # Store the value as-is for other fields
            config_data[key] = value
    return config_data
def create_gateway_authenticator(self, config):
    """Create an LDAP authenticator in Gateway.

    Args:
        config: One entry produced by get_controller_config() with keys
                'category' and 'settings'.

    Returns:
        dict: Result from submit_authenticator().
    """
    category = config['category']
    # NOTE(review): this local deliberately shadows the module-level django
    # 'settings' import for the remainder of the method.
    settings = config['settings']
    # Slug is derived from the category (always 'ldap').
    authenticator_slug = self._generate_authenticator_slug('ldap', category)
    # Build the gateway payload
    gateway_config = {
        'name': category,
        'slug': authenticator_slug,
        'type': 'ansible_base.authentication.authenticator_plugins.ldap',
        'create_objects': True,
        'remove_users': False,
        # NOTE(review): LDAP is created enabled while the social authenticators
        # in this package are created with enabled=False — confirm intentional.
        'enabled': True,
        'configuration': self._build_ldap_configuration(settings),
    }
    self._write_output(f'Creating LDAP authenticator: {gateway_config["name"]}')
    # NOTE(review): the comment below mentions BIND_PASSWORD (encrypted in
    # Gateway, not comparable) but the list is empty — confirm which is intended.
    # LDAP authenticators have auto-generated fields that should be ignored during comparison
    # BIND_PASSWORD - encrypted value, can't be compared
    ignore_keys = []
    # Submit the authenticator using the base class method
    return self.submit_authenticator(gateway_config, config=config, ignore_keys=ignore_keys)
def _build_ldap_configuration(self, settings):
"""Build the LDAP configuration section for Gateway."""
config = {}
# Server URI is required
if settings.get('SERVER_URI'):
config['SERVER_URI'] = settings['SERVER_URI']
# Authentication settings
if settings.get('BIND_DN'):
config['BIND_DN'] = settings['BIND_DN']
if settings.get('BIND_PASSWORD'):
config['BIND_PASSWORD'] = settings['BIND_PASSWORD']
# TLS settings
if settings.get('START_TLS') is not None:
config['START_TLS'] = settings['START_TLS']
# User search configuration
if settings.get('USER_SEARCH'):
config['USER_SEARCH'] = settings['USER_SEARCH']
# User attribute mapping
if settings.get('USER_ATTR_MAP'):
config['USER_ATTR_MAP'] = settings['USER_ATTR_MAP']
# Group search configuration
if settings.get('GROUP_SEARCH'):
config['GROUP_SEARCH'] = settings['GROUP_SEARCH']
# Group type and parameters
if settings.get('GROUP_TYPE'):
config['GROUP_TYPE'] = settings['GROUP_TYPE']
if settings.get('GROUP_TYPE_PARAMS'):
config['GROUP_TYPE_PARAMS'] = settings['GROUP_TYPE_PARAMS']
# Connection options - convert numeric LDAP constants to string keys
if settings.get('CONNECTION_OPTIONS'):
config['CONNECTION_OPTIONS'] = self._convert_ldap_connection_options(settings['CONNECTION_OPTIONS'])
# User DN template
if settings.get('USER_DN_TEMPLATE'):
config['USER_DN_TEMPLATE'] = settings['USER_DN_TEMPLATE']
# REQUIRE_GROUP and DENY_GROUP are handled as allow mappers, not included in config
# USER_FLAGS_BY_GROUP is handled as role mappers, not included in config
return config
def _convert_ldap_connection_options(self, connection_options):
    """
    Convert numeric LDAP connection option constants to their string representations.
    Uses the actual constants from the python-ldap library.

    Args:
        connection_options: Dictionary with numeric LDAP option keys

    Returns:
        dict: Dictionary with string LDAP option keys; keys that do not match
              a known ldap.OPT_* constant are silently dropped
    """
    # Comprehensive mapping using LDAP constants as keys
    ldap_option_map = {
        # Basic LDAP options
        ldap.OPT_API_INFO: 'OPT_API_INFO',
        ldap.OPT_DEREF: 'OPT_DEREF',
        ldap.OPT_SIZELIMIT: 'OPT_SIZELIMIT',
        ldap.OPT_TIMELIMIT: 'OPT_TIMELIMIT',
        ldap.OPT_REFERRALS: 'OPT_REFERRALS',
        ldap.OPT_RESULT_CODE: 'OPT_RESULT_CODE',
        ldap.OPT_ERROR_NUMBER: 'OPT_ERROR_NUMBER',
        ldap.OPT_RESTART: 'OPT_RESTART',
        ldap.OPT_PROTOCOL_VERSION: 'OPT_PROTOCOL_VERSION',
        ldap.OPT_SERVER_CONTROLS: 'OPT_SERVER_CONTROLS',
        ldap.OPT_CLIENT_CONTROLS: 'OPT_CLIENT_CONTROLS',
        ldap.OPT_API_FEATURE_INFO: 'OPT_API_FEATURE_INFO',
        ldap.OPT_HOST_NAME: 'OPT_HOST_NAME',
        ldap.OPT_DESC: 'OPT_DESC',
        ldap.OPT_DIAGNOSTIC_MESSAGE: 'OPT_DIAGNOSTIC_MESSAGE',
        ldap.OPT_ERROR_STRING: 'OPT_ERROR_STRING',
        ldap.OPT_MATCHED_DN: 'OPT_MATCHED_DN',
        ldap.OPT_DEBUG_LEVEL: 'OPT_DEBUG_LEVEL',
        ldap.OPT_TIMEOUT: 'OPT_TIMEOUT',
        ldap.OPT_REFHOPLIMIT: 'OPT_REFHOPLIMIT',
        ldap.OPT_NETWORK_TIMEOUT: 'OPT_NETWORK_TIMEOUT',
        ldap.OPT_URI: 'OPT_URI',
        # TLS options
        ldap.OPT_X_TLS: 'OPT_X_TLS',
        ldap.OPT_X_TLS_CTX: 'OPT_X_TLS_CTX',
        ldap.OPT_X_TLS_CACERTFILE: 'OPT_X_TLS_CACERTFILE',
        ldap.OPT_X_TLS_CACERTDIR: 'OPT_X_TLS_CACERTDIR',
        ldap.OPT_X_TLS_CERTFILE: 'OPT_X_TLS_CERTFILE',
        ldap.OPT_X_TLS_KEYFILE: 'OPT_X_TLS_KEYFILE',
        ldap.OPT_X_TLS_REQUIRE_CERT: 'OPT_X_TLS_REQUIRE_CERT',
        ldap.OPT_X_TLS_CIPHER_SUITE: 'OPT_X_TLS_CIPHER_SUITE',
        ldap.OPT_X_TLS_RANDOM_FILE: 'OPT_X_TLS_RANDOM_FILE',
        ldap.OPT_X_TLS_DHFILE: 'OPT_X_TLS_DHFILE',
        ldap.OPT_X_TLS_NEVER: 'OPT_X_TLS_NEVER',
        ldap.OPT_X_TLS_HARD: 'OPT_X_TLS_HARD',
        ldap.OPT_X_TLS_DEMAND: 'OPT_X_TLS_DEMAND',
        ldap.OPT_X_TLS_ALLOW: 'OPT_X_TLS_ALLOW',
        ldap.OPT_X_TLS_TRY: 'OPT_X_TLS_TRY',
        ldap.OPT_X_TLS_CRL_NONE: 'OPT_X_TLS_CRL_NONE',
        ldap.OPT_X_TLS_CRL_PEER: 'OPT_X_TLS_CRL_PEER',
        ldap.OPT_X_TLS_CRL_ALL: 'OPT_X_TLS_CRL_ALL',
        # SASL options
        ldap.OPT_X_SASL_MECH: 'OPT_X_SASL_MECH',
        ldap.OPT_X_SASL_REALM: 'OPT_X_SASL_REALM',
        ldap.OPT_X_SASL_AUTHCID: 'OPT_X_SASL_AUTHCID',
        ldap.OPT_X_SASL_AUTHZID: 'OPT_X_SASL_AUTHZID',
        ldap.OPT_X_SASL_SSF: 'OPT_X_SASL_SSF',
        ldap.OPT_X_SASL_SSF_EXTERNAL: 'OPT_X_SASL_SSF_EXTERNAL',
        ldap.OPT_X_SASL_SECPROPS: 'OPT_X_SASL_SECPROPS',
        ldap.OPT_X_SASL_SSF_MIN: 'OPT_X_SASL_SSF_MIN',
        ldap.OPT_X_SASL_SSF_MAX: 'OPT_X_SASL_SSF_MAX',
    }
    # Add optional options that may not be available in all versions
    # of python-ldap / libldap; probed with hasattr to stay portable.
    optional_options = [
        'OPT_TCP_USER_TIMEOUT',
        'OPT_DEFBASE',
        'OPT_X_TLS_VERSION',
        'OPT_X_TLS_CIPHER',
        'OPT_X_TLS_PEERCERT',
        'OPT_X_TLS_CRLCHECK',
        'OPT_X_TLS_CRLFILE',
        'OPT_X_TLS_NEWCTX',
        'OPT_X_TLS_PROTOCOL_MIN',
        'OPT_X_TLS_PACKAGE',
        'OPT_X_TLS_ECNAME',
        'OPT_X_TLS_REQUIRE_SAN',
        'OPT_X_TLS_PROTOCOL_MAX',
        'OPT_X_TLS_PROTOCOL_SSL3',
        'OPT_X_TLS_PROTOCOL_TLS1_0',
        'OPT_X_TLS_PROTOCOL_TLS1_1',
        'OPT_X_TLS_PROTOCOL_TLS1_2',
        'OPT_X_TLS_PROTOCOL_TLS1_3',
        'OPT_X_SASL_NOCANON',
        'OPT_X_SASL_USERNAME',
        'OPT_CONNECT_ASYNC',
        'OPT_X_KEEPALIVE_IDLE',
        'OPT_X_KEEPALIVE_PROBES',
        'OPT_X_KEEPALIVE_INTERVAL',
    ]
    for option_name in optional_options:
        if hasattr(ldap, option_name):
            ldap_option_map[getattr(ldap, option_name)] = option_name
    # Translate each numeric key to its name; unknown keys are dropped.
    converted_options = {}
    for key, value in connection_options.items():
        if key in ldap_option_map:
            converted_options[ldap_option_map[key]] = value
    return converted_options
def _ldap_group_allow_to_gateway_format(self, result: list, ldap_group: str, deny=False, start_order=1):
"""Convert an LDAP require or deny group to a Gateway mapper
Args:
result: array to append the mapper to
ldap_group: An LDAP group query
deny: Whether the mapper denies or requires users to be in the group
start_order: Starting order value for the mappers
Returns:
tuple: (List of Gateway-compatible organization mappers, next_order)
"""
if ldap_group is None:
return result, start_order
if deny:
result.append(
{
"name": "LDAP-DenyGroup",
"authenticator": -1,
"map_type": "allow",
"revoke": True,
"triggers": {"groups": {"has_or": [ldap_group]}},
"order": start_order,
}
)
else:
result.append(
{
"name": "LDAP-RequireGroup",
"authenticator": -1,
"map_type": "allow",
"revoke": False,
"triggers": {"groups": {"has_and": [ldap_group]}},
"order": start_order,
}
)
return result, start_order + 1

View File

@@ -1,113 +0,0 @@
"""
Generic OIDC authenticator migrator.
This module handles the migration of generic OIDC authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
class OIDCMigrator(BaseAuthenticatorMigrator):
    """
    Handles the migration of generic OIDC authenticators from AWX to Gateway.
    """

    # Category label used in the exported config entries.
    CATEGORY = "OIDC"
    # DAB authenticator plugin path used as the Gateway authenticator 'type'.
    AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.oidc"

    def get_authenticator_type(self):
        """Get the human-readable authenticator type name."""
        return "OIDC"
def get_controller_config(self):
"""
Export generic OIDC authenticators. An OIDC authenticator is only exported if both,
key and secret, are defined. Otherwise it will be skipped.
Returns:
list: List of configured OIDC authentication providers with their settings
"""
key_value = getattr(settings, "SOCIAL_AUTH_OIDC_KEY", None)
secret_value = getattr(settings, "SOCIAL_AUTH_OIDC_SECRET", None)
oidc_endpoint = getattr(settings, "SOCIAL_AUTH_OIDC_OIDC_ENDPOINT", None)
# Skip if required settings are not configured
if not key_value or not secret_value or not oidc_endpoint:
return []
# Get additional OIDC configuration
verify_ssl = getattr(settings, "SOCIAL_AUTH_OIDC_VERIFY_SSL", True)
# Get organization and team mappings
org_map_value = self.get_social_org_map()
team_map_value = self.get_social_team_map()
# Convert org and team mappings from AWX to the Gateway format
# Start with order 1 and maintain sequence across both org and team mappers
org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)
config_data = {
"name": "default",
"type": self.AUTH_TYPE,
"enabled": False,
"create_objects": True,
"remove_users": False,
"configuration": {
"OIDC_ENDPOINT": oidc_endpoint,
"KEY": key_value,
"SECRET": secret_value,
"VERIFY_SSL": verify_ssl,
},
}
return [
{
"category": self.CATEGORY,
"settings": config_data,
"org_mappers": org_mappers,
"team_mappers": team_mappers,
}
]
def create_gateway_authenticator(self, config):
"""Create a generic OIDC authenticator in Gateway."""
category = config["category"]
config_settings = config["settings"]
# Generate authenticator name and slug
authenticator_name = "oidc"
authenticator_slug = self._generate_authenticator_slug("oidc", category)
self._write_output(f"\n--- Processing {category} authenticator ---")
self._write_output(f"Name: {authenticator_name}")
self._write_output(f"Slug: {authenticator_slug}")
self._write_output(f"Type: {config_settings['type']}")
# Build Gateway authenticator configuration
gateway_config = {
"name": authenticator_name,
"slug": authenticator_slug,
"type": config_settings["type"],
"enabled": config_settings["enabled"],
"create_objects": config_settings["create_objects"],
"remove_users": config_settings["remove_users"],
"configuration": config_settings["configuration"],
}
# OIDC authenticators have auto-generated fields that should be ignored during comparison
# CALLBACK_URL - automatically created by Gateway
# SCOPE - defaults are set by Gateway plugin
# SECRET - the secret is encrypted in Gateway, we have no way of comparing the decrypted value
ignore_keys = ['CALLBACK_URL', 'SCOPE']
# Submit the authenticator (create or update as needed)
result = self.submit_authenticator(gateway_config, ignore_keys, config)
# Handle LOGIN_REDIRECT_OVERRIDE if applicable
valid_login_urls = ['/sso/login/oidc']
self.handle_login_override(config, valid_login_urls)
return result

View File

@@ -1,85 +0,0 @@
"""
RADIUS authenticator migrator.
This module handles the migration of RADIUS authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
class RADIUSMigrator(BaseAuthenticatorMigrator):
"""
Handles the migration of RADIUS authenticators from AWX to Gateway.
"""
CATEGORY = "RADIUS"
AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.radius"
def get_authenticator_type(self):
"""Get the human-readable authenticator type name."""
return "RADIUS"
def get_controller_config(self):
"""
Export RADIUS authenticators. A RADIUS authenticator is only exported if
required configuration is present.
Returns:
list: List of configured RADIUS authentication providers with their settings
"""
server = getattr(settings, "RADIUS_SERVER", None)
if not server:
return []
port = getattr(settings, "RADIUS_PORT", 1812)
secret = getattr(settings, "RADIUS_SECRET", "")
config_data = {
"name": "default",
"type": self.AUTH_TYPE,
"enabled": True,
"create_objects": True,
"remove_users": False,
"configuration": {
"SERVER": server,
"PORT": port,
"SECRET": secret,
},
}
return [
{
"category": self.CATEGORY,
"settings": config_data,
}
]
def create_gateway_authenticator(self, config):
"""Create a RADIUS authenticator in Gateway."""
category = config["category"]
config_settings = config["settings"]
# Generate authenticator name and slug
authenticator_name = "radius"
authenticator_slug = self._generate_authenticator_slug("radius", category)
self._write_output(f"\n--- Processing {category} authenticator ---")
self._write_output(f"Name: {authenticator_name}")
self._write_output(f"Slug: {authenticator_slug}")
self._write_output(f"Type: {config_settings['type']}")
# Build Gateway authenticator configuration
gateway_config = {
"name": authenticator_name,
"slug": authenticator_slug,
"type": config_settings["type"],
"enabled": config_settings["enabled"],
"create_objects": config_settings["create_objects"],
"remove_users": config_settings["remove_users"],
"configuration": config_settings["configuration"],
}
# Submit the authenticator (create or update as needed)
return self.submit_authenticator(gateway_config, config=config)

View File

@@ -1,308 +0,0 @@
"""
SAML authenticator migrator.
This module handles the migration of SAML authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
ROLE_MAPPER = {
"is_superuser_role": {"role": None, "map_type": "is_superuser", "revoke": "remove_superusers"},
"is_system_auditor_role": {"role": "Platform Auditor", "map_type": "role", "revoke": "remove_system_auditors"},
}
ATTRIBUTE_VALUE_MAPPER = {
"is_superuser_attr": {"role": None, "map_type": "is_superuser", "value": "is_superuser_value", "revoke": "remove_superusers"},
"is_system_auditor_attr": {"role": "Platform Auditor", "map_type": "role", "value": "is_system_auditor_value", "revoke": "remove_system_auditors"},
}
ORG_ATTRIBUTE_MAPPER = {
"saml_attr": {"role": "Organization Member", "revoke": "remove"},
"saml_admin_attr": {"role": "Organization Admin", "revoke": "remove_admins"},
}
def _split_chunks(data: str, length: int = 64) -> list[str]:
return [data[i : i + length] for i in range(0, len(data), length)]
def _to_pem_cert(data: str) -> list[str]:
items = ["-----BEGIN CERTIFICATE-----"]
items += _split_chunks(data)
items.append("-----END CERTIFICATE-----")
return items
class SAMLMigrator(BaseAuthenticatorMigrator):
"""
Handles the migration of SAML authenticators from AWX to Gateway.
"""
CATEGORY = "SAML"
AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.saml"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.next_order = 1
self.team_mappers = []
def get_authenticator_type(self):
"""Get the human-readable authenticator type name."""
return "SAML"
def get_controller_config(self):
"""
Export SAML authenticators. A SAML authenticator is only exported if
required configuration is present.
Returns:
list: List of configured SAML authentication providers with their settings
"""
found_configs = []
enabled = False
remove_users = True
create_objects = getattr(settings, "SAML_AUTO_CREATE_OBJECTS", True)
idps = getattr(settings, "SOCIAL_AUTH_SAML_ENABLED_IDPS", {})
security_config = getattr(settings, "SOCIAL_AUTH_SAML_SECURITY_CONFIG", {})
# Get org and team mappings using the new fallback functions
org_map_value = self.get_social_org_map("SOCIAL_AUTH_SAML_ORGANIZATION_MAP")
team_map_value = self.get_social_team_map("SOCIAL_AUTH_SAML_TEAM_MAP")
self.extra_data = getattr(settings, "SOCIAL_AUTH_SAML_EXTRA_DATA", [])
self._add_to_extra_data(['Role', 'Role'])
support_contact = getattr(settings, "SOCIAL_AUTH_SAML_SUPPORT_CONTACT", {})
technical_contact = getattr(settings, "SOCIAL_AUTH_SAML_TECHNICAL_CONTACT", {})
org_info = getattr(settings, "SOCIAL_AUTH_SAML_ORG_INFO", {})
sp_private_key = getattr(settings, "SOCIAL_AUTH_SAML_SP_PRIVATE_KEY", None)
sp_public_cert = getattr(settings, "SOCIAL_AUTH_SAML_SP_PUBLIC_CERT", None)
sp_entity_id = getattr(settings, "SOCIAL_AUTH_SAML_SP_ENTITY_ID", None)
sp_extra = getattr(settings, "SOCIAL_AUTH_SAML_SP_EXTRA", {})
saml_team_attr = getattr(settings, "SOCIAL_AUTH_SAML_TEAM_ATTR", {})
org_attr = getattr(settings, "SOCIAL_AUTH_SAML_ORGANIZATION_ATTR", {})
user_flags_by_attr = getattr(settings, "SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR", {})
login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
org_mappers, self.next_order = org_map_to_gateway_format(org_map_value, start_order=self.next_order)
self.team_mappers, self.next_order = team_map_to_gateway_format(team_map_value, start_order=self.next_order)
self._team_attr_to_gateway_format(saml_team_attr)
self._user_flags_by_role_to_gateway_format(user_flags_by_attr)
self._user_flags_by_attr_value_to_gateway_format(user_flags_by_attr)
self._org_attr_to_gateway_format(org_attr)
for name, value in idps.items():
config_data = {
"name": name,
"type": self.AUTH_TYPE,
"enabled": enabled,
"create_objects": create_objects,
"remove_users": remove_users,
"configuration": {
"IDP_URL": value.get("url"),
"IDP_X509_CERT": "\n".join(_to_pem_cert(value.get("x509cert"))),
"IDP_ENTITY_ID": value.get("entity_id"),
"IDP_ATTR_EMAIL": value.get("attr_email"),
"IDP_ATTR_USERNAME": value.get("attr_username"),
"IDP_ATTR_FIRST_NAME": value.get("attr_first_name"),
"IDP_ATTR_LAST_NAME": value.get("attr_last_name"),
"IDP_ATTR_USER_PERMANENT_ID": value.get("attr_user_permanent_id"),
"IDP_GROUPS": value.get("attr_groups"),
"SP_ENTITY_ID": sp_entity_id,
"SP_PUBLIC_CERT": sp_public_cert,
"SP_PRIVATE_KEY": sp_private_key,
"ORG_INFO": org_info,
"TECHNICAL_CONTACT": technical_contact,
"SUPPORT_CONTACT": support_contact,
"SECURITY_CONFIG": security_config,
"SP_EXTRA": sp_extra,
"EXTRA_DATA": self.extra_data,
},
}
found_configs.append(
{
"category": self.CATEGORY,
"settings": config_data,
"org_mappers": org_mappers,
"team_mappers": self.team_mappers,
"login_redirect_override": login_redirect_override,
}
)
return found_configs
def create_gateway_authenticator(self, config):
"""Create a SAML authenticator in Gateway."""
category = config["category"]
config_settings = config["settings"]
name = config_settings["name"]
# Generate authenticator name and slug
authenticator_name = f"{category.replace('-', '_').title()}-{name}"
authenticator_slug = self._generate_authenticator_slug("saml", name)
self._write_output(f"\n--- Processing {category} authenticator ---")
self._write_output(f"Name: {authenticator_name}")
self._write_output(f"Slug: {authenticator_slug}")
self._write_output(f"Type: {config_settings['type']}")
# Build Gateway authenticator configuration
gateway_config = {
"name": authenticator_name,
"slug": authenticator_slug,
"type": config_settings["type"],
"enabled": False,
"create_objects": True, # Allow Gateway to create users/orgs/teams
"remove_users": False, # Don't remove users by default
"configuration": config_settings["configuration"],
}
# CALLBACK_URL - automatically created by Gateway
ignore_keys = ["CALLBACK_URL", "SP_PRIVATE_KEY"]
# Submit the authenticator (create or update as needed)
result = self.submit_authenticator(gateway_config, ignore_keys, config)
# Handle LOGIN_REDIRECT_OVERRIDE if applicable
valid_login_urls = [f'/sso/login/saml/?idp={name}', f'/sso/login/saml/?idp={name}/']
self.handle_login_override(config, valid_login_urls)
return result
def _team_attr_to_gateway_format(self, saml_team_attr):
saml_attr = saml_team_attr.get("saml_attr")
if not saml_attr:
return
revoke = saml_team_attr.get('remove', True)
self._add_to_extra_data([saml_attr, saml_attr])
for item in saml_team_attr["team_org_map"]:
team_list = item["team"]
if isinstance(team_list, str):
team_list = [team_list]
team = item.get("team_alias") or item["team"]
self.team_mappers.append(
{
"map_type": "team",
"role": "Team Member",
"organization": item["organization"],
"team": team,
"name": "Team" + "-" + team + "-" + item["organization"],
"revoke": revoke,
"authenticator": -1,
"triggers": {"attributes": {saml_attr: {"in": team_list}, "join_condition": "or"}},
"order": self.next_order,
}
)
self.next_order += 1
def _user_flags_by_role_to_gateway_format(self, user_flags_by_attr):
for k, v in ROLE_MAPPER.items():
if k in user_flags_by_attr:
if v['role']:
name = f"Role-{v['role']}"
else:
name = f"Role-{v['map_type']}"
revoke = user_flags_by_attr.get(v['revoke'], True)
self.team_mappers.append(
{
"map_type": v["map_type"],
"role": v["role"],
"name": name,
"organization": None,
"team": None,
"revoke": revoke,
"order": self.next_order,
"authenticator": -1,
"triggers": {
"attributes": {
"Role": {"in": user_flags_by_attr[k]},
"join_condition": "or",
}
},
}
)
self.next_order += 1
def _user_flags_by_attr_value_to_gateway_format(self, user_flags_by_attr):
for k, v in ATTRIBUTE_VALUE_MAPPER.items():
if k in user_flags_by_attr:
value = user_flags_by_attr.get(v['value'])
if value:
if isinstance(value, list):
value = {'in': value}
else:
value = {'in': [value]}
else:
value = {}
revoke = user_flags_by_attr.get(v['revoke'], True)
attr_name = user_flags_by_attr[k]
self._add_to_extra_data([attr_name, attr_name])
if v['role']:
name = f"Role-{v['role']}-attr"
else:
name = f"Role-{v['map_type']}-attr"
self.team_mappers.append(
{
"map_type": v["map_type"],
"role": v["role"],
"name": name,
"organization": None,
"team": None,
"revoke": revoke,
"order": self.next_order,
"authenticator": -1,
"triggers": {
"attributes": {
attr_name: value,
"join_condition": "or",
}
},
}
)
self.next_order += 1
def _org_attr_to_gateway_format(self, org_attr):
for k, v in ORG_ATTRIBUTE_MAPPER.items():
if k in org_attr:
attr_name = org_attr.get(k)
organization = "{% " + f"for_attr_value('{attr_name}')" + " %}"
revoke = org_attr.get(v['revoke'], True)
self._add_to_extra_data([attr_name, attr_name])
name = f"Role-{v['role']}-attr"
self.team_mappers.append(
{
"map_type": 'organization',
"role": v['role'],
"name": name,
"organization": organization,
"team": None,
"revoke": revoke,
"order": self.next_order,
"authenticator": -1,
"triggers": {
"attributes": {
attr_name: {},
"join_condition": "or",
}
},
}
)
self.next_order += 1
def _add_to_extra_data(self, item: list):
if item not in self.extra_data:
self.extra_data.append(item)

View File

@@ -1,197 +0,0 @@
"""
Settings migrator.
This module handles the migration of AWX settings to Gateway.
"""
from django.conf import settings
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
class SettingsMigrator(BaseAuthenticatorMigrator):
"""
Handles the migration of AWX settings to Gateway.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Define transformer functions for each setting
self.setting_transformers = {
'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': self._transform_social_auth_username_is_full_email,
'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': self._transform_allow_oauth2_for_external_users,
}
def _convert_setting_name(self, setting):
keys = {
"CUSTOM_LOGIN_INFO": "custom_login_info",
"CUSTOM_LOGO": "custom_logo",
}
return keys.get(setting, setting)
def _transform_social_auth_username_is_full_email(self, value):
# SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL is a boolean and does not need to be transformed
return value
def _transform_allow_oauth2_for_external_users(self, value):
# ALLOW_OAUTH2_FOR_EXTERNAL_USERS is a boolean and does not need to be transformed
return value
def get_authenticator_type(self):
"""Get the human-readable authenticator type name."""
return "Settings"
def get_controller_config(self):
"""
Export relevant AWX settings that need to be migrated to Gateway.
Returns:
list: List of configured settings that need to be migrated
"""
# Define settings that should be migrated from AWX to Gateway
settings_to_migrate = [
'SESSION_COOKIE_AGE',
'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL',
'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
'LOGIN_REDIRECT_OVERRIDE',
'ORG_ADMINS_CAN_SEE_ALL_USERS',
'MANAGE_ORGANIZATION_AUTH',
]
found_configs = []
for setting_name in settings_to_migrate:
# Handle LOGIN_REDIRECT_OVERRIDE specially
if setting_name == 'LOGIN_REDIRECT_OVERRIDE':
if BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator:
# Use the URL computed by the authenticator migrator
setting_value = BaseAuthenticatorMigrator.login_redirect_override_new_url
else:
# Use the original controller setting value
setting_value = getattr(settings, setting_name, None)
else:
setting_value = getattr(settings, setting_name, None)
# Only include settings that have non-None and non-empty values
if setting_value is not None and setting_value != "":
# Apply transformer function if available
transformer = self.setting_transformers.get(setting_name)
if transformer:
setting_value = transformer(setting_value)
# Skip migration if transformer returned None or empty string
if setting_value is not None and setting_value != "":
found_configs.append(
{
'category': 'global-settings',
'setting_name': setting_name,
'setting_value': setting_value,
'org_mappers': [], # Settings don't have mappers
'team_mappers': [], # Settings don't have mappers
'role_mappers': [], # Settings don't have mappers
'allow_mappers': [], # Settings don't have mappers
}
)
else:
self._write_output(f'\nIgnoring {setting_name} because it is None or empty after transformation')
else:
self._write_output(f'\nIgnoring {setting_name} because it is None or empty')
return found_configs
def create_gateway_authenticator(self, config):
"""
Migrate AWX settings to Gateway.
Note: This doesn't create authenticators, but updates Gateway settings.
"""
setting_name = config['setting_name']
setting_value = config['setting_value']
self._write_output(f'\n--- Migrating setting: {setting_name} ---')
try:
gateway_setting_name = self._convert_setting_name(setting_name)
# Get current gateway setting value to check if update is needed
current_gateway_value = self.gateway_client.get_gateway_setting(gateway_setting_name)
# Compare current gateway value with controller value
if current_gateway_value == setting_value:
self._write_output(f'↷ Setting unchanged: {setting_name} (value already matches)', 'warning')
return {'success': True, 'action': 'skipped', 'error': None}
self._write_output(f'Current value: {current_gateway_value}')
self._write_output(f'New value: {setting_value}')
# Use the new update_gateway_setting method
self.gateway_client.update_gateway_setting(gateway_setting_name, setting_value)
self._write_output(f'✓ Successfully migrated setting: {setting_name}', 'success')
# Return success result in the expected format
return {'success': True, 'action': 'updated', 'error': None}
except Exception as e:
self._write_output(f'✗ Failed to migrate setting {setting_name}: {str(e)}', 'error')
return {'success': False, 'action': 'failed', 'error': str(e)}
def migrate(self):
"""
Main entry point - orchestrates the settings migration process.
Returns:
dict: Summary of migration results
"""
# Get settings from AWX/Controller
configs = self.get_controller_config()
if not configs:
self._write_output('No settings found to migrate.', 'warning')
return {
'created': 0,
'updated': 0,
'unchanged': 0,
'failed': 0,
'mappers_created': 0,
'mappers_updated': 0,
'mappers_failed': 0,
'settings_created': 0,
'settings_updated': 0,
'settings_unchanged': 0,
'settings_failed': 0,
}
self._write_output(f'Found {len(configs)} setting(s) to migrate.', 'success')
# Process each setting
created_settings = []
updated_settings = []
unchanged_settings = []
failed_settings = []
for config in configs:
result = self.create_gateway_authenticator(config)
if result['success']:
if result['action'] == 'created':
created_settings.append(config)
elif result['action'] == 'updated':
updated_settings.append(config)
elif result['action'] == 'skipped':
unchanged_settings.append(config)
else:
failed_settings.append(config)
# Settings don't have mappers, or authenticators, so authenticator and mapper counts are always 0
return {
'created': 0,
'updated': 0,
'unchanged': 0,
'failed': 0,
'mappers_created': 0,
'mappers_updated': 0,
'mappers_failed': 0,
'settings_created': len(created_settings),
'settings_updated': len(updated_settings),
'settings_unchanged': len(unchanged_settings),
'settings_failed': len(failed_settings),
}

View File

@@ -1,93 +0,0 @@
"""
TACACS+ authenticator migrator.
This module handles the migration of TACACS+ authenticators from AWX to Gateway.
"""
from django.conf import settings
from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
class TACACSMigrator(BaseAuthenticatorMigrator):
"""
Handles the migration of TACACS+ authenticators from AWX to Gateway.
"""
CATEGORY = "TACACSPLUS"
AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.tacacs"
def get_authenticator_type(self):
"""Get the human-readable authenticator type name.
Named TACACSPLUS because `+` is not allowed in authenticator slug.
"""
return "TACACSPLUS"
def get_controller_config(self):
"""
Export TACACS+ authenticator. A TACACS+ authenticator is only exported if
required configuration is present.
Returns:
list: List of configured TACACS+ authentication providers with their settings
"""
host = getattr(settings, "TACACSPLUS_HOST", None)
if not host:
return []
port = getattr(settings, "TACACSPLUS_PORT", 49)
secret = getattr(settings, "TACACSPLUS_SECRET", "")
session_timeout = getattr(settings, "TACACSPLUS_SESSION_TIMEOUT", 5)
auth_protocol = getattr(settings, "TACACSPLUS_AUTH_PROTOCOL", "ascii")
rem_addr = getattr(settings, "TACACSPLUS_REM_ADDR", False)
config_data = {
"name": "default",
"type": self.AUTH_TYPE,
"enabled": True,
"create_objects": True,
"remove_users": False,
"configuration": {
"HOST": host,
"PORT": port,
"SECRET": secret,
"SESSION_TIMEOUT": session_timeout,
"AUTH_PROTOCOL": auth_protocol,
"REM_ADDR": rem_addr,
},
}
return [
{
"category": self.CATEGORY,
"settings": config_data,
}
]
def create_gateway_authenticator(self, config):
"""Create a TACACS+ authenticator in Gateway."""
category = config["category"]
config_settings = config["settings"]
# Generate authenticator name and slug
authenticator_name = "tacacs"
authenticator_slug = self._generate_authenticator_slug("tacacs", category)
self._write_output(f"\n--- Processing {category} authenticator ---")
self._write_output(f"Name: {authenticator_name}")
self._write_output(f"Slug: {authenticator_slug}")
self._write_output(f"Type: {config_settings['type']}")
# Build Gateway authenticator configuration
gateway_config = {
"name": authenticator_name,
"slug": authenticator_slug,
"type": config_settings["type"],
"enabled": config_settings["enabled"],
"create_objects": config_settings["create_objects"],
"remove_users": config_settings["remove_users"],
"configuration": config_settings["configuration"],
}
# Submit the authenticator (create or update as needed)
return self.submit_authenticator(gateway_config, config=config)

View File

@@ -37,7 +37,6 @@ in the future. The `DOCUMENTATION` for each module will report this.
You can specify authentication by host, username, and password.
<<<<<<< HEAD
These can be specified via (from highest to lowest precedence):
- direct module parameters
@@ -55,8 +54,6 @@ verify_ssl = true
username = foo
password = bar
```
=======
>>>>>>> tower/test_stable-2.6
## Release and Upgrade Notes

View File

@@ -263,14 +263,7 @@ plugin_routing:
removal_date: '2022-01-23'
warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_node_wait instead.
redirect: awx.awx.workflow_node_wait
<<<<<<< HEAD
role:
deprecation:
removal_version: '25.0.0'
warning_text: This is replaced by the DAB role system, via the role_definition module.
=======
application:
deprecation:
removal_version: '25.0.0'
warning_text: The application module manages a legacy authentication feature that is being phased out, migrate to token-based authentication instead.
>>>>>>> tower/test_stable-2.6

View File

@@ -40,7 +40,6 @@ options:
- A dictionary structure as returned by the token module.
- If value not set, will try environment variable C(CONTROLLER_OAUTH_TOKEN) and then config files
type: raw
aliases: [ controller_oauthtoken ]
version_added: "3.7.0"
validate_certs:
description:

View File

@@ -40,20 +40,11 @@ options:
version: '4.0.0'
why: Collection name change
alternatives: 'TOWER_PASSWORD, AAP_PASSWORD'
<<<<<<< HEAD
aap_token:
=======
oauth_token:
>>>>>>> tower/test_stable-2.6
description:
- The OAuth token to use.
env:
- name: AAP_TOKEN
<<<<<<< HEAD
=======
- name: CONTROLLER_OAUTH_TOKEN
- name: TOWER_OAUTH_TOKEN
>>>>>>> tower/test_stable-2.6
deprecated:
collection_name: 'awx.awx'
version: '4.0.0'

View File

@@ -73,15 +73,9 @@ DOCUMENTATION = """
"""
EXAMPLES = """
<<<<<<< HEAD
- name: Create a string for a schedule
debug:
msg: "{{ lookup('awx.awx.schedule_rrule', 'none', start_date='1979-09-13 03:45:07') }}"
=======
- name: Create a string for a schedule
debug:
msg: "{{ lookup('awx.awx.schedule_rrule', 'none', start_date='1979-09-13 03:45:07') }}"
>>>>>>> tower/test_stable-2.6
"""
RETURN = """

View File

@@ -107,7 +107,6 @@ DOCUMENTATION = """
"""
EXAMPLES = """
<<<<<<< HEAD
- name: Create a ruleset for everyday except Sundays
set_fact:
complex_rule: "{{ lookup(awx.awx.schedule_rruleset, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}"
@@ -119,19 +118,6 @@ EXAMPLES = """
interval: 1
byweekday: 'sunday'
include: false
=======
- name: Create a ruleset for everyday except Sundays
set_fact:
complex_rule: "{{ lookup(awx.awx.schedule_rruleset, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}"
vars:
rrules:
- frequency: 'day'
interval: 1
- frequency: 'day'
interval: 1
byweekday: 'sunday'
include: False
>>>>>>> tower/test_stable-2.6
"""
RETURN = """

View File

@@ -75,10 +75,6 @@ class ControllerModule(AnsibleModule):
aap_token=dict(
type='raw',
no_log=True,
<<<<<<< HEAD
=======
aliases=['controller_oauthtoken',],
>>>>>>> tower/test_stable-2.6
required=False,
fallback=(env_fallback, ['CONTROLLER_OAUTH_TOKEN', 'TOWER_OAUTH_TOKEN', 'AAP_TOKEN'])
),
@@ -136,23 +132,6 @@ class ControllerModule(AnsibleModule):
if direct_value is not None:
setattr(self, short_param, direct_value)
<<<<<<< HEAD
=======
# Perform magic depending on whether aap_token is a string or a dict
if self.params.get('aap_token'):
token_param = self.params.get('aap_token')
if isinstance(token_param, dict):
if 'token' in token_param:
self.oauth_token = self.params.get('aap_token')['token']
else:
self.fail_json(msg="The provided dict in aap_token did not properly contain the token entry")
elif isinstance(token_param, string_types):
self.oauth_token = self.params.get('aap_token')
else:
error_msg = "The provided aap_token type was not valid ({0}). Valid options are str or dict.".format(type(token_param).__name__)
self.fail_json(msg=error_msg)
>>>>>>> tower/test_stable-2.6
# Perform some basic validation
if not self.host.startswith(("https://", "http://")): # NOSONAR
self.host = "https://{0}".format(self.host)

View File

@@ -1,166 +0,0 @@
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2020,Geoffrey Bachelot <bachelotg@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: application
author: "Geoffrey Bacheot (@jffz)"
short_description: create, update, or destroy Automation Platform Controller applications
deprecated:
removed_in: '25.0.0'
why: This module manages a legacy authentication feature that is being phased out.
alternative: Migrate to token-based authentication.
description:
- Create, update, or destroy Automation Platform Controller applications. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- Name of the application.
required: True
type: str
new_name:
description:
- Setting this option will change the existing name (looked up via the name field).
type: str
description:
description:
- Description of the application.
type: str
authorization_grant_type:
description:
- The grant type the user must use for acquire tokens for this application.
choices: ["password", "authorization-code"]
type: str
required: False
client_type:
description:
- Set to public or confidential depending on how secure the client device is.
choices: ["public", "confidential"]
type: str
required: False
organization:
description:
- Name, ID, or named URL of organization for application.
type: str
required: True
redirect_uris:
description:
- Allowed urls list, space separated. Required when authorization-grant-type=authorization-code
type: list
elements: str
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
type: str
skip_authorization:
description:
- Set True to skip authorization step for completely trusted applications.
type: bool
extends_documentation_fragment: awx.awx.auth
'''
EXAMPLES = '''
- name: Add Foo application
application:
name: "Foo"
description: "Foo bar application"
organization: "test"
state: present
authorization_grant_type: password
client_type: public
- name: Add Foo application
application:
name: "Foo"
description: "Foo bar application"
organization: "test"
state: present
authorization_grant_type: authorization-code
client_type: confidential
redirect_uris:
- http://tower.com/api/v2/
'''
from ..module_utils.controller_api import ControllerAPIModule
def main():
# Any additional arguments that are not fields of the item can be added here
argument_spec = dict(
name=dict(required=True),
new_name=dict(),
description=dict(),
authorization_grant_type=dict(choices=["password", "authorization-code"]),
client_type=dict(choices=['public', 'confidential']),
organization=dict(required=True),
redirect_uris=dict(type="list", elements='str'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
skip_authorization=dict(type='bool'),
)
# Create a module for ourselves
module = ControllerAPIModule(argument_spec=argument_spec)
# Extract our parameters
name = module.params.get('name')
new_name = module.params.get("new_name")
description = module.params.get('description')
authorization_grant_type = module.params.get('authorization_grant_type')
client_type = module.params.get('client_type')
organization = module.params.get('organization')
redirect_uris = module.params.get('redirect_uris')
skip_authorization = module.params.get('skip_authorization')
state = module.params.get('state')
# Attempt to look up the related items the user specified (these will fail the module if not found)
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up application based on the provided name and org ID
application = module.get_one('applications', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'organization': org_id}})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(application)
# Create the data that gets sent for create and update
application_fields = {
'name': new_name if new_name else (module.get_item_name(application) if application else name),
'organization': org_id,
}
if authorization_grant_type is not None:
application_fields['authorization_grant_type'] = authorization_grant_type
if client_type is not None:
application_fields['client_type'] = client_type
if description is not None:
application_fields['description'] = description
if redirect_uris is not None:
application_fields['redirect_uris'] = ' '.join(redirect_uris)
if skip_authorization is not None:
application_fields['skip_authorization'] = skip_authorization
response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
if 'client_id' in response:
module.json_output['client_id'] = response['client_id']
if 'client_secret' in response:
module.json_output['client_secret'] = response['client_secret']
module.exit_json(**module.json_output)
if __name__ == '__main__':
main()

View File

@@ -180,13 +180,8 @@ EXAMPLES = '''
- frequency: 'day'
interval: 1
- frequency: 'day'
<<<<<<< HEAD
interval: 1
byweekday: 'sunday'
=======
every: 1
on_days: 'sunday'
>>>>>>> tower/test_stable-2.6
include: false
- name: Delete 'my_schedule' schedule for my_workflow

View File

@@ -19,11 +19,8 @@ from ansible.module_utils.six import raise_from
from ansible_base.rbac.models import RoleDefinition, DABPermission
from ansible_base.rbac import permission_registry
<<<<<<< HEAD
from awx.main.tests.conftest import load_all_credentials # noqa: F401; pylint: disable=unused-import
=======
>>>>>>> tower/test_stable-2.6
from awx.main.tests.functional.conftest import _request
from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh # noqa: F401; pylint: disable=unused-import
from awx.main.models import (

View File

@@ -20,10 +20,7 @@ def test_create_organization(run_module, admin_user):
'controller_username': None,
'controller_password': None,
'validate_certs': None,
<<<<<<< HEAD
=======
'aap_token': None,
>>>>>>> tower/test_stable-2.6
'controller_config_file': None,
}
@@ -56,10 +53,7 @@ def test_galaxy_credential_order(run_module, admin_user):
'controller_username': None,
'controller_password': None,
'validate_certs': None,
<<<<<<< HEAD
=======
'aap_token': None,
>>>>>>> tower/test_stable-2.6
'controller_config_file': None,
'galaxy_credentials': cred_ids,
}
@@ -84,10 +78,7 @@ def test_galaxy_credential_order(run_module, admin_user):
'controller_username': None,
'controller_password': None,
'validate_certs': None,
<<<<<<< HEAD
=======
'aap_token': None,
>>>>>>> tower/test_stable-2.6
'controller_config_file': None,
'galaxy_credentials': cred_ids,
}

View File

@@ -1,30 +0,0 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from awx.main.models import OAuth2AccessToken
@pytest.mark.django_db
def test_create_token(run_module, admin_user):
module_args = {
'description': 'barfoo',
'state': 'present',
'scope': 'read',
'controller_host': None,
'controller_username': None,
'controller_password': None,
'validate_certs': None,
'aap_token': None,
'controller_config_file': None,
}
result = run_module('token', module_args, admin_user)
assert result.get('changed'), result
tokens = OAuth2AccessToken.objects.filter(description='barfoo')
assert len(tokens) == 1, 'Tokens with description of barfoo != 0: {0}'.format(len(tokens))
assert tokens[0].scope == 'read', 'Token was not given read access'

View File

@@ -775,33 +775,6 @@
- "result is changed"
when: insights_found
- name: Create a valid Insights token credential
credential:
name: "{{ insights_cred_name2 }}"
organization: Default
state: present
credential_type: Insights
inputs:
client_id: joe
client_secret: secret
register: result
- assert:
that:
- "result is changed"
- name: Delete an Insights token credential
credential:
name: "{{ insights_cred_name2 }}"
organization: Default
state: absent
credential_type: Insights
register: result
- assert:
that:
- "result is changed"
- name: Create a valid Tower-to-Tower credential
credential:
name: "{{ tower_cred_name1 }}"

View File

@@ -7,16 +7,10 @@
ansible.builtin.set_fact:
plugin_name: "{{ controller_meta.prefix }}.schedule_rrule"
<<<<<<< HEAD
- name: Lookup with too many parameters (should fail)
ansible.builtin.set_fact:
_rrule: "{{ query(plugin_name, days_of_week=[1, 2], days_of_month=[15]) }}"
register: result_too_many_params
=======
- name: Test too many params (failure from validation of terms)
ansible.builtin.debug:
msg: "{{ lookup(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}"
>>>>>>> tower/test_stable-2.6
ignore_errors: true
- name: Assert proper error is reported for too many parameters
@@ -27,12 +21,8 @@
- name: Attempt invalid schedule_rrule lookup with bad frequency
ansible.builtin.debug:
<<<<<<< HEAD
msg: "{{ lookup(plugin_name, 'john', start_date='2020-04-16 03:45:07') }}"
register: result_bad_freq
=======
msg: "{{ lookup(plugin_name, 'john', start_date='2020-4-16 03:45:07') }}"
>>>>>>> tower/test_stable-2.6
ignore_errors: true
- name: Assert proper error is reported for bad frequency
@@ -44,10 +34,7 @@
- name: Test an invalid start date
ansible.builtin.debug:
msg: "{{ lookup(plugin_name, 'none', start_date='invalid') }}"
<<<<<<< HEAD
register: result_bad_date
=======
>>>>>>> tower/test_stable-2.6
ignore_errors: true
- name: Assert plugin error message for invalid start date
@@ -59,11 +46,7 @@
- name: Test end_on as count (generic success case)
ansible.builtin.debug:
msg: "{{ lookup(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}"
<<<<<<< HEAD
register: result_success
=======
register: result
>>>>>>> tower/test_stable-2.6
- name: Assert successful rrule generation
ansible.builtin.assert:

View File

@@ -1,154 +0,0 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined
- name: Generate names
set_fact:
token_description: "AWX-Collection-tests-token-description-{{ test_id }}"
- name: Try to use a token as a dict which is missing the token parameter
job_list:
controller_oauthtoken:
not_token: "This has no token entry"
register: results
ignore_errors: true
- assert:
that:
- results is failed
- '"The provided dict in aap_token did not properly contain the token entry" == results.msg'
- name: Try to use a token as a list
job_list:
controller_oauthtoken:
- dummy_token
register: results
ignore_errors: true
- assert:
that:
- results is failed
- '"The provided aap_token type was not valid (list). Valid options are str or dict." == results.msg'
- name: Try to delete a token with no existing_token or existing_token_id
token:
state: absent
register: results
ignore_errors: true
- assert:
that:
- results is failed
# We don't assert a message here because it's handled by ansible
- name: Try to delete a token with both existing_token or existing_token_id
token:
existing_token:
id: 1234
existing_token_id: 1234
state: absent
register: results
ignore_errors: true
- assert:
that:
- results is failed
# We don't assert a message here because it's handled by ansible
- block:
- name: Create a Token
token:
description: '{{ token_description }}'
scope: "write"
state: present
register: new_token
- name: Validate our token works by token
job_list:
controller_oauthtoken: "{{ controller_token.token }}"
register: job_list
- name: Validate our token works by object
job_list:
controller_oauthtoken: "{{ controller_token }}"
register: job_list
always:
- name: Delete our Token with our own token
token:
existing_token: "{{ controller_token }}"
controller_oauthtoken: "{{ controller_token }}"
state: absent
when: controller_token is defined
register: results
- assert:
that:
- results is changed or results is skipped
- block:
- name: Create a second token
token:
description: '{{ token_description }}'
scope: "write"
state: present
register: results
- assert:
that:
- results is changed
always:
- name: Delete the second Token with our own token
token:
existing_token_id: "{{ controller_token['id'] }}"
controller_oauthtoken: "{{ controller_token }}"
state: absent
when: controller_token is defined
register: results
- assert:
that:
- results is changed or resuslts is skipped
- block:
- name: Create a less privileged token (read)
token:
description: '{{ token_description }}'
scope: "read"
state: present
register: read_only_token
- debug:
msg: "{{read_only_token}}"
- name: Exercise the aap_token parameter with the new token.
job_list:
aap_token: "{{ read_only_token.ansible_facts.controller_token.token }}"
- name: Ensure the new token is being used and not the default token for the tests.
token:
aap_token: "{{ read_only_token.ansible_facts.controller_token.token }}"
scope: "write"
state: present
ignore_errors: true
register: result
- assert:
that:
- "'You don\\'t have permission to POST' in result.msg"
always:
- name: Delete the less privileged token
token:
existing_token_id: "{{ read_only_token['id'] }}"
state: absent
when: read_only_token is defined
register: result
- assert:
that:
- result is changed

View File

@@ -119,11 +119,7 @@ The following notes are changes that may require changes to playbooks:
- The `notification_configuration` parameter of `tower_notification_template` has changed from a string to a dict. Please use the `lookup` plugin to read an existing file into a dict.
- `tower_credential` no longer supports passing a file name to `ssh_key_data`.
- The HipChat `notification_type` has been removed and can no longer be created using the `tower_notification_template` module.
<<<<<<< HEAD
- Lookup plugins now always return a list, and if you want a scalar value use `lookup` as opposed to `query`
=======
- Lookup plugins now always reutrn a list, and if you want a scalar value use `lookup` as opposed to `query`
>>>>>>> tower/test_stable-2.6
{% if collection_package | lower() == "awx" %}
## Running Unit Tests

View File

@@ -1,176 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

Binary file not shown.

View File

@@ -29,7 +29,7 @@ filterwarnings =
once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite
# FIXME: Delete this entry once `polymorphic` is updated.
once:pkg_resources is deprecated as an API. See https.//setuptools.pypa.io/en/latest/pkg_resources.html:DeprecationWarning:_pytest.assertion.rewrite
once:pkg_resources is deprecated as an API.
# FIXME: Delete this entry once `zope` is updated.
once:Deprecated call to `pkg_resources.declare_namespace.'zope'.`.\nImplementing implicit namespace packages .as specified in PEP 420. is preferred to `pkg_resources.declare_namespace`. See https.//setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages:DeprecationWarning:

View File

@@ -104,7 +104,7 @@ click==8.1.8
# via receptorctl
constantly==23.10.4
# via twisted
cryptography==42.0.8
cryptography==41.0.7
# via
# -r /awx_devel/requirements/requirements.in
# adal
@@ -117,7 +117,7 @@ cryptography==42.0.8
# pyjwt
# pyopenssl
# service-identity
cython==3.0.11
cython==3.1.3
# via -r /awx_devel/requirements/requirements.in
daphne==4.1.2
# via -r /awx_devel/requirements/requirements.in
@@ -365,11 +365,6 @@ propcache==0.2.1
# yarl
protobuf==5.29.3
# via
# aiohttp
# yarl
protobuf==4.25.8
# via
# -r /awx_devel/requirements/requirements.in
# googleapis-common-protos
# opentelemetry-proto
psutil==6.1.1
@@ -540,7 +535,7 @@ uwsgitop==0.12
# via -r /awx_devel/requirements/requirements.in
websocket-client==1.8.0
# via kubernetes
wheel==0.45.1
wheel==0.42.0
# via -r /awx_devel/requirements/requirements.in
wrapt==1.17.0
# via
@@ -556,7 +551,7 @@ zope-interface==7.2
# The following packages are considered to be unsafe in a requirements file:
pip==21.2.4
# via -r /awx_devel/requirements/requirements.in
setuptools==70.3.0
setuptools==80.9.0
# via
# -r /awx_devel/requirements/requirements.in
# asciichartpy

View File

@@ -1,11 +1,5 @@
git+https://github.com/ansible/system-certifi.git@devel#egg=certifi
<<<<<<< HEAD
git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel#egg=awx-plugins-core[credentials-github-app]
django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
awx_plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git
=======
# Remove pbr from requirements.in when moving ansible-runner to requirements.in
git+https://github.com/ansible/python3-saml.git@devel#egg=python3-saml
django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
>>>>>>> tower/test_stable-2.6